def test_removing_ethereum_tokens_async(
        rotkehlchen_api_server,
        ethereum_accounts,
        number_of_eth_accounts,
):
    """Test that the rest api endpoint to remove tracked ethereum tokens works
    properly when queried asynchronously"""
    rotki = rotkehlchen_api_server.rest_api.rotkehlchen
    setup = setup_balances(
        rotki,
        ethereum_accounts=ethereum_accounts,
        token_balances={
            'RDN': ['0', '0'],
            'DAI': ['50000000', '0'],
            'GNO': ['0', '0'],
        },
        btc_accounts=[],
    )
    # Remove GNO and RDN as tracked tokens and make sure that the dai balance checks out
    with setup.etherscan_patch:
        response = requests.delete(api_url_for(
            rotkehlchen_api_server,
            "ethereumtokensresource",
        ), json={'eth_tokens': ['GNO', 'RDN'], 'async_query': True})
        task_id = assert_ok_async_response(response)
        outcome = wait_for_async_task(rotkehlchen_api_server, task_id)

    # Only DAI should remain as a tracked token after the removal
    assert_modifying_ethereum_tokens(
        rotkehlchen_api_server,
        outcome,
        ethereum_accounts,
        setup,
        ['DAI'],
    )
def test_adding_ethereum_tokens_async(
        rotkehlchen_api_server,
        ethereum_accounts,
):
    """Test calling the rest api endpoint to add new ethereum tokens works asynchronously"""
    rotki = rotkehlchen_api_server.rest_api.rotkehlchen
    setup = setup_balances(
        rotki,
        ethereum_accounts=ethereum_accounts,
        token_balances={
            A_RDN: ['0', '4000000'],
            A_DAI: ['50000000', '0'],
            A_MKR: ['1115000', '0'],
            A_GNO: ['0', '455552222'],
        },
        btc_accounts=[],
    )
    # Add RDN and MKR as tracked tokens and make sure that the rdn balance checks out
    with setup.etherscan_patch, setup.alethio_patch:
        response = requests.put(api_url_for(
            rotkehlchen_api_server,
            "ethereumtokensresource",
        ), json={'eth_tokens': ['RDN', 'MKR'], 'async_query': True})
        task_id = assert_ok_async_response(response)
        outcome = wait_for_async_task(rotkehlchen_api_server, task_id)

    # DAI and GNO were already tracked; RDN and MKR should now also appear
    assert_modifying_ethereum_tokens(
        rotkehlchen_api_server,
        outcome,
        ethereum_accounts,
        setup,
        ['DAI', 'GNO', 'RDN', 'MKR'],
    )
def test_query_yearn_vault_v2_balances(rotkehlchen_api_server, ethereum_accounts):
    """Query the yearn vaults v2 balances endpoint (sync or async, chosen at
    random) and check that each returned vault has a ROI and positive values"""
    async_query = random.choice([True, False])
    rotki = rotkehlchen_api_server.rest_api.rotkehlchen
    setup = setup_balances(
        rotki,
        ethereum_accounts=ethereum_accounts,
        btc_accounts=None,
        token_balances={
            # yvault share token addresses mapped to owned raw balances
            '0x5f18C75AbDAe578b483E5F43f12a39cF75b973a9': ['70000000'],
            '0xB8C3B7A2A618C552C23B1E4701109a9E756Bab67': ['2550000000000000000000'],
        },
    )
    with ExitStack() as stack:
        # patch ethereum/etherscan to not autodetect tokens
        setup.enter_ethereum_patches(stack)
        response = requests.get(api_url_for(
            rotkehlchen_api_server,
            "yearnvaultsv2balancesresource",
        ), json={'async_query': async_query})
        if async_query:
            task_id = assert_ok_async_response(response)
            outcome = wait_for_async_task(rotkehlchen_api_server, task_id)
            assert outcome['message'] == ''
            result = outcome['result']
        else:
            result = assert_proper_response_with_result(response)

    for _, vault in result[TEST_V2_ACC2].items():
        assert '%' in vault['roi']
        assert FVal(vault['vault_value']['amount']) > ZERO
        assert FVal(vault['vault_value']['usd_value']) > ZERO
        assert FVal(vault['underlying_value']['amount']) > ZERO
        assert FVal(vault['underlying_value']['usd_value']) > ZERO
def test_query_current_dsr_balance_async(
        rotkehlchen_api_server,
        ethereum_accounts,
):
    """Query the MakerDAO DSR balance endpoint asynchronously and verify the
    reported current balance matches the mocked on-chain data"""
    rotki = rotkehlchen_api_server.rest_api.rotkehlchen
    account1 = ethereum_accounts[0]
    account2 = ethereum_accounts[2]
    setup = setup_tests_for_dsr(
        etherscan=rotki.etherscan,
        account1=account1,
        account2=account2,
        original_requests_get=requests.get,
    )
    with setup.etherscan_patch:
        response = requests.get(api_url_for(
            rotkehlchen_api_server,
            "makerdaodsrbalanceresource",
        ), json={'async_query': True})
        task_id = assert_ok_async_response(response)
        outcome = wait_for_async_task(rotkehlchen_api_server, task_id)

    assert outcome['message'] == ''
    assert_dsr_current_result_is_correct(outcome['result'], setup)
def test_staking_events(rotkehlchen_api_server):
    """Test that Liquity staking events are correctly queried"""
    async_query = random.choice([True, False])
    response = requests.get(
        api_url_for(
            rotkehlchen_api_server,
            'liquitystakinghistoryresource',
        ),
        json={
            'async_query': async_query,
            'from_timestamp': 0,
            'to_timestamp': 1628026696,
            'reset_db_data': False,
        },
    )
    if async_query:
        task_id = assert_ok_async_response(response)
        result = wait_for_async_task_with_result(rotkehlchen_api_server, task_id)
    else:
        result = assert_proper_response_with_result(response)

    assert LQTY_ADDR in result
    assert len(result[LQTY_ADDR]) == 1
    trove_stake = result[LQTY_ADDR][0]
    tx_id = '0xe527749c76a3af56d86c97a8f8f8ce07e191721e9e16a0f62a228f8a8ef6d295'
    assert trove_stake['tx'] == tx_id
    assert trove_stake['timestamp'] == 1627827057
    assert trove_stake['kind'] == 'stake'
    # For a newly created stake the post-stake amount equals the stake change
    assert trove_stake['stake_after']['amount'] == trove_stake['stake_change']['amount']
    asset = trove_stake['stake_after']['asset']
    assert asset == '_ceth_0x6DEA81C8171D0bA574754EF6F8b412F2Ed88c54D'
    assert trove_stake['stake_after']['amount'] == '177.02'
    assert trove_stake['stake_operation'] == 'stake created'
    assert trove_stake['sequence_number'] == '51676'
def test_query_compound_history(rotkehlchen_api_server, ethereum_accounts):  # pylint: disable=unused-argument  # noqa: E501
    """Check querying the compound history endpoint works. Uses real data"""
    rotki = rotkehlchen_api_server.rest_api.rotkehlchen
    setup = setup_balances(
        rotki,
        ethereum_accounts=ethereum_accounts,
        eth_balances=['1000000', '2000000', '33000030003', '42323213'],
        token_balances={},
        btc_accounts=None,
        original_queries=['zerion'],
    )
    # Since this test can be a bit slow we don't run both async and sync in the same test run
    # Instead we randomly choose one. Eventually both cases will be covered.
    async_query = random.choice([True, False])

    with ExitStack() as stack:
        # patch ethereum/etherscan to not autodetect tokens
        setup.enter_ethereum_patches(stack)
        response = requests.get(api_url_for(
            rotkehlchen_api_server,
            "compoundhistoryresource",
        ), json={'async_query': async_query})
        if async_query:
            task_id = assert_ok_async_response(response)
            # Timeout of 120 since this test can take a long time
            outcome = wait_for_async_task(rotkehlchen_api_server, task_id, timeout=120)
            assert outcome['message'] == ''
            result = outcome['result']
        else:
            result = assert_proper_response_with_result(response)

    assert len(result) == 5
    expected_events = process_result_list(EXPECTED_EVENTS)
    # Check only 22 first events, since this is how many there were in the time of
    # the writing of the test. Also don't check events for one of the addresses
    # as it's added later, has many events and it's only to see we handle repay correctly
    to_check_events = [
        x for x in result['events']
        if x['address'] != '0x65304d6aff5096472519ca86a6a1fea31cb47Ced'
    ]
    assert to_check_events[:22] == expected_events
    # Check one repay event
    other_events = [
        x for x in result['events']
        if x['address'] == '0x65304d6aff5096472519ca86a6a1fea31cb47Ced'
    ]
    assert other_events[12]['event_type'] == 'repay'
    expected_hash = '0x48a3e2ef8a746383deac34d74f2f0ea0451b2047701fbed4b9d769a782888eea'
    assert other_events[12]['tx_hash'] == expected_hash
    assert other_events[12]['value']['amount'] == '0.55064402'

    # Check interest profit mappings
    profit_0 = result['interest_profit']['0x2B888954421b424C5D3D9Ce9bB67c9bD47537d12']
    assert FVal(profit_0['DAI']['amount']) > FVal(9)
    profit_1 = result['interest_profit']['0xC440f3C87DC4B6843CABc413916220D4f4FeD117']
    assert FVal(profit_1['USDC']['amount']) > FVal(2)
    profit_2 = result['interest_profit']['0xF59D4937BF1305856C3a267bB07791507a3377Ee']
    assert FVal(profit_2['DAI']['amount']) > FVal('0.3')

    # Check debt loss mappings
    debt_0 = result['debt_loss']['0xC440f3C87DC4B6843CABc413916220D4f4FeD117']
    assert FVal(debt_0['cUSDC']['amount']) > FVal('84')
    assert FVal(debt_0['ETH']['amount']) > FVal('0.000012422')

    # Check liquidation profit mappings
    lprofit_0 = result['liquidation_profit']['0xC440f3C87DC4B6843CABc413916220D4f4FeD117']
    assert FVal(lprofit_0['ETH']['amount']) > FVal('0.000012')

    # Check rewards mappings
    rewards_0 = result['rewards']['0xC440f3C87DC4B6843CABc413916220D4f4FeD117']
    assert FVal(rewards_0['COMP']['amount']) > FVal('0.000036')
    rewards_1 = result['rewards']['0xF59D4937BF1305856C3a267bB07791507a3377Ee']
    assert FVal(rewards_1['COMP']['amount']) > FVal('0.003613')
def test_query_all_balances(
        rotkehlchen_api_server_with_exchanges,
        ethereum_accounts,
        btc_accounts,
):
    """Test that using the query all balances endpoint works

    Test that balances from various sources are returned. Such as exchanges,
    blockchain and manually tracked balances"""
    async_query = random.choice([False, True])
    # Disable caching of query results
    rotki = rotkehlchen_api_server_with_exchanges.rest_api.rotkehlchen
    rotki.chain_manager.cache_ttl_secs = 0
    setup = setup_balances(
        rotki=rotki,
        ethereum_accounts=ethereum_accounts,
        btc_accounts=btc_accounts,
        manually_tracked_balances=[ManuallyTrackedBalance(
            asset=A_EUR,
            label='My EUR bank',
            amount=FVal('1550'),
            location=Location.BANKS,
            tags=None,
        )],
    )
    # Test that all balances request saves data on a fresh account
    with ExitStack() as stack:
        setup.enter_all_patches(stack)
        response = requests.get(
            api_url_for(
                rotkehlchen_api_server_with_exchanges,
                "allbalancesresource",
            ),
            json={'async_query': async_query},
        )
        if async_query:
            task_id = assert_ok_async_response(response)
            outcome = wait_for_async_task_with_result(
                rotkehlchen_api_server_with_exchanges,
                task_id,
            )
        else:
            outcome = assert_proper_response_with_result(response)

    errors = rotki.msg_aggregator.consume_errors()
    assert len(errors) == 0
    assert_all_balances(
        result=outcome,
        db=rotki.data.db,
        expected_data_in_db=True,
        setup=setup,
    )

    last_save_timestamp = rotki.data.db.get_last_balance_save_time()

    # now do the same but check to see if the balance save frequency delay works
    # and thus data will not be saved
    with ExitStack() as stack:
        setup.enter_all_patches(stack)
        response = requests.get(
            api_url_for(
                rotkehlchen_api_server_with_exchanges,
                "allbalancesresource",
            ),
        )
    assert_proper_response(response)
    new_save_timestamp = rotki.data.db.get_last_balance_save_time()
    assert last_save_timestamp == new_save_timestamp

    # wait for at least 1 second to make sure that new balances can be saved.
    # Can't save balances again if it's the same timestamp
    gevent.sleep(1)

    # now do the same but test that balance are saved since the balance save frequency delay
    # is overridden via `save_data` = True
    with ExitStack() as stack:
        setup.enter_all_patches(stack)
        response = requests.get(
            api_url_for(
                rotkehlchen_api_server_with_exchanges,
                "allbalancesresource",
            ),
            json={'save_data': True},
        )
    assert_proper_response(response)
    new_save_timestamp = rotki.data.db.get_last_balance_save_time()
    assert last_save_timestamp != new_save_timestamp
def test_foreignkey_conflict(rotkehlchen_api_server, globaldb):
    """Test that when a conflict that's not solvable happens the entry is ignored

    One such case is when the update of an asset would violate a foreign key constraint"""
    async_query = random.choice([False, True])
    rotki = rotkehlchen_api_server.rest_api.rotkehlchen
    # Asset-update payload: statements separated by a lone '*' line, as the
    # rotki asset-updates format requires. The second action swaps GNT for an
    # unknown asset which cannot be resolved and must be skipped.
    update_1 = """INSERT INTO assets(identifier,type,name,symbol,started, swapped_for, coingecko, cryptocompare, details_reference) VALUES("121-ada-FADS-as", "F","A name","SYMBOL",NULL, NULL,"", "", "121-ada-FADS-as");INSERT INTO common_asset_details(asset_id, forked) VALUES("121-ada-FADS-as", "BTC");
*
UPDATE assets SET swapped_for="_ceth_0xA8d35739EE92E69241A2Afd9F513d41021A07972" WHERE identifier="_ceth_0xa74476443119A942dE498590Fe1f2454d7D4aC0d";
INSERT INTO ethereum_tokens(address, decimals, protocol) VALUES("0xa74476443119A942dE498590Fe1f2454d7D4aC0d", 18, NULL);INSERT INTO assets(identifier,type, name, symbol,started, swapped_for, coingecko, cryptocompare, details_reference) VALUES("_ceth_0xa74476443119A942dE498590Fe1f2454d7D4aC0d", "C", "Golem", "GNT", 1478810650, "_ceth_0xA8d35739EE92E69241A2Afd9F513d41021A07972", "golem", NULL, "0xa74476443119A942dE498590Fe1f2454d7D4aC0d");
"""  # noqa: E501
    update_patch = mock_asset_updates(
        original_requests_get=requests.get,
        latest=999999991,
        updates={"999999991": {
            "changes": 2,
            "min_schema_version": GLOBAL_DB_VERSION,
            "max_schema_version": GLOBAL_DB_VERSION,
        }},
        sql_actions={"999999991": update_1},
    )
    globaldb.add_setting_value(ASSETS_VERSION_KEY, 999999990)
    start_assets_num = len(globaldb.get_all_asset_data(mapping=False))
    with update_patch:
        response = requests.get(
            api_url_for(
                rotkehlchen_api_server,
                'assetupdatesresource',
            ),
            json={'async_query': async_query},
        )
        if async_query:
            task_id = assert_ok_async_response(response)
            outcome = wait_for_async_task(
                rotkehlchen_api_server,
                task_id,
            )
            result = outcome['result']
            assert outcome['message'] == ''
        else:
            result = assert_proper_response_with_result(response)
        assert result['local'] == 999999990
        assert result['remote'] == 999999991
        assert result['new_changes'] == 2

        # Performing the update should report a conflict and commit nothing
        response = requests.post(
            api_url_for(
                rotkehlchen_api_server,
                'assetupdatesresource',
            ),
            json={'async_query': async_query},
        )
        if async_query:
            task_id = assert_ok_async_response(response)
            outcome = wait_for_async_task(
                rotkehlchen_api_server,
                task_id,
            )
            assert outcome['message'] == 'Found conflicts during assets upgrade'
            result = outcome['result']
        else:
            result = assert_proper_response_with_result(
                response,
                message='Found conflicts during assets upgrade',
                status_code=HTTPStatus.CONFLICT,
            )

        # Make sure that nothing was committed
        assert globaldb.get_setting_value(ASSETS_VERSION_KEY, None) == 999999990
        assert len(globaldb.get_all_asset_data(mapping=False)) == start_assets_num
        with pytest.raises(UnknownAsset):
            Asset('121-ada-FADS-as')
        errors = rotki.msg_aggregator.consume_errors()
        warnings = rotki.msg_aggregator.consume_warnings()
        assert len(errors) == 0, f'Found errors: {errors}'
        assert len(warnings) == 0, f'Found warnings: {warnings}'
        # See that we get a conflict
        expected_result = [{
            'identifier': '_ceth_0xa74476443119A942dE498590Fe1f2454d7D4aC0d',
            'local': {
                'name': 'Golem',
                'symbol': 'GNT',
                'asset_type': 'ethereum token',
                'started': 1478810650,
                'forked': None,
                'swapped_for': '_ceth_0x7DD9c5Cba05E151C895FDe1CF355C9A1D5DA6429',
                'ethereum_address': '0xa74476443119A942dE498590Fe1f2454d7D4aC0d',
                'decimals': 18,
                'cryptocompare': None,
                'coingecko': 'golem',
                'protocol': None,
            },
            'remote': {
                'name': 'Golem',
                'symbol': 'GNT',
                'asset_type': 'ethereum token',
                'started': 1478810650,
                'forked': None,
                'swapped_for': '_ceth_0xA8d35739EE92E69241A2Afd9F513d41021A07972',
                'ethereum_address': '0xa74476443119A942dE498590Fe1f2454d7D4aC0d',
                'decimals': 18,
                'cryptocompare': None,
                'coingecko': 'golem',
                'protocol': None,
            },
        }]
        assert result == expected_result

        # now try the update again but specify the conflicts resolution
        conflicts = {'_ceth_0xa74476443119A942dE498590Fe1f2454d7D4aC0d': 'remote'}
        response = requests.post(
            api_url_for(
                rotkehlchen_api_server,
                'assetupdatesresource',
            ),
            json={'async_query': async_query, 'conflicts': conflicts},
        )
        if async_query:
            task_id = assert_ok_async_response(response)
            outcome = wait_for_async_task(
                rotkehlchen_api_server,
                task_id,
            )
            assert outcome['message'] == ''
            result = outcome['result']
        else:
            result = assert_proper_response_with_result(
                response,
                message='',
                status_code=HTTPStatus.OK,
            )

        # check new asset was added and conflict was ignored with an error due to
        # inability to do anything with the missing swapped_for
        assert result is True
        assert globaldb.get_setting_value(ASSETS_VERSION_KEY, None) == 999999991
        gnt = EthereumToken('0xa74476443119A942dE498590Fe1f2454d7D4aC0d')
        assert gnt.identifier == strethaddress_to_identifier('0xa74476443119A942dE498590Fe1f2454d7D4aC0d')  # noqa: E501
        assert gnt.name == 'Golem'
        assert gnt.symbol == 'GNT'
        assert gnt.asset_type == AssetType.ETHEREUM_TOKEN
        assert gnt.started == 1478810650
        assert gnt.forked is None
        assert gnt.swapped_for == A_GLM.identifier
        assert gnt.coingecko == 'golem'
        assert gnt.cryptocompare is None
        assert gnt.ethereum_address == '0xa74476443119A942dE498590Fe1f2454d7D4aC0d'
        assert gnt.decimals == 18
        assert gnt.protocol is None

        new_asset = Asset('121-ada-FADS-as')
        assert new_asset.identifier == '121-ada-FADS-as'
        assert new_asset.name == 'A name'
        assert new_asset.symbol == 'SYMBOL'
        assert new_asset.asset_type == AssetType.COUNTERPARTY_TOKEN
        assert new_asset.started is None
        assert new_asset.forked == 'BTC'
        assert new_asset.swapped_for is None
        assert new_asset.coingecko == ''
        assert new_asset.cryptocompare == ''

        errors = rotki.msg_aggregator.consume_errors()
        warnings = rotki.msg_aggregator.consume_warnings()
        assert len(errors) == 0, f'Found errors: {errors}'
        assert len(warnings) == 1
        assert f'Failed to resolve conflict for {gnt.identifier} in the DB during the v999999991 assets update. Skipping entry' in warnings[0]  # noqa: E501
def test_query_yearn_vault_history(rotkehlchen_api_server, ethereum_accounts):
    """Check querying the yearn vaults history endpoint works. Uses real data.

    This really hurts the infura data usage since it queries way too many logs.
    So we don't always run it in the CI.
    """
    async_query = random.choice([True, False])
    rotki = rotkehlchen_api_server.rest_api.rotkehlchen
    setup = setup_balances(
        rotki,
        ethereum_accounts=ethereum_accounts,
        btc_accounts=None,
        original_queries=['zerion', 'logs', 'blocknobytime'],
    )
    # Run 2 times to make sure that loading data from DB the 2nd time works fine
    for _ in range(2):
        with ExitStack() as stack:
            # patch ethereum/etherscan to not autodetect tokens (not needed with infura)
            setup.enter_ethereum_patches(stack)
            response = requests.get(api_url_for(
                rotkehlchen_api_server,
                "yearnvaultshistoryresource",
            ), json={'async_query': async_query})
            if async_query:
                task_id = assert_ok_async_response(response)
                outcome = wait_for_async_task(rotkehlchen_api_server, task_id, timeout=600)
                assert outcome['message'] == ''
                result = outcome['result']
            else:
                result = assert_proper_response_with_result(response)

        # Make sure some data was saved in the DB after first call
        events = rotki.data.db.get_yearn_vaults_events(
            TEST_ACC1,
            YEARN_VAULTS['yyDAI+yUSDC+yUSDT+yTUSD'],
        )
        assert len(events) >= 11

        result = result[TEST_ACC1]
        check_vault_history('YALINK Vault', EXPECTED_HISTORY, result)
        check_vault_history('YCRV Vault', EXPECTED_HISTORY, result)
        check_vault_history('YSRENCURVE Vault', EXPECTED_HISTORY, result)
        check_vault_history('YUSDC Vault', EXPECTED_HISTORY, result)
        check_vault_history('YUSDT Vault', EXPECTED_HISTORY, result)
        check_vault_history('YYFI Vault', EXPECTED_HISTORY, result)

    # Make sure events end up in the DB
    # test yearn vault data purging from the db works
    response = requests.delete(api_url_for(
        rotkehlchen_api_server,
        'ethereummoduledataresource',
        module_name='yearn_vaults',
    ))
    assert_simple_ok_response(response)
    events = rotki.data.db.get_yearn_vaults_events(
        TEST_ACC1,
        YEARN_VAULTS['yyDAI+yUSDC+yUSDT+yTUSD'],
    )
    assert len(events) == 0
def test_query_eth2_info(rotkehlchen_api_server, ethereum_accounts):
    """This test uses real data and queries the eth2 deposit contract logs"""
    async_query = random.choice([False, True])
    rotki = rotkehlchen_api_server.rest_api.rotkehlchen
    setup = setup_balances(
        rotki,
        ethereum_accounts=ethereum_accounts,
        btc_accounts=[],
        original_queries=['logs', 'transactions', 'blocknobytime', 'beaconchain'],
    )
    # Query the staking details endpoint
    with ExitStack() as stack:
        setup.enter_blockchain_patches(stack)
        response = requests.get(
            api_url_for(
                rotkehlchen_api_server,
                'eth2stakedetailsresource',
            ),
            json={'async_query': async_query},
        )
        if async_query:
            task_id = assert_ok_async_response(response)
            outcome = wait_for_async_task(
                rotkehlchen_api_server,
                task_id,
                timeout=ASYNC_TASK_WAIT_TIMEOUT * 5,
            )
            assert outcome['message'] == ''
            details = outcome['result']
        else:
            details = assert_proper_response_with_result(response)

    # Query the staking deposits endpoint
    with ExitStack() as stack:
        setup.enter_blockchain_patches(stack)
        response = requests.get(
            api_url_for(
                rotkehlchen_api_server,
                'eth2stakedepositsresource',
            ),
            json={'async_query': async_query},
        )
        if async_query:
            task_id = assert_ok_async_response(response)
            outcome = wait_for_async_task(
                rotkehlchen_api_server,
                task_id,
                timeout=ASYNC_TASK_WAIT_TIMEOUT * 5,
            )
            assert outcome['message'] == ''
            deposits = outcome['result']
        else:
            deposits = assert_proper_response_with_result(response)

    expected_pubkey = '0xb016e31f633a21fbe42a015152399361184f1e2c0803d89823c224994af74a561c4ad8cfc94b18781d589d03e952cd5b'  # noqa: E501
    assert deposits[0] == {
        'from_address': '0xfeF0E7635281eF8E3B705e9C5B86e1d3B0eAb397',
        'log_index': 22,
        'pubkey': expected_pubkey,
        'timestamp': 1604506685,
        'tx_hash': '0xd9eca1c2a0c5ff2f25071713432b21cc4d0ff2e8963edc63a48478e395e08db1',
        'deposit_index': 9,
        'value': {'amount': '32', 'usd_value': '32'},
        'withdrawal_credentials': '0x004c7691c2085648f394ffaef851f3b1d51b95f7263114bc923fc5338f5fc499',  # noqa: E501
    }
    assert FVal(details[0]['balance']['amount']) >= ZERO
    assert FVal(details[0]['balance']['usd_value']) >= ZERO
    assert details[0]['eth1_depositor'] == '0xfeF0E7635281eF8E3B705e9C5B86e1d3B0eAb397'  # noqa: E501
    assert details[0]['index'] == 9
    assert details[0]['public_key'] == expected_pubkey
    for duration in ('1d', '1w', '1m', '1y'):
        performance = details[0][f'performance_{duration}']
        assert FVal(performance['amount']) >= ZERO
        assert FVal(performance['usd_value']) >= ZERO
def test_multiple_balance_queries_not_concurrent(
        rotkehlchen_api_server_with_exchanges,
        ethereum_accounts,
        btc_accounts,
):
    """Test multiple different balance query requests happening concurrently

    This tests that if multiple balance query requests happen concurrently we
    do not end up doing them multiple times, but reuse the results thanks to cache.
    """
    rotki = rotkehlchen_api_server_with_exchanges.rest_api.rotkehlchen
    setup = setup_balances(rotki, ethereum_accounts, btc_accounts)

    # Wrap (not replace) the underlying query functions so call counts can be checked
    multieth_balance_patch = patch.object(
        rotki.chain_manager.ethereum,
        'get_multieth_balance',
        wraps=rotki.chain_manager.ethereum.get_multieth_balance,
    )
    binance = rotki.exchange_manager.connected_exchanges['binance']
    binance_querydict_patch = patch.object(binance, 'api_query_dict', wraps=binance.api_query_dict)

    # Test all balances request by requesting to not save the data
    with ExitStack() as stack:
        setup.enter_all_patches(stack)
        eth = stack.enter_context(multieth_balance_patch)
        bn = stack.enter_context(binance_querydict_patch)
        response = requests.get(
            api_url_for(
                rotkehlchen_api_server_with_exchanges,
                "allbalancesresource",
            ),
            json={'async_query': True},
        )
        task_id_all = assert_ok_async_response(response)
        response = requests.get(api_url_for(
            rotkehlchen_api_server_with_exchanges,
            "named_exchanges_balances_resource",
            name='binance',
        ), json={'async_query': True})
        task_id_one_exchange = assert_ok_async_response(response)
        response = requests.get(api_url_for(
            rotkehlchen_api_server_with_exchanges,
            "blockchainbalancesresource",
        ), json={'async_query': True})
        task_id_blockchain = assert_ok_async_response(response)

        outcome_all = wait_for_async_task_with_result(
            rotkehlchen_api_server_with_exchanges,
            task_id_all,
        )
        outcome_one_exchange = wait_for_async_task(
            rotkehlchen_api_server_with_exchanges,
            task_id_one_exchange,
        )
        outcome_blockchain = wait_for_async_task_with_result(
            rotkehlchen_api_server_with_exchanges,
            task_id_blockchain,
        )
        # Each underlying source must have been queried exactly once despite
        # three concurrent balance requests
        assert eth.call_count == 1, 'blockchain balance call should not happen concurrently'
        assert bn.call_count == 1, 'binance balance call should not happen concurrently'

    assert_all_balances(
        result=outcome_all,
        db=rotki.data.db,
        expected_data_in_db=True,
        setup=setup,
    )
    assert_binance_balances_result(outcome_one_exchange['result'])
    assert_eth_balances_result(
        rotki=rotki,
        result=outcome_blockchain,
        eth_accounts=ethereum_accounts,
        eth_balances=setup.eth_balances,
        token_balances=setup.token_balances,
        also_btc=True,
    )
def test_get_events(rotkehlchen_api_server, ethereum_accounts):  # pylint: disable=unused-argument
    """Query the AdEx history endpoint, verify the returned staking events and
    that the adex module data can be purged from the DB afterwards"""
    async_query = random.choice([False, True])
    rotki = rotkehlchen_api_server.rest_api.rotkehlchen
    setup = setup_balances(
        rotki,
        ethereum_accounts=ethereum_accounts,
        btc_accounts=None,
        original_queries=['zerion', 'logs', 'blocknobytime'],
    )
    with ExitStack() as stack:
        # patch ethereum/etherscan to not autodetect tokens
        setup.enter_ethereum_patches(stack)
        response = requests.get(
            api_url_for(rotkehlchen_api_server, 'adexhistoryresource'),
            json={'async_query': async_query, 'to_timestamp': 1611747322},
        )
        if async_query:
            task_id = assert_ok_async_response(response)
            outcome = wait_for_async_task(rotkehlchen_api_server, task_id)
            assert outcome['message'] == ''
            result = outcome['result']
        else:
            result = assert_proper_response_with_result(response)

    identity_address = '0x2a6c38D16BFdc7b4a20f1F982c058F07BDCe9204'
    tom_pool_id = '0x2ce0c96383fb229d9776f33846e983a956a7d95844fac57b180ed0071d93bb28'
    bond_id = '0x540cab9883923c01e657d5da4ca5674b6e4626b4a148224635495502d674c7c5'
    channel_id = '0x30d87bab0ef1e7f8b4c3b894ca2beed41bbd54c481f31e5791c1e855c9dbf4ba'
    result = result[ADEX_TEST_ADDR]
    expected_events = [
        Bond(
            tx_hash='0x9989f47c6c0a761f98f910ac24e2438d858be96c12124a13be4bb4b3150c55ea',
            address=ADEX_TEST_ADDR,
            identity_address=identity_address,
            timestamp=1604366004,
            bond_id=bond_id,
            pool_id=tom_pool_id,
            value=Balance(FVal(100000), FVal(200000)),
            nonce=0,
            slashed_at=0,
        ), ChannelWithdraw(
            tx_hash='0xa9ee91af823c0173fc5ada908ff9fe3f4d7c84a2c9da795f0889b3f4ace75b13',
            address=ADEX_TEST_ADDR,
            identity_address=identity_address,
            timestamp=1607453764,
            channel_id=channel_id,
            pool_id=tom_pool_id,
            value=Balance(FVal('5056.894263641728544592'), FVal('10113.788527283457089184')),
            token=A_ADX,
            log_index=316,
        ), Unbond(
            tx_hash='0xa9ee91af823c0173fc5ada908ff9fe3f4d7c84a2c9da795f0889b3f4ace75b13',
            address=ADEX_TEST_ADDR,
            identity_address=identity_address,
            timestamp=1607453764,
            bond_id=bond_id,
            pool_id=tom_pool_id,
            value=Balance(FVal(100000), FVal(200000)),
        ),
    ]
    assert len(result['events']) == 8
    assert result['events'][:len(expected_events)] == [x.serialize() for x in expected_events]
    assert 'staking_details' in result
    # Make sure events end up in the DB
    assert len(rotki.data.db.get_adex_events()) != 0
    # test adex data purging from the db works
    response = requests.delete(api_url_for(
        rotkehlchen_api_server,
        'namedethereummoduledataresource',
        module_name='adex',
    ))
    assert_simple_ok_response(response)
    assert len(rotki.data.db.get_adex_events()) == 0
def test_get_balances(
        rotkehlchen_api_server,
        ethereum_accounts,  # pylint: disable=unused-argument
        start_with_valid_premium,
):
    """Check querying the sushiswap balances endpoint works. Uses real data

    Checks the functionality both for the graph queries (when premium) and
    simple onchain queries (without premium)
    """
    async_query = random.choice([False, True])
    response = requests.get(
        api_url_for(rotkehlchen_api_server, 'sushiswapbalancesresource'),
        json={'async_query': async_query},
    )
    if async_query:
        task_id = assert_ok_async_response(response)
        outcome = wait_for_async_task(
            server=rotkehlchen_api_server,
            task_id=task_id,
            timeout=ASYNC_TASK_WAIT_TIMEOUT * 10,
        )
        assert outcome['message'] == ''
        result = outcome['result']
    else:
        result = assert_proper_response_with_result(response)

    # Real-data test: the account may have no balances at query time, in which
    # case only warn instead of failing
    if SWAP_ADDRESS not in result or len(result[SWAP_ADDRESS]) == 0:
        test_warnings.warn(
            UserWarning(f'Test account {SWAP_ADDRESS} has no sushiswap balances'),
        )
        return

    address_balances = result[SWAP_ADDRESS]
    for lp in address_balances:
        # LiquidityPool attributes
        assert lp['address'].startswith('0x')
        assert len(lp['assets']) == 2
        if start_with_valid_premium:
            assert lp['total_supply'] is not None
        else:
            assert lp['total_supply'] is None
        assert lp['user_balance']['amount']
        assert lp['user_balance']['usd_value']
        # LiquidityPoolAsset attributes
        for lp_asset in lp['assets']:
            lp_asset_type = type(lp_asset['asset'])
            assert lp_asset_type in (str, dict)
            # Unknown asset, at least contains token address
            if lp_asset_type is dict:
                assert lp_asset['asset']['ethereum_address'].startswith('0x')
            # Known asset, contains identifier
            else:
                assert not lp_asset['asset'].startswith('0x')
            if start_with_valid_premium:
                assert lp_asset['total_amount'] is not None
            else:
                assert lp_asset['total_amount'] is None
            assert lp_asset['usd_price']
            assert len(lp_asset['user_balance']) == 2
            assert lp_asset['user_balance']['amount']
            assert lp_asset['user_balance']['usd_value']
def test_get_events_history_filtering_by_timestamp(
        rotkehlchen_api_server,
        ethereum_accounts,  # pylint: disable=unused-argument
):
    """Test the events balances from 1627401169 to 1627401170 (both included)."""
    # Call time range
    from_timestamp = 1627401169
    to_timestamp = 1627401170
    async_query = random.choice([False, True])
    rotki = rotkehlchen_api_server.rest_api.rotkehlchen
    setup = setup_balances(
        rotki,
        ethereum_accounts=ethereum_accounts,
        eth_balances=['33000030003'],
        token_balances={},
        btc_accounts=None,
        original_queries=['zerion', 'logs', 'blocknobytime'],
    )
    # Force insert address' last used query range, for avoiding query all
    rotki.data.db.update_used_query_range(
        name=f'{SUSHISWAP_EVENTS_PREFIX}_{TEST_EVENTS_ADDRESS_1}',
        start_ts=Timestamp(0),
        end_ts=from_timestamp,
    )
    with ExitStack() as stack:
        # patch ethereum/etherscan to not autodetect tokens
        setup.enter_ethereum_patches(stack)
        response = requests.get(
            api_url_for(
                rotkehlchen_api_server,
                'sushiswapeventshistoryresource',
            ),
            json={
                'async_query': async_query,
                'from_timestamp': from_timestamp,
                'to_timestamp': to_timestamp,
            },
        )
        if async_query:
            task_id = assert_ok_async_response(response)
            outcome = wait_for_async_task(rotkehlchen_api_server, task_id, timeout=120)
            assert outcome['message'] == ''
            result = outcome['result']
        else:
            result = assert_proper_response_with_result(response)

    events_balances = result[TEST_EVENTS_ADDRESS_1]
    assert len(events_balances) == 1
    assert EXPECTED_EVENTS_BALANCES_1[0].serialize() == events_balances[0]

    # Make sure they end up in the DB
    events = rotki.data.db.get_amm_events([EventType.MINT_SUSHISWAP, EventType.BURN_SUSHISWAP])
    assert len(events) != 0
    # test sushiswap data purging from the db works
    response = requests.delete(api_url_for(
        rotkehlchen_api_server,
        'namedethereummoduledataresource',
        module_name='sushiswap',
    ))
    assert_simple_ok_response(response)
    events = rotki.data.db.get_amm_events([EventType.MINT_SUSHISWAP, EventType.BURN_SUSHISWAP])
    assert len(events) == 0
def test_trove_events(rotkehlchen_api_server):
    """Test that Liquity Trove events are correctly queried"""
    async_query = random.choice([True, False])
    response = requests.get(
        api_url_for(
            rotkehlchen_api_server,
            'liquitytroveshistoryresource',
        ),
        json={
            'async_query': async_query,
            'from_timestamp': 0,
            'to_timestamp': 1628026696,
            'reset_db_data': False,
        },
    )
    if async_query:
        task_id = assert_ok_async_response(response)
        result = wait_for_async_task_with_result(rotkehlchen_api_server, task_id)
    else:
        result = assert_proper_response_with_result(response)

    assert LQTY_ADDR in result
    assert len(result[LQTY_ADDR]) == 2
    trove_action = result[LQTY_ADDR][0]
    tx_id = '0xc8ad6f6ec244a93e1d66e60d1eab2ff2cb9de1f3a1f45c7bb4e9d2f720254137'
    assert trove_action['tx'] == tx_id
    assert trove_action['timestamp'] == 1627818194
    assert trove_action['kind'] == 'trove'
    # For an Open Trove operation the delta equals the resulting amount
    assert trove_action['debt_delta']['amount'] == trove_action['debt_after']['amount']
    assert trove_action['debt_delta']['amount'] == '6029.001719188487125'
    assert trove_action['trove_operation'] == 'Open Trove'
    assert trove_action['collateral_after']['amount'] == trove_action['collateral_delta']['amount']
    assert trove_action['collateral_delta']['amount'] == '3.5'
    assert trove_action['sequence_number'] == '74148'

    # Check for account with dsproxy
    response = requests.get(
        api_url_for(
            rotkehlchen_api_server,
            'liquitytroveshistoryresource',
        ),
        json={
            'async_query': async_query,
            'from_timestamp': 1641529258,
            'to_timestamp': 1641529258,
            'reset_db_data': False,
        },
    )
    if async_query:
        task_id = assert_ok_async_response(response)
        result = wait_for_async_task_with_result(rotkehlchen_api_server, task_id)
    else:
        result = assert_proper_response_with_result(response)

    assert len(result[LQTY_PROXY]) == 1
    trove_action = result[LQTY_PROXY][0]
    tx_id = '0xef24b51a09151cce6728de1f9c3a0e69ca40db1dcc82f287a1743e41c90ce95b'
    assert trove_action['tx'] == tx_id
    assert trove_action['timestamp'] == 1641529258
    assert trove_action['kind'] == 'trove'
    # A liquidation zeroes out the trove; deltas are negative
    assert trove_action['debt_after']['amount'] == '0'
    assert trove_action['debt_delta']['amount'] == '-27436.074977906493051'
    assert trove_action['trove_operation'] == 'Liquidation In Normal Mode'
    assert trove_action['collateral_after']['amount'] == '0'
    assert trove_action['collateral_delta']['amount'] == '-9.420492116554037728'
    assert trove_action['sequence_number'] == '105764'
def test_query_asset_movements(rotkehlchen_api_server_with_exchanges):
    """Test that using the asset movements query endpoint works fine.

    Covers: querying a single exchange, ignoring actions for accounting,
    querying all exchanges at once, and time-range filtering supplied both
    as a JSON body and as URL query arguments.

    NOTE(review): a second function with this exact name is defined later in
    this file and shadows this one at import time, so pytest collects only the
    later definition — confirm which version is intended to survive.
    """
    async_query = random.choice([False, True])
    server = rotkehlchen_api_server_with_exchanges
    setup = prepare_rotki_for_history_processing_test(server.rest_api.rotkehlchen)

    # query asset movements of one specific exchange
    with setup.polo_patch:
        response = requests.get(
            api_url_for(server, 'assetmovementsresource'),
            json={'location': 'poloniex', 'async_query': async_query},
        )
    if async_query:
        task_id = assert_ok_async_response(response)
        outcome = wait_for_async_task(rotkehlchen_api_server_with_exchanges, task_id)
        result = outcome['result']
    else:
        result = assert_proper_response_with_result(response)
    assert result['entries_found'] == 4
    assert result['entries_limit'] == FREE_ASSET_MOVEMENTS_LIMIT
    poloniex_ids = [x['entry']['identifier'] for x in result['entries']]
    assert_poloniex_asset_movements([x['entry'] for x in result['entries']], deserialized=True)
    assert all(x['ignored_in_accounting'] is False for x in result['entries']), 'ignored should be false'  # noqa: E501

    # now let's ignore all poloniex action ids
    response = requests.put(
        api_url_for(rotkehlchen_api_server_with_exchanges, 'ignoredactionsresource'),
        json={'action_type': 'asset movement', 'action_ids': poloniex_ids},
    )
    result = assert_proper_response_with_result(response)
    assert set(result['asset movement']) == set(poloniex_ids)

    # query asset movements of all exchanges
    with setup.polo_patch:
        response = requests.get(
            api_url_for(server, 'assetmovementsresource'),
            json={'async_query': async_query},
        )
    if async_query:
        task_id = assert_ok_async_response(response)
        outcome = wait_for_async_task(rotkehlchen_api_server_with_exchanges, task_id)
        result = outcome['result']
    else:
        result = assert_proper_response_with_result(response)
    movements = result['entries']
    # keyword arguments used consistently with the calls inside assert_okay below
    assert_poloniex_asset_movements(
        [x['entry'] for x in movements if x['entry']['location'] == 'poloniex'],
        deserialized=True,
    )
    assert_kraken_asset_movements(
        [x['entry'] for x in movements if x['entry']['location'] == 'kraken'],
        deserialized=True,
    )

    def assert_okay(response):
        """Helper function for DRY checking below assertions"""
        if async_query:
            task_id = assert_ok_async_response(response)
            outcome = wait_for_async_task(rotkehlchen_api_server_with_exchanges, task_id)
            result = outcome['result']
        else:
            result = assert_proper_response_with_result(response)
        movements = result['entries']
        assert_poloniex_asset_movements(
            to_check_list=[
                x['entry'] for x in movements if x['entry']['location'] == 'poloniex'
            ],
            deserialized=True,
            movements_to_check=(1, 2),
        )
        msg = 'poloniex asset movements should have now been ignored for accounting'
        assert all(x['ignored_in_accounting'] is True for x in movements if x['entry']['location'] == 'poloniex'), msg  # noqa: E501
        assert_kraken_asset_movements(
            to_check_list=[
                x['entry'] for x in movements if x['entry']['location'] == 'kraken'
            ],
            deserialized=True,
            movements_to_check=(0, 1, 2),
        )

    # and now query them in a specific time range excluding some asset movements
    data = {'from_timestamp': 1439994442, 'to_timestamp': 1458994442, 'async_query': async_query}
    with setup.polo_patch:
        response = requests.get(api_url_for(server, 'assetmovementsresource'), json=data)
    assert_okay(response)

    # do the same but with query args. This serves as test of from/to timestamp with query args
    with setup.polo_patch:
        response = requests.get(
            api_url_for(server, 'assetmovementsresource') + '?' + urlencode(data),
        )
    assert_okay(response)
def test_query_history(rotkehlchen_api_server_with_exchanges):
    """Test that the history processing REST API endpoint works. Similar to test_history.py"""
    async_query = random.choice([False, True])
    start_ts = 0
    end_ts = 1601040361
    rotki = rotkehlchen_api_server_with_exchanges.rest_api.rotkehlchen
    # should_mock_history_processing=False so that the real accounting code runs
    setup = prepare_rotki_for_history_processing_test(
        rotki,
        should_mock_history_processing=False,
    )

    # Query history processing to start the history processing
    with ExitStack() as stack:
        # setup is an iterable of (possibly None) context managers patching
        # the various exchange/chain queries; enter all the real ones
        for manager in setup:
            if manager is None:
                continue
            stack.enter_context(manager)
        response = requests.get(
            api_url_for(rotkehlchen_api_server_with_exchanges, "historyprocessingresource"),
            json={'from_timestamp': start_ts, 'to_timestamp': end_ts, 'async_query': async_query},
        )
    if async_query:
        task_id = assert_ok_async_response(response)
        outcome = wait_for_async_task_with_result(
            rotkehlchen_api_server_with_exchanges,
            task_id,
        )
    else:
        outcome = assert_proper_response_with_result(response)

    # Simply check that the results got returned here. The actual correctness of
    # accounting results is checked in other tests such as test_simple_accounting
    assert len(outcome) == 2
    overview = outcome['overview']
    assert len(overview) == 10
    assert overview["loan_profit"] is not None
    assert overview["margin_positions_profit_loss"] is not None
    assert overview["settlement_losses"] is not None
    assert overview["ethereum_transaction_gas_costs"] is not None
    assert overview["asset_movement_fees"] is not None
    assert overview["general_trade_profit_loss"] is not None
    assert overview["taxable_trade_profit_loss"] is not None
    assert overview["total_taxable_profit_loss"] is not None
    assert overview["total_profit_loss"] is not None
    assert overview["defi_profit_loss"] is not None
    all_events = outcome['all_events']
    assert isinstance(all_events, list)
    # TODO: These events are not actually checked anywhere for correctness
    # A test should probably be made for their correctness, even though
    # they are assumed correct if the overview is correct
    assert len(all_events) == 36

    # And now make sure that warnings have also been generated for the query of
    # the unsupported/unknown assets
    warnings = rotki.msg_aggregator.consume_warnings()
    assert len(warnings) == 13
    assert 'poloniex trade with unknown asset NOEXISTINGASSET' in warnings[0]
    assert 'poloniex trade with unsupported asset BALLS' in warnings[1]
    assert 'withdrawal of unknown poloniex asset IDONTEXIST' in warnings[2]
    assert 'withdrawal of unsupported poloniex asset DIS' in warnings[3]
    assert 'deposit of unknown poloniex asset IDONTEXIST' in warnings[4]
    assert 'deposit of unsupported poloniex asset EBT' in warnings[5]
    assert 'poloniex loan with unsupported asset BDC' in warnings[6]
    assert 'poloniex loan with unknown asset NOTEXISTINGASSET' in warnings[7]
    assert 'bittrex trade with unsupported asset PTON' in warnings[8]
    assert 'bittrex trade with unknown asset IDONTEXIST' in warnings[9]
    assert 'kraken trade with unknown asset IDONTEXISTTOO' in warnings[10]
    assert 'unknown kraken asset IDONTEXIST. Ignoring its deposit/withdrawals' in warnings[11]
    msg = 'unknown kraken asset IDONTEXISTEITHER. Ignoring its deposit/withdrawals query'
    assert msg in warnings[12]

    # Unprocessable pairs end up as errors rather than warnings
    errors = rotki.msg_aggregator.consume_errors()
    assert len(errors) == 3
    assert 'bittrex trade with unprocessable pair %$#%$#%#$%' in errors[0]
    assert 'kraken trade with unprocessable pair IDONTEXISTZEUR' in errors[1]
    assert 'kraken trade with unprocessable pair %$#%$#%$#%$#%$#%' in errors[2]
def test_query_async_tasks(rotkehlchen_api_server_with_exchanges):
    """Test that querying the outcomes of async tasks works as expected

    We don't mock price queries in this test only because that cause the tasks
    list test below to fail since due to the mocking the tasks returns immediately
    and does not wait on a gevent context switching. So if we mock we don't get to
    test the task is still pending functionality.
    """
    # async query balances of one specific exchange
    server = rotkehlchen_api_server_with_exchanges
    binance = try_get_first_exchange(server.rest_api.rotkehlchen.exchange_manager, Location.BINANCE)  # noqa: E501
    binance_patch = patch.object(binance.session, 'get', side_effect=mock_binance_balance_response)

    # Check querying the async tasks resource when no async task is scheduled
    response = requests.get(api_url_for(server, "asynctasksresource"))
    assert_proper_response(response)
    json_data = response.json()
    assert json_data['message'] == ''
    assert json_data['result'] == {'completed': [], 'pending': []}

    # Create an async task
    with binance_patch:
        response = requests.get(api_url_for(
            server,
            'named_exchanges_balances_resource',
            location='binance',
        ), json={'async_query': True})
    task_id = assert_ok_async_response(response)

    # now check that there is a task
    response = requests.get(api_url_for(server, 'asynctasksresource'))
    assert_proper_response(response)
    json_data = response.json()
    assert json_data['message'] == ''
    assert json_data['result'] == {'completed': [], 'pending': [task_id]}

    # now query for the task result and see it's still pending (test for task lists)
    response = requests.get(
        api_url_for(server, 'specific_async_tasks_resource', task_id=task_id),
    )
    assert_proper_response(response)
    json_data = response.json()
    # use the actual task id in the expected message — previously this
    # hard-coded id 0 and only passed because the first task of a fresh
    # server happened to get id 0
    assert json_data['message'] == f'The task with id {task_id} is still pending'
    assert json_data['result'] == {'status': 'pending', 'outcome': None}

    # Poll until the task completes. Bound the retries so a stuck task fails
    # the test with a clear message instead of hanging the suite forever.
    for _ in range(180):
        response = requests.get(
            api_url_for(server, "specific_async_tasks_resource", task_id=task_id),
        )
        assert_proper_response(response)
        json_data = response.json()
        status = json_data['result']['status']
        if status == 'completed':
            break
        if status == 'pending':
            # context switch so that the greenlet to query balances can operate
            gevent.sleep(1)
        else:
            raise AssertionError(f"Unexpected status: {status}")
    else:
        raise AssertionError('async balances task did not complete in time')

    assert json_data['message'] == ''
    assert json_data['result']['status'] == 'completed'
    # assert that there is an outcome
    assert json_data['result']['outcome'] is not None
    assert json_data['result']['outcome']['result'] is not None
    assert json_data['result']['outcome']['message'] == ''

    # Finally try to query an unknown task id and check proper error is returned
    response = requests.get(
        api_url_for(server, "specific_async_tasks_resource", task_id=568),
    )
    assert_error_response(
        response=response,
        contained_in_msg='No task with id 568 found',
        status_code=HTTPStatus.NOT_FOUND,
        result_exists=True,
    )
    json_data = response.json()
    assert json_data['result'] == {'status': 'not-found', 'outcome': None}
def test_query_transactions(rotkehlchen_api_server):
    """Test that querying the ethereum transactions endpoint works as expected.

    This test uses real data. It queries all transactions, ignores two of them
    for accounting, then queries a per-address time range and verifies the
    result comes from the DB (no etherscan call) with the ignore flags set.
    """
    async_query = random.choice([False, True])
    rotki = rotkehlchen_api_server.rest_api.rotkehlchen

    # Check that we get all transactions
    response = requests.get(
        api_url_for(
            rotkehlchen_api_server,
            'ethereumtransactionsresource',
        ), json={'async_query': async_query},
    )
    if async_query:
        task_id = assert_ok_async_response(response)
        outcome = wait_for_async_task(rotkehlchen_api_server, task_id)
        assert outcome['message'] == ''
        result = outcome['result']
    else:
        result = assert_proper_response_with_result(response)
    # expected transactions ordered by descending timestamp (newest first)
    expected_result = EXPECTED_AFB7_TXS + EXPECTED_4193_TXS
    expected_result.sort(key=lambda x: x['timestamp'])
    expected_result.reverse()

    # Make sure that all of the transactions we expect are there and in order
    # There can be more transactions (since the address can make more)
    # but this check ignores them
    previous_index = 0
    result_entries = [x['entry'] for x in result['entries']]
    assert all(x['ignored_in_accounting'] is False for x in result['entries']), 'by default nothing should be ignored'  # noqa: E501
    for entry in expected_result:
        assert entry in result_entries
        entry_idx = result_entries.index(entry)
        if previous_index != 0:
            assert entry_idx == previous_index + 1
        previous_index = entry_idx

    assert result['entries_found'] >= len(expected_result)
    assert result['entries_limit'] == FREE_ETH_TX_LIMIT

    # now let's ignore two transactions
    # the ignore identifier of a tx is tx_hash + from_address + nonce
    ignored_ids = [
        EXPECTED_AFB7_TXS[2]['tx_hash'] + EXPECTED_AFB7_TXS[2]['from_address'] + str(EXPECTED_AFB7_TXS[2]['nonce']),  # noqa: E501
        EXPECTED_AFB7_TXS[3]['tx_hash'] + EXPECTED_AFB7_TXS[3]['from_address'] + str(EXPECTED_AFB7_TXS[3]['nonce']),  # noqa: E501
    ]
    response = requests.put(
        api_url_for(
            rotkehlchen_api_server,
            "ignoredactionsresource",
        ), json={'action_type': 'ethereum transaction', 'action_ids': ignored_ids},
    )
    result = assert_proper_response_with_result(response)
    assert result == {'ethereum transaction': ignored_ids}

    # Check that transactions per address and in a specific time range can be
    # queried and that this is from the DB and not etherscan
    def mock_etherscan_get(url, *args, **kwargs):  # pylint: disable=unused-argument
        # any etherscan hit returns an empty payload; call_count tracks hits
        return MockResponse(200, "{}")
    etherscan_patch = patch.object(rotki.etherscan.session, 'get', wraps=mock_etherscan_get)
    with etherscan_patch as mock_call:
        response = requests.get(
            api_url_for(
                rotkehlchen_api_server,
                'per_address_ethereum_transactions_resource',
                address='0xaFB7ed3beBE50E0b62Fa862FAba93e7A46e59cA7',
            ), json={
                'async_query': async_query,
                'from_timestamp': 1461399856,
                'to_timestamp': 1494458860,
            },
        )
        if async_query:
            task_id = assert_ok_async_response(response)
            outcome = wait_for_async_task(rotkehlchen_api_server, task_id)
            assert outcome['message'] == ''
            result = outcome['result']
        else:
            result = assert_proper_response_with_result(response)

    # served from the DB — etherscan must not have been contacted
    assert mock_call.call_count == 0
    result_entries = [x['entry'] for x in result['entries']]
    assert result_entries == EXPECTED_AFB7_TXS[2:4][::-1]
    msg = 'the transactions we ignored have not been ignored for accounting'
    assert all(x['ignored_in_accounting'] is True for x in result['entries']), msg
def test_query_asset_movements(rotkehlchen_api_server_with_exchanges, async_query):
    """Test that using the asset movements query endpoint works fine.

    NOTE(review): this redefines test_query_asset_movements declared earlier in
    this file; at import time this later definition shadows the earlier one, so
    pytest only collects this version. Looks like two revisions of the test were
    merged — confirm which one should remain. This version takes async_query as
    a fixture parameter and its entries are plain movements (no 'entry' wrapper).
    """
    server = rotkehlchen_api_server_with_exchanges
    setup = prepare_rotki_for_history_processing_test(server.rest_api.rotkehlchen)
    # setup = mock_history_processing_and_exchanges(server.rest_api.rotkehlchen)

    # query asset movements of one specific exchange
    with setup.polo_patch:
        response = requests.get(
            api_url_for(
                server,
                "assetmovementsresource",
            ), json={'location': 'poloniex', 'async_query': async_query},
        )
    if async_query:
        task_id = assert_ok_async_response(response)
        outcome = wait_for_async_task(rotkehlchen_api_server_with_exchanges, task_id)
        result = outcome['result']
    else:
        result = assert_proper_response_with_result(response)
    assert result['entries_found'] == 4
    assert result['entries_limit'] == FREE_ASSET_MOVEMENTS_LIMIT
    assert_poloniex_asset_movements(result['entries'], deserialized=True)

    # query asset movements of all exchanges
    with setup.polo_patch:
        response = requests.get(
            api_url_for(server, "assetmovementsresource"),
            json={'async_query': async_query},
        )
    if async_query:
        task_id = assert_ok_async_response(response)
        outcome = wait_for_async_task(rotkehlchen_api_server_with_exchanges, task_id)
        result = outcome['result']
    else:
        result = assert_proper_response_with_result(response)
    movements = result['entries']
    assert_poloniex_asset_movements([x for x in movements if x['location'] == 'poloniex'], True)
    assert_kraken_asset_movements([x for x in movements if x['location'] == 'kraken'], True)

    def assert_okay(response):
        """Helper function for DRY checking below assertions"""
        if async_query:
            task_id = assert_ok_async_response(response)
            outcome = wait_for_async_task(rotkehlchen_api_server_with_exchanges, task_id)
            result = outcome['result']
        else:
            result = assert_proper_response_with_result(response)
        movements = result['entries']
        assert_poloniex_asset_movements(
            to_check_list=[x for x in movements if x['location'] == 'poloniex'],
            deserialized=True,
            movements_to_check=(1, 2),
        )
        assert_kraken_asset_movements(
            to_check_list=[x for x in movements if x['location'] == 'kraken'],
            deserialized=True,
            movements_to_check=(0, 1, 2),
        )

    # and now query them in a specific time range excluding some asset movements
    data = {'from_timestamp': 1439994442, 'to_timestamp': 1458994442, 'async_query': async_query}
    with setup.polo_patch:
        response = requests.get(api_url_for(server, "assetmovementsresource"), json=data)
    assert_okay(response)

    # do the same but with query args. This serves as test of from/to timestamp with query args
    with setup.polo_patch:
        response = requests.get(
            api_url_for(server, "assetmovementsresource") + '?' + urlencode(data),
        )
    assert_okay(response)
def test_query_transactions(rotkehlchen_api_server):
    """Test that querying the ethereum transactions endpoint works as expected.

    This test uses real data.

    NOTE(review): this redefines test_query_transactions declared earlier in
    this file; at import time this later definition shadows the earlier one,
    so pytest only collects this version. This variant lacks the
    ignore-for-accounting checks of the earlier one — confirm which revision
    should remain.
    """
    async_query = random.choice([False, True])
    rotki = rotkehlchen_api_server.rest_api.rotkehlchen

    # Check that we get all transactions
    response = requests.get(
        api_url_for(
            rotkehlchen_api_server,
            'ethereumtransactionsresource',
        ), json={'async_query': async_query},
    )
    if async_query:
        task_id = assert_ok_async_response(response)
        outcome = wait_for_async_task(rotkehlchen_api_server, task_id)
        assert outcome['message'] == ''
        result = outcome['result']
    else:
        result = assert_proper_response_with_result(response)
    # expected transactions ordered by descending timestamp (newest first)
    expected_result = EXPECTED_AFB7_TXS + EXPECTED_4193_TXS
    expected_result.sort(key=lambda x: x['timestamp'])
    expected_result.reverse()

    # Make sure that all of the transactions we expect are there and in order
    # There can be more transactions (since the address can make more)
    # but this check ignores them
    previous_index = 0
    for entry in expected_result:
        assert entry in result['entries']
        entry_idx = result['entries'].index(entry)
        if previous_index != 0:
            assert entry_idx == previous_index + 1
        previous_index = entry_idx

    assert result['entries_found'] >= len(expected_result)
    assert result['entries_limit'] == FREE_ETH_TX_LIMIT

    # Check that transactions per address and in a specific time range can be
    # queried and that this is from the DB and not etherscan
    def mock_etherscan_get(url, *args, **kwargs):  # pylint: disable=unused-argument
        # any etherscan hit returns an empty payload; call_count tracks hits
        return MockResponse(200, "{}")
    etherscan_patch = patch.object(rotki.etherscan.session, 'get', wraps=mock_etherscan_get)
    with etherscan_patch as mock_call:
        response = requests.get(
            api_url_for(
                rotkehlchen_api_server,
                'per_address_ethereum_transactions_resource',
                address='0xaFB7ed3beBE50E0b62Fa862FAba93e7A46e59cA7',
            ), json={
                'async_query': async_query,
                "from_timestamp": 1461399856,
                "to_timestamp": 1494458860,
            },
        )
        if async_query:
            task_id = assert_ok_async_response(response)
            outcome = wait_for_async_task(rotkehlchen_api_server, task_id)
            assert outcome['message'] == ''
            result = outcome['result']
        else:
            result = assert_proper_response_with_result(response)

    # served from the DB — etherscan must not have been contacted
    assert mock_call.call_count == 0
    assert result['entries'] == EXPECTED_AFB7_TXS[2:4][::-1]
def test_query_aave_balances(rotkehlchen_api_server, ethereum_accounts):
    """Check querying the aave balances endpoint works. Uses real data.

    TODO: Here we should use a test account for which we will know what balances
    it has and we never modify
    """
    async_query = random.choice([False, True])
    rotki = rotkehlchen_api_server.rest_api.rotkehlchen
    setup = setup_balances(
        rotki,
        ethereum_accounts=ethereum_accounts,
        btc_accounts=None,
        original_queries=['zerion'],
    )
    with ExitStack() as stack:
        # patch ethereum/etherscan to not autodetect tokens
        setup.enter_ethereum_patches(stack)
        response = requests.get(api_url_for(
            rotkehlchen_api_server,
            "aavebalancesresource",
        ), json={'async_query': async_query})
        if async_query:
            task_id = assert_ok_async_response(response)
            outcome = wait_for_async_task(rotkehlchen_api_server, task_id)
            assert outcome['message'] == ''
            result = outcome['result']
        else:
            result = assert_proper_response_with_result(response)

    if len(result) == 0:
        test_warnings.warn(UserWarning(f'Test account {AAVE_BALANCESV1_TEST_ACC} and {AAVE_BALANCESV2_TEST_ACC} have no aave balances'))  # noqa: E501
        return

    def _assert_valid_entries(balances: Dict[str, Any]) -> None:
        """Sanity-check the lending and borrowing entries of one account's balances"""
        # BUGFIX: this previously read v1_balances['lending'] from the closure,
        # so when called with the v2 account's balances the v1 lending entries
        # were re-checked and the v2 lending entries were never validated.
        lending = balances['lending']
        for _, entry in lending.items():
            assert len(entry) == 2
            assert len(entry['balance']) == 2
            assert 'amount' in entry['balance']
            assert 'usd_value' in entry['balance']
            assert '%' in entry['apy']
        borrowing = balances['borrowing']
        for _, entry in borrowing.items():
            assert len(entry) == 3
            assert len(entry['balance']) == 2
            assert 'amount' in entry['balance']
            assert 'usd_value' in entry['balance']
            assert '%' in entry['variable_apr']
            assert '%' in entry['stable_apr']

    v1_balances = result.get(AAVE_BALANCESV1_TEST_ACC)
    if v1_balances:
        _assert_valid_entries(v1_balances)
    else:
        test_warnings.warn(UserWarning(f'Test account {AAVE_BALANCESV1_TEST_ACC} has no aave v1 balances'))  # noqa: E501

    v2_balances = result.get(AAVE_BALANCESV2_TEST_ACC)
    if v2_balances:
        _assert_valid_entries(v2_balances)
    else:
        test_warnings.warn(UserWarning(f'Test account {AAVE_BALANCESV2_TEST_ACC} has no aave v2 balances'))  # noqa: E501
def test_add_delete_xpub(rotkehlchen_api_server):
    """This test uses real world data (queries actual BTC balances)

    Test data from here:
    https://github.com/LedgerHQ/bitcoin-keychain-svc/blob/744736af1819cdab0a46ea7faf834008aeade6b1/integration/p2pkh_keychain_test.go#L40-L95
    """
    # Disable caching of query results
    rotki = rotkehlchen_api_server.rest_api.rotkehlchen
    rotki.chain_manager.cache_ttl_secs = 0
    async_query = random.choice([False, True])

    # Create the two tags the first xpub will be tagged with
    tag1 = {
        'name': 'ledger',
        'description': 'My ledger accounts',
        'background_color': 'ffffff',
        'foreground_color': '000000',
    }
    response = requests.put(
        api_url_for(
            rotkehlchen_api_server,
            'tagsresource',
        ), json=tag1,
    )
    # NOTE(review): the tag1 PUT response above is never asserted — only the
    # tag2 response below is checked. Likely an oversight; confirm.
    tag2 = {
        'name': 'public',
        'description': 'My public accounts',
        'background_color': 'ffffff',
        'foreground_color': '000000',
    }
    response = requests.put(
        api_url_for(
            rotkehlchen_api_server,
            'tagsresource',
        ), json=tag2,
    )
    assert_proper_response(response)

    # Add the first xpub, with a label and both tags
    xpub1 = 'xpub6DCi5iJ57ZPd5qPzvTm5hUt6X23TJdh9H4NjNsNbt7t7UuTMJfawQWsdWRFhfLwkiMkB1rQ4ZJWLB9YBnzR7kbs9N8b2PsKZgKUHQm1X4or'  # noqa : E501
    xpub1_label = 'ledger_test_xpub'
    xpub1_tags = ['ledger', 'public']
    json_data = {
        'async_query': async_query,
        'xpub': xpub1,
        'label': xpub1_label,
        'tags': xpub1_tags,
    }
    response = requests.put(api_url_for(
        rotkehlchen_api_server,
        "btcxpubresource",
    ), json=json_data)
    if async_query:
        task_id = assert_ok_async_response(response)
        outcome = wait_for_async_task_with_result(rotkehlchen_api_server, task_id, timeout=180)
    else:
        outcome = assert_proper_response_with_result(response)
    _check_xpub_addition_outcome(outcome, xpub1)

    # Make sure that adding existing xpub fails
    json_data = {
        'async_query': False,
        'xpub': xpub1,
        'label': xpub1_label,
        'tags': xpub1_tags,
    }
    response = requests.put(api_url_for(
        rotkehlchen_api_server,
        "btcxpubresource",
    ), json=json_data)
    assert_error_response(
        response=response,
        contained_in_msg=f'Xpub {xpub1} with derivation path None is already tracked',
        status_code=HTTPStatus.BAD_REQUEST,
    )

    # Add an xpub with no derived addresses
    xpub2 = 'xpub68V4ZQQ62mea7ZUKn2urQu47Bdn2Wr7SxrBxBDDwE3kjytj361YBGSKDT4WoBrE5htrSB8eAMe59NPnKrcAbiv2veN5GQUmfdjRddD1Hxrk'  # noqa : E501
    xpub2_label = None
    xpub2_tags = None
    json_data = {
        'async_query': async_query,
        'xpub': xpub2,
        'label': xpub2_label,
        'tags': xpub2_tags,
    }
    response = requests.put(api_url_for(
        rotkehlchen_api_server,
        "btcxpubresource",
    ), json=json_data)
    if async_query:
        task_id = assert_ok_async_response(response)
        outcome = wait_for_async_task_with_result(rotkehlchen_api_server, task_id, timeout=180)
    else:
        outcome = assert_proper_response_with_result(response)
    # NOTE(review): this re-checks xpub1's addition outcome after adding xpub2.
    # Possibly intentional since xpub2 derives no addresses (the outcome still
    # contains xpub1's accounts), but it may have been meant to assert on
    # xpub2 — confirm.
    _check_xpub_addition_outcome(outcome, xpub1)

    # Also make sure that blockchain account data endpoint returns everything correctly
    response = requests.get(api_url_for(
        rotkehlchen_api_server,
        "blockchainsaccountsresource",
        blockchain='BTC',
    ))
    outcome = assert_proper_response_with_result(response)
    assert len(outcome['standalone']) == 2
    for entry in outcome['standalone']:
        assert entry['address'] in (UNIT_BTC_ADDRESS1, UNIT_BTC_ADDRESS2)
        assert entry['tags'] is None
        assert entry['label'] is None

    assert len(outcome['xpubs']) == 2
    for entry in outcome['xpubs']:
        assert len(entry) == 5
        if entry['xpub'] == xpub1:
            # xpub1 has derived addresses carrying its tags but no per-address label
            for address_data in entry['addresses']:
                assert address_data['address'] in EXPECTED_XPUB_ADDESSES
                assert address_data['label'] is None
                assert address_data['tags'] == xpub1_tags
        else:
            # xpub2 has no derived addresses, label or tags
            assert entry['xpub'] == xpub2
            assert entry['addresses'] is None
            assert entry['label'] is None
            assert entry['tags'] is None

    # Now delete the xpub and make sure all derived addresses are gone
    json_data = {
        'async_query': async_query,
        'xpub': xpub1,
        'derivation_path': None,
    }
    response = requests.delete(api_url_for(
        rotkehlchen_api_server,
        "btcxpubresource",
    ), json=json_data)
    if async_query:
        task_id = assert_ok_async_response(response)
        outcome = wait_for_async_task_with_result(rotkehlchen_api_server, task_id, timeout=180)
    else:
        outcome = assert_proper_response_with_result(response)

    # only the two standalone accounts should remain tracked
    btc = outcome['per_account']['BTC']
    assert len(btc['standalone']) == 2
    assert UNIT_BTC_ADDRESS1 in btc['standalone']
    assert UNIT_BTC_ADDRESS2 in btc['standalone']
    assert 'xpubs' not in btc
    assert outcome['totals']['BTC']['amount'] is not None
    assert outcome['totals']['BTC']['usd_value'] is not None

    # Also make sure all mappings are gone from the DB
    cursor = rotki.data.db.conn.cursor()
    result = cursor.execute('SELECT object_reference from tag_mappings;').fetchall()
    assert len(result) == 0, 'all tag mappings should have been deleted'
    result = cursor.execute('SELECT * from xpub_mappings WHERE xpub=?', (xpub1, )).fetchall()
    assert len(result) == 0, 'all xpub mappings should have been deleted'
def test_add_and_query_manually_tracked_balances(
        rotkehlchen_api_server,
        ethereum_accounts,
):
    """Test that adding and querying manually tracked balances via the API works fine"""
    async_query = random.choice([False, True])
    rotki = rotkehlchen_api_server.rest_api.rotkehlchen
    setup = setup_balances(rotki, ethereum_accounts=ethereum_accounts, btc_accounts=None)
    _populate_tags(rotkehlchen_api_server)

    # Before adding anything the manually tracked balances should be empty
    response = requests.get(
        api_url_for(
            rotkehlchen_api_server,
            "manuallytrackedbalancesresource",
        ), json={'async_query': async_query},
    )
    if async_query:
        task_id = assert_ok_async_response(response)
        outcome = wait_for_async_task(rotkehlchen_api_server, task_id)
        result = outcome['result']
    else:
        result = assert_proper_response_with_result(response)
    assert result['balances'] == [], 'In the beginning we should have no entries'

    balances = _populate_initial_balances(rotkehlchen_api_server)

    # now query and make sure the added balances are returned
    response = requests.get(
        api_url_for(
            rotkehlchen_api_server,
            "manuallytrackedbalancesresource",
        ), json={'async_query': async_query},
    )
    if async_query:
        task_id = assert_ok_async_response(response)
        outcome = wait_for_async_task(rotkehlchen_api_server, task_id)
        result = outcome['result']
    else:
        result = assert_proper_response_with_result(response)
    assert_balances_match(expected_balances=balances, returned_balances=result['balances'])

    now = ts_now()
    # Also now test for https://github.com/rotki/rotki/issues/942 by querying for all balances
    # causing all balances to be saved and making sure the manual balances also got saved
    with ExitStack() as stack:
        setup.enter_ethereum_patches(stack)
        response = requests.get(
            api_url_for(
                rotkehlchen_api_server,
                "allbalancesresource",
            ), json={'async_query': async_query},
        )
        if async_query:
            task_id = assert_ok_async_response(response)
            outcome = wait_for_async_task(rotkehlchen_api_server, task_id)
            result = outcome['result']
        else:
            result = assert_proper_response_with_result(response)

    assets = result['assets']
    assert len(assets) == 5
    assert assets['BTC']['amount'] == '1.425'
    assert assets['XMR']['amount'] == '50.315'
    assert assets[A_BNB.identifier]['amount'] == '155'
    assert assets['ETH']['amount'] == '3E-12'  # from ethereum on-chain balances
    assert assets[A_RDN.identifier]['amount'] == '4E-12'  # from ethereum on-chain balances
    liabilities = result['liabilities']
    assert len(liabilities) == 2
    assert liabilities['ETH']['amount'] == '2'
    assert liabilities['USD']['amount'] == '100'

    # Check DB to make sure a save happened
    assert rotki.data.db.get_last_balance_save_time() >= now
    assert set(rotki.data.db.query_owned_assets()) == {
        'BTC',
        'XMR',
        A_BNB.identifier,
        'ETH',
        A_RDN.identifier,
    }
def test_update_conflicts(rotkehlchen_api_server, globaldb):
    """Test that conflicts in an asset update are handled properly.

    The mocked remote update contains three changes: a brand-new asset, a DAI
    edit conflicting with local data, and a DASH edit conflicting with local
    data. The test verifies that an unresolved update commits nothing, that
    both conflicts are reported, and that re-running with explicit
    remote/local resolutions applies them correctly.
    """
    async_query = random.choice([False, True])
    rotki = rotkehlchen_api_server.rest_api.rotkehlchen
    # The remote update payload: SQL actions separated by lines containing '*'
    update_1 = """INSERT INTO assets(identifier,type,name,symbol,started, swapped_for, coingecko, cryptocompare, details_reference) VALUES("121-ada-FADS-as", "F","A name","SYMBOL",NULL, NULL,"", "", "121-ada-FADS-as");INSERT INTO common_asset_details(asset_id, forked) VALUES("121-ada-FADS-as", "BTC");
*
INSERT INTO ethereum_tokens(address, decimals, protocol) VALUES("0x6B175474E89094C44Da98b954EedeAC495271d0F", 8, "maker");INSERT INTO assets(identifier,type, name, symbol,started, swapped_for, coingecko, cryptocompare, details_reference) VALUES("_ceth_0x6B175474E89094C44Da98b954EedeAC495271d0F", "C", "New Multi Collateral DAI", "NDAI", 1573672677, NULL, "dai", NULL, "0x6B175474E89094C44Da98b954EedeAC495271d0F");
*
INSERT INTO assets(identifier,type,name,symbol,started, swapped_for, coingecko, cryptocompare, details_reference) VALUES("DASH", "B","Dash","DASH",1337, NULL, "dash-coingecko", NULL, "DASH");INSERT INTO common_asset_details(asset_id, forked) VALUES("DASH", "BTC");
*
"""  # noqa: E501
    update_patch = mock_asset_updates(
        original_requests_get=requests.get,
        latest=999999991,
        updates={"999999991": {
            "changes": 3,
            "min_schema_version": GLOBAL_DB_VERSION,
            "max_schema_version": GLOBAL_DB_VERSION,
        }},
        sql_actions={"999999991": update_1},
    )
    # pretend the local DB is one version behind the mocked remote
    globaldb.add_setting_value(ASSETS_VERSION_KEY, 999999990)
    start_assets_num = len(globaldb.get_all_asset_data(mapping=False))
    with update_patch:
        # First check that the pending update is correctly reported
        response = requests.get(
            api_url_for(
                rotkehlchen_api_server,
                'assetupdatesresource',
            ),
            json={'async_query': async_query},
        )
        if async_query:
            task_id = assert_ok_async_response(response)
            outcome = wait_for_async_task(
                rotkehlchen_api_server,
                task_id,
            )
            result = outcome['result']
            assert outcome['message'] == ''
        else:
            result = assert_proper_response_with_result(response)
        assert result['local'] == 999999990
        assert result['remote'] == 999999991
        assert result['new_changes'] == 3

        # Performing the update without conflict resolutions must fail
        response = requests.post(
            api_url_for(
                rotkehlchen_api_server,
                'assetupdatesresource',
            ),
            json={'async_query': async_query},
        )
        if async_query:
            task_id = assert_ok_async_response(response)
            outcome = wait_for_async_task(
                rotkehlchen_api_server,
                task_id,
            )
            assert outcome['message'] == 'Found conflicts during assets upgrade'
            result = outcome['result']
        else:
            result = assert_proper_response_with_result(
                response,
                message='Found conflicts during assets upgrade',
                status_code=HTTPStatus.CONFLICT,
            )

        # Make sure that nothing was committed
        assert globaldb.get_setting_value(ASSETS_VERSION_KEY, None) == 999999990
        assert len(globaldb.get_all_asset_data(mapping=False)) == start_assets_num
        with pytest.raises(UnknownAsset):
            Asset('121-ada-FADS-as')
        errors = rotki.msg_aggregator.consume_errors()
        warnings = rotki.msg_aggregator.consume_warnings()
        assert len(errors) == 0, f'Found errors: {errors}'
        assert len(warnings) == 0, f'Found warnings: {warnings}'
        # See that we get two conflicts
        expected_result = [{
            'identifier': '_ceth_0x6B175474E89094C44Da98b954EedeAC495271d0F',
            'local': {
                'name': 'Multi Collateral Dai',
                'symbol': 'DAI',
                'asset_type': 'ethereum token',
                'started': 1573672677,
                'forked': None,
                'swapped_for': None,
                'ethereum_address': '0x6B175474E89094C44Da98b954EedeAC495271d0F',
                'decimals': 18,
                'cryptocompare': None,
                'coingecko': 'dai',
                'protocol': None,
            },
            'remote': {
                'name': 'New Multi Collateral DAI',
                'symbol': 'NDAI',
                'asset_type': 'ethereum token',
                'started': 1573672677,
                'forked': None,
                'swapped_for': None,
                'ethereum_address': '0x6B175474E89094C44Da98b954EedeAC495271d0F',
                'decimals': 8,
                'cryptocompare': None,
                'coingecko': 'dai',
                'protocol': 'maker',
            },
        }, {
            'identifier': 'DASH',
            'local': {
                'name': 'Dash',
                'symbol': 'DASH',
                'asset_type': 'own chain',
                'started': 1390095618,
                'forked': None,
                'swapped_for': None,
                'ethereum_address': None,
                'decimals': None,
                'cryptocompare': None,
                'coingecko': 'dash',
                'protocol': None,
            },
            'remote': {
                'name': 'Dash',
                'symbol': 'DASH',
                'asset_type': 'own chain',
                'started': 1337,
                'forked': 'BTC',
                'swapped_for': None,
                'ethereum_address': None,
                'decimals': None,
                'cryptocompare': None,
                'coingecko': 'dash-coingecko',
                'protocol': None,
            },
        }]
        assert result == expected_result

        # now try the update again but specify the conflicts resolution
        conflicts = {'_ceth_0x6B175474E89094C44Da98b954EedeAC495271d0F': 'remote', 'DASH': 'local'}
        response = requests.post(
            api_url_for(
                rotkehlchen_api_server,
                'assetupdatesresource',
            ),
            json={'async_query': async_query, 'conflicts': conflicts},
        )
        if async_query:
            task_id = assert_ok_async_response(response)
            outcome = wait_for_async_task(
                rotkehlchen_api_server,
                task_id,
            )
            assert outcome['message'] == ''
            result = outcome['result']
        else:
            result = assert_proper_response_with_result(
                response,
                message='',
                status_code=HTTPStatus.OK,
            )

        # check conflicts were solved as per the given choices and new asset also added
        assert result is True
        assert globaldb.get_setting_value(ASSETS_VERSION_KEY, None) == 999999991
        errors = rotki.msg_aggregator.consume_errors()
        warnings = rotki.msg_aggregator.consume_warnings()
        assert len(errors) == 0, f'Found errors: {errors}'
        assert len(warnings) == 0, f'Found warnings: {warnings}'
        # DAI was resolved as 'remote' so it should carry the remote values
        dai = EthereumToken('0x6B175474E89094C44Da98b954EedeAC495271d0F')
        assert dai.identifier == strethaddress_to_identifier('0x6B175474E89094C44Da98b954EedeAC495271d0F')  # noqa: E501
        assert dai.name == 'New Multi Collateral DAI'
        assert dai.symbol == 'NDAI'
        assert dai.asset_type == AssetType.ETHEREUM_TOKEN
        assert dai.started == 1573672677
        assert dai.forked is None
        assert dai.swapped_for is None
        assert dai.coingecko == 'dai'
        assert dai.cryptocompare is None
        assert dai.ethereum_address == '0x6B175474E89094C44Da98b954EedeAC495271d0F'
        assert dai.decimals == 8
        assert dai.protocol == 'maker'
        # DASH was resolved as 'local' so the local values must be unchanged
        dash = Asset('DASH')
        assert dash.identifier == 'DASH'
        assert dash.name == 'Dash'
        assert dash.symbol == 'DASH'
        assert dash.asset_type == AssetType.OWN_CHAIN
        assert dash.started == 1390095618
        assert dash.forked is None
        assert dash.swapped_for is None
        assert dash.coingecko == 'dash'
        assert dash.cryptocompare is None
        # the non-conflicting new asset should simply have been added
        new_asset = Asset('121-ada-FADS-as')
        assert new_asset.identifier == '121-ada-FADS-as'
        assert new_asset.name == 'A name'
        assert new_asset.symbol == 'SYMBOL'
        assert new_asset.asset_type == AssetType.COUNTERPARTY_TOKEN
        assert new_asset.started is None
        assert new_asset.forked == 'BTC'
        assert new_asset.swapped_for is None
        assert new_asset.coingecko == ''
        assert new_asset.cryptocompare == ''
def test_add_and_query_manually_tracked_balances(
        rotkehlchen_api_server,
        async_query,
):
    """Test that adding and querying manually tracked balances via the API works fine"""
    rotki = rotkehlchen_api_server.rest_api.rotkehlchen
    _populate_tags(rotkehlchen_api_server)

    def query_result(resource_name):
        """GET the given resource and unwrap either the sync or the async result."""
        response = requests.get(
            api_url_for(rotkehlchen_api_server, resource_name),
            json={'async_query': async_query},
        )
        if not async_query:
            return assert_proper_response_with_result(response)
        task_id = assert_ok_async_response(response)
        outcome = wait_for_async_task(rotkehlchen_api_server, task_id)
        return outcome['result']

    # Before adding anything the endpoint must report an empty list
    result = query_result("manuallytrackedbalancesresource")
    assert result['balances'] == [], 'In the beginning we should have no entries'

    balances = _populate_initial_balances(rotkehlchen_api_server)

    # now query and make sure the added balances are returned
    result = query_result("manuallytrackedbalancesresource")
    assert_balances_match(expected_balances=balances, returned_balances=result['balances'])

    now = ts_now()
    # Also now test for https://github.com/rotki/rotki/issues/942 by querying for all balances
    # causing all balances to be saved and making sure the manual balances also got saved
    result = query_result("allbalancesresource")
    assert result['BTC']['amount'] == '1.425'
    assert result['XMR']['amount'] == '50.315'
    assert result['BNB']['amount'] == '155'
    # Check DB to make sure a save happened
    assert rotki.data.db.get_last_balance_save_time() >= now
    assert set(rotki.data.db.query_owned_assets()) == {'BTC', 'XMR', 'BNB', 'ETH'}
def test_simple_update(rotkehlchen_api_server, globaldb):
    """Test that the happy case of update works.

    - Test that up_to_version argument works
    - Test that only versions above current local are applied
    - Test that versions with min/max schema mismatch are skipped
    """
    async_query = random.choice([False, True])
    rotki = rotkehlchen_api_server.rest_api.rotkehlchen
    # SQL payload applied for update 999999993: adds a new ethereum token (AKB),
    # a new counterparty asset (121-ada-FADS-as) and renames EUR. The `*` lines
    # are the update-format statement separators consumed by the updater.
    update_3 = """INSERT INTO ethereum_tokens(address, decimals, protocol) VALUES("0xC2FEC534c461c45533e142f724d0e3930650929c", 18, NULL);INSERT INTO assets(identifier,type, name, symbol,started, swapped_for, coingecko, cryptocompare, details_reference) VALUES("_ceth_0xC2FEC534c461c45533e142f724d0e3930650929c", "C", "AKB token", "AKB",123, NULL, NULL, "AIDU", "0xC2FEC534c461c45533e142f724d0e3930650929c");
*
INSERT INTO assets(identifier,type,name,symbol,started, swapped_for, coingecko, cryptocompare, details_reference) VALUES("121-ada-FADS-as", "F","A name","SYMBOL",NULL, NULL,"", "", "121-ada-FADS-as");INSERT INTO common_asset_details(asset_id, forked) VALUES("121-ada-FADS-as", "BTC");
*
UPDATE assets SET name="Ευρώ" WHERE identifier="EUR";
INSERT INTO assets(identifier,type,name,symbol,started, swapped_for, coingecko, cryptocompare, details_reference) VALUES("EUR", "A","Ευρώ","EUR",NULL, NULL,NULL,NULL, "EUR");INSERT INTO common_asset_details(asset_id, forked) VALUES("EUR", NULL);
    """  # noqa: E501
    update_patch = mock_asset_updates(
        original_requests_get=requests.get,
        latest=999999996,
        updates={
            "999999991": {
                "changes": 1,
                "min_schema_version": GLOBAL_DB_VERSION,
                "max_schema_version": GLOBAL_DB_VERSION,
            },
            "999999992": {
                "changes": 1,
                "min_schema_version": GLOBAL_DB_VERSION,
                "max_schema_version": GLOBAL_DB_VERSION,
            },
            "999999993": {
                "changes": 3,
                "min_schema_version": GLOBAL_DB_VERSION,
                "max_schema_version": GLOBAL_DB_VERSION,
            },
            # deliberately incompatible schema bounds -> this update must be skipped
            "999999994": {
                "changes": 5,
                "min_schema_version": GLOBAL_DB_VERSION + 1,
                "max_schema_version": GLOBAL_DB_VERSION - 1,
            },
            "999999995": {
                "changes": 5,
                "min_schema_version": GLOBAL_DB_VERSION,
                "max_schema_version": GLOBAL_DB_VERSION,
            },
            "999999996": {
                "changes": 5,
                "min_schema_version": GLOBAL_DB_VERSION,
                "max_schema_version": GLOBAL_DB_VERSION,
            },
        },
        sql_actions={"999999991": "", "999999992": "", "999999993": update_3, "999999994": "", "999999995": ""},  # noqa: E501
    )
    # pretend the local assets DB is already at version 999999992
    globaldb.add_setting_value(ASSETS_VERSION_KEY, 999999992)
    with update_patch:
        # First a GET to see the pending-changes summary
        response = requests.get(
            api_url_for(
                rotkehlchen_api_server,
                'assetupdatesresource',
            ),
            json={'async_query': async_query},
        )
        if async_query:
            task_id = assert_ok_async_response(response)
            outcome = wait_for_async_task(
                rotkehlchen_api_server,
                task_id,
            )
            result = outcome['result']
            assert outcome['message'] == ''
        else:
            result = assert_proper_response_with_result(response)
        assert result['local'] == 999999992
        assert result['remote'] == 999999996
        # changes from 999999993 (3) + 999999995 (5) + 999999996 (5) = 13, skipping 999999994
        assert result['new_changes'] == 13

        # Now POST to actually perform the update, capped at version 999999995
        response = requests.post(
            api_url_for(
                rotkehlchen_api_server,
                'assetupdatesresource',
            ),
            json={'async_query': async_query, 'up_to_version': 999999995},
        )
        if async_query:
            task_id = assert_ok_async_response(response)
            outcome = wait_for_async_task(
                rotkehlchen_api_server,
                task_id,
            )
            assert outcome['message'] == ''
            result = outcome['result']
        else:
            result = assert_proper_response_with_result(response)

        # only the schema-mismatch skip should have produced a warning
        errors = rotki.msg_aggregator.consume_errors()
        warnings = rotki.msg_aggregator.consume_warnings()
        assert len(errors) == 0, f'Found errors: {errors}'
        assert len(warnings) == 1
        assert 'Skipping assets update 999999994 since it requires a min schema of' in warnings[0]

        assert result is True
        # local version advanced only up to the requested cap
        assert globaldb.get_setting_value(ASSETS_VERSION_KEY, None) == 999999995
        # verify the token added by update_3 now exists with all its fields
        new_token = EthereumToken('0xC2FEC534c461c45533e142f724d0e3930650929c')
        assert new_token.identifier == strethaddress_to_identifier('0xC2FEC534c461c45533e142f724d0e3930650929c')  # noqa: E501
        assert new_token.name == 'AKB token'
        assert new_token.symbol == 'AKB'
        assert new_token.asset_type == AssetType.ETHEREUM_TOKEN
        assert new_token.started == 123
        assert new_token.forked is None
        assert new_token.swapped_for is None
        assert new_token.coingecko is None
        assert new_token.cryptocompare == 'AIDU'
        assert new_token.ethereum_address == '0xC2FEC534c461c45533e142f724d0e3930650929c'
        assert new_token.decimals == 18
        assert new_token.protocol is None
        # verify the counterparty asset added by update_3
        new_asset = Asset('121-ada-FADS-as')
        assert new_asset.identifier == '121-ada-FADS-as'
        assert new_asset.name == 'A name'
        assert new_asset.symbol == 'SYMBOL'
        assert new_asset.asset_type == AssetType.COUNTERPARTY_TOKEN
        assert new_asset.started is None
        assert new_asset.forked == 'BTC'
        assert new_asset.swapped_for is None
        assert new_asset.coingecko == ''
        assert new_asset.cryptocompare == ''
        # verify the EUR rename from update_3 was applied
        assert Asset('EUR').name == 'Ευρώ'
def test_exchange_query_trades(rotkehlchen_api_server_with_exchanges):
    """Test that using the exchange trades query endpoint works fine"""
    async_query = random.choice([False, True])
    server = rotkehlchen_api_server_with_exchanges
    setup = mock_history_processing_and_exchanges(server.rest_api.rotkehlchen)
    # query trades of one specific exchange
    with setup.binance_patch:
        response = requests.get(
            api_url_for(
                server,
                "tradesresource",
            ),
            json={'location': 'binance', 'async_query': async_query},
        )
        # unwrap either the async task outcome or the direct response
        if async_query:
            task_id = assert_ok_async_response(response)
            outcome = wait_for_async_task(rotkehlchen_api_server_with_exchanges, task_id)
            result = outcome['result']
        else:
            result = assert_proper_response_with_result(response)
    assert result['entries_found'] > 0
    assert result['entries_limit'] == FREE_TRADES_LIMIT
    assert_binance_trades_result([x['entry'] for x in result['entries']])

    # query trades of all exchanges
    with setup.binance_patch, setup.polo_patch:
        response = requests.get(
            api_url_for(server, "tradesresource"),
            json={'async_query': async_query},
        )
        if async_query:
            task_id = assert_ok_async_response(response)
            outcome = wait_for_async_task(rotkehlchen_api_server_with_exchanges, task_id)
            result = outcome['result']
        else:
            result = assert_proper_response_with_result(response)
    # both exchanges' trades should be present in the combined result
    trades = result['entries']
    assert_binance_trades_result([x['entry'] for x in trades if x['entry']['location'] == 'binance'])  # noqa: E501
    assert_poloniex_trades_result([x['entry'] for x in trades if x['entry']['location'] == 'poloniex'])  # noqa: E501

    def assert_okay(response):
        """Helper function for DRY checking below assertions"""
        if async_query:
            task_id = assert_ok_async_response(response)
            outcome = wait_for_async_task(rotkehlchen_api_server_with_exchanges, task_id)
            result = outcome['result']
        else:
            result = assert_proper_response_with_result(response)
        trades = result['entries']
        assert_binance_trades_result([x['entry'] for x in trades if x['entry']['location'] == 'binance'])  # noqa: E501
        # only a subset of poloniex trades falls inside the queried time range
        assert_poloniex_trades_result(
            trades=[x['entry'] for x in trades if x['entry']['location'] == 'poloniex'],
            trades_to_check=(2,),
        )

    # and now query them in a specific time range excluding two of poloniex's trades
    data = {'from_timestamp': 1499865548, 'to_timestamp': 1539713118, 'async_query': async_query}
    with setup.binance_patch, setup.polo_patch:
        response = requests.get(api_url_for(server, "tradesresource"), json=data)
        assert_okay(response)
    # do the same but with query args. This serves as test of from/to timestamp with query args
    with setup.binance_patch, setup.polo_patch:
        response = requests.get(api_url_for(server, "tradesresource") + '?' + urlencode(data))
        assert_okay(response)
def test_multiple_balance_queries_not_concurrent(
        rotkehlchen_api_server_with_exchanges,
        ethereum_accounts,
        btc_accounts,
        separate_blockchain_calls,
):
    """Test multiple different balance query requests happening concurrently

    This tests that if multiple balance query requests happen concurrently we
    do not end up doing them multiple times, but reuse the results thanks to cache.

    Try running both all blockchain balances in one call and each blockchain call separately.
    """
    rotki = rotkehlchen_api_server_with_exchanges.rest_api.rotkehlchen
    setup = setup_balances(rotki, ethereum_accounts, btc_accounts)
    # Wrap (not replace) the underlying query functions so we can count real calls
    multieth_balance_patch = patch.object(
        rotki.chain_manager.ethereum,
        'get_multieth_balance',
        wraps=rotki.chain_manager.ethereum.get_multieth_balance,
    )
    btc_balances_patch = patch(
        'rotkehlchen.chain.manager.get_bitcoin_addresses_balances',
        wraps=get_bitcoin_addresses_balances,
    )
    binance = rotki.exchange_manager.connected_exchanges['binance']
    binance_querydict_patch = patch.object(binance, 'api_query_dict', wraps=binance.api_query_dict)

    # Test all balances request by requesting to not save the data
    with ExitStack() as stack:
        setup.enter_all_patches(stack)
        eth = stack.enter_context(multieth_balance_patch)
        btc = stack.enter_context(btc_balances_patch)
        bn = stack.enter_context(binance_querydict_patch)
        # Fire all queries as async tasks first so they overlap ...
        response = requests.get(
            api_url_for(
                rotkehlchen_api_server_with_exchanges,
                "allbalancesresource",
            ),
            json={'async_query': True},
        )
        task_id_all = assert_ok_async_response(response)
        response = requests.get(api_url_for(
            rotkehlchen_api_server_with_exchanges,
            "named_exchanges_balances_resource",
            name='binance',
        ), json={'async_query': True})
        task_id_one_exchange = assert_ok_async_response(response)
        if separate_blockchain_calls:
            response = requests.get(api_url_for(
                rotkehlchen_api_server_with_exchanges,
                "blockchainbalancesresource",
            ), json={'async_query': True, 'blockchain': 'ETH'})
            task_id_blockchain_eth = assert_ok_async_response(response)
            response = requests.get(api_url_for(
                rotkehlchen_api_server_with_exchanges,
                "blockchainbalancesresource",
            ), json={'async_query': True, 'blockchain': 'BTC'})
            task_id_blockchain_btc = assert_ok_async_response(response)
        else:
            response = requests.get(api_url_for(
                rotkehlchen_api_server_with_exchanges,
                "blockchainbalancesresource",
            ), json={'async_query': True})
            task_id_blockchain = assert_ok_async_response(response)

        # ... then collect their results
        outcome_all = wait_for_async_task_with_result(
            rotkehlchen_api_server_with_exchanges,
            task_id_all,
            timeout=ASYNC_TASK_WAIT_TIMEOUT * 2,
        )
        outcome_one_exchange = wait_for_async_task(
            rotkehlchen_api_server_with_exchanges,
            task_id_one_exchange,
            timeout=ASYNC_TASK_WAIT_TIMEOUT * 2,
        )
        if separate_blockchain_calls:
            outcome_eth = wait_for_async_task_with_result(
                rotkehlchen_api_server_with_exchanges,
                task_id_blockchain_eth,
                timeout=ASYNC_TASK_WAIT_TIMEOUT * 2,
            )
            outcome_btc = wait_for_async_task_with_result(
                rotkehlchen_api_server_with_exchanges,
                task_id_blockchain_btc,
                timeout=ASYNC_TASK_WAIT_TIMEOUT * 2,
            )
        else:
            outcome_blockchain = wait_for_async_task_with_result(
                rotkehlchen_api_server_with_exchanges,
                task_id_blockchain,
                timeout=ASYNC_TASK_WAIT_TIMEOUT * 2,
            )

        # Despite multiple concurrent requests each underlying query ran only once
        assert eth.call_count == 1, 'eth balance query should only fire once'
        assert btc.call_count == 1, 'btc balance query should only happen once'
        # NOTE(review): message previously claimed "2 calls" while asserting 3 —
        # made it consistent with the asserted count
        assert bn.call_count == 3, 'binance balance query should do 3 calls'

        assert_all_balances(
            result=outcome_all,
            db=rotki.data.db,
            expected_data_in_db=True,
            setup=setup,
        )
        assert_binance_balances_result(outcome_one_exchange['result'])
        if not separate_blockchain_calls:
            # the combined blockchain query result contains both chains
            outcome_eth = outcome_blockchain
            outcome_btc = outcome_blockchain
        assert_eth_balances_result(
            rotki=rotki,
            result=outcome_eth,
            eth_accounts=ethereum_accounts,
            eth_balances=setup.eth_balances,
            token_balances=setup.token_balances,
            also_btc=not separate_blockchain_calls,
        )
        assert_btc_balances_result(
            result=outcome_btc,
            btc_accounts=btc_accounts,
            btc_balances=setup.btc_balances,
            also_eth=not separate_blockchain_calls,
        )
def test_query_eth2_balances(rotkehlchen_api_server, query_all_balances):
    """Test that adding ETH2 validators and querying their balances via the API works.

    Validators are added by index, by public key (with partial ownership), and by
    both; balances are then queried either through the all-blockchains endpoint or
    the named ETH2 endpoint depending on the query_all_balances parametrization.
    """
    ownership_proportion = FVal(0.45)
    # initially no validators are tracked
    response = requests.get(
        api_url_for(
            rotkehlchen_api_server,
            'eth2validatorsresource',
        ),
    )
    result = assert_proper_response_with_result(response)
    assert result == {'entries': [], 'entries_limit': -1, 'entries_found': 0}
    validators = [Eth2Validator(
        index=4235,
        public_key='0xadd548bb2e6962c255ec5420e40e6e506dfc936592c700d56718ada7dcc52e4295644ff8f94f4ef898aa8a5ad81a5b84',  # noqa: E501
        ownership_proportion=ONE,
    ), Eth2Validator(
        index=5235,
        public_key='0x827e0f30c3d34e3ee58957dd7956b0f194d64cc404fca4a7313dc1b25ac1f28dcaddf59d05fbda798fa5b894c91b84fb',  # noqa: E501
        ownership_proportion=ownership_proportion,
    )]
    # add the first validator by index only
    response = requests.put(
        api_url_for(
            rotkehlchen_api_server,
            'eth2validatorsresource',
        ),
        json={'validator_index': validators[0].index},
    )
    assert_simple_ok_response(response)
    # add the second one by public key with 45% ownership
    response = requests.put(
        api_url_for(
            rotkehlchen_api_server,
            'eth2validatorsresource',
        ),
        json={'public_key': validators[1].public_key, 'ownership_percentage': '45'},
    )
    assert_simple_ok_response(response)
    # both validators should now be returned, fully serialized
    response = requests.get(
        api_url_for(
            rotkehlchen_api_server,
            'eth2validatorsresource',
        ),
    )
    result = assert_proper_response_with_result(response)
    assert result == {'entries': [x.serialize() for x in validators], 'entries_limit': -1, 'entries_found': 2}  # noqa: E501

    async_query = random.choice([False, True])
    if query_all_balances:
        response = requests.get(api_url_for(
            rotkehlchen_api_server,
            'blockchainbalancesresource',
        ), json={'async_query': async_query})
    else:
        response = requests.get(api_url_for(
            rotkehlchen_api_server,
            'named_blockchain_balances_resource',
            blockchain='ETH2',
        ), json={'async_query': async_query})
    if async_query:
        task_id = assert_ok_async_response(response)
        outcome = wait_for_async_task_with_result(
            server=rotkehlchen_api_server,
            task_id=task_id,
            timeout=ASYNC_TASK_WAIT_TIMEOUT * 5,
        )
    else:
        outcome = assert_proper_response_with_result(response)

    assert len(outcome['per_account']) == 1  # only ETH2
    per_acc = outcome['per_account']['ETH2']
    assert len(per_acc) == 2
    # hope they don't get slashed ;(
    # lower bounds only, since live balances grow with staking rewards
    amount1 = FVal('34.547410412')
    amount2 = FVal('34.600348623') * ownership_proportion
    assert FVal(per_acc[validators[0].public_key]['assets']['ETH2']['amount']) >= amount1
    assert FVal(per_acc[validators[1].public_key]['assets']['ETH2']['amount']) >= amount2
    totals = outcome['totals']
    assert len(totals['assets']) == 1
    assert len(totals['liabilities']) == 0
    assert FVal(totals['assets']['ETH2']['amount']) >= amount1 + amount2
    # now add 1 more validator and query ETH2 balances again to see it's included
    # the reason for this is to see the cache is properly invalidated at addition
    v0_pubkey = '0x933ad9491b62059dd065b560d256d8957a8c402cc6e8d8ee7290ae11e8f7329267a8811c397529dac52ae1342ba58c95'  # noqa: E501
    response = requests.put(
        api_url_for(
            rotkehlchen_api_server,
            'eth2validatorsresource',
        ),
        json={'validator_index': 0, 'public_key': v0_pubkey},
    )
    assert_simple_ok_response(response)
    response = requests.get(api_url_for(
        rotkehlchen_api_server,
        'named_blockchain_balances_resource',
        blockchain='ETH2',
    ), json={'async_query': False, 'ignore_cache': True})
    outcome = assert_proper_response_with_result(response)
    assert len(outcome['per_account']) == 1  # only ETH2
    per_acc = outcome['per_account']['ETH2']
    assert len(per_acc) == 3
    amount1 = FVal('34.596290288')
    amount2 = FVal('34.547410412')
    amount3 = FVal('34.600348623') * ownership_proportion
    assert FVal(per_acc[v0_pubkey]['assets']['ETH2']['amount']) >= amount1
    assert FVal(per_acc[validators[0].public_key]['assets']['ETH2']['amount']) >= amount2
    assert FVal(per_acc[validators[1].public_key]['assets']['ETH2']['amount']) >= amount3
    totals = outcome['totals']
    assert len(totals['assets']) == 1
    assert len(totals['liabilities']) == 0
    assert FVal(totals['assets']['ETH2']['amount']) >= amount1 + amount2 + amount3