def test_foreignkey_conflict(rotkehlchen_api_server, globaldb):
    """Test that when a conflict that's not solvable happens the entry is ignored

    One such case is when the update of an asset would violate a foreign key
    constraint. So we try to update the swapped_for to a non existing asset
    and make sure it's skipped.
    """
    async_query = random.choice([False, True])
    rotki = rotkehlchen_api_server.rest_api.rotkehlchen
    # Two actions separated by a `*` line: a valid new-asset insertion, and an
    # update of GNT whose swapped_for points at an asset that does not exist.
    update_1 = """INSERT INTO assets(identifier,type,name,symbol,started, swapped_for, coingecko, cryptocompare, details_reference) VALUES("121-ada-FADS-as", "F","A name","SYMBOL",NULL, NULL,"", "", "121-ada-FADS-as");INSERT INTO common_asset_details(asset_id, forked) VALUES("121-ada-FADS-as", "BTC");
*
UPDATE assets SET swapped_for="_ceth_0xA8d35739EE92E69241A2Afd9F513d41021A07972" WHERE identifier="_ceth_0xa74476443119A942dE498590Fe1f2454d7D4aC0d";
INSERT INTO ethereum_tokens(address, decimals, protocol) VALUES("0xa74476443119A942dE498590Fe1f2454d7D4aC0d", 18, NULL);INSERT INTO assets(identifier,type, name, symbol,started, swapped_for, coingecko, cryptocompare, details_reference) VALUES("_ceth_0xa74476443119A942dE498590Fe1f2454d7D4aC0d", "C", "Golem", "GNT", 1478810650, "_ceth_0xA8d35739EE92E69241A2Afd9F513d41021A07972", "golem", NULL, "0xa74476443119A942dE498590Fe1f2454d7D4aC0d");
"""  # noqa: E501
    update_patch = mock_asset_updates(
        original_requests_get=requests.get,
        latest=999999991,
        updates={"999999991": {
            "changes": 2,
            "min_schema_version": GLOBAL_DB_VERSION,
            "max_schema_version": GLOBAL_DB_VERSION,
        }},
        sql_actions={"999999991": update_1},
    )
    globaldb.add_setting_value(ASSETS_VERSION_KEY, 999999990)
    start_assets_num = len(globaldb.get_all_asset_data(mapping=False))

    with update_patch:
        # Check for updates: one remote version (999999991) should be pending
        response = requests.get(
            api_url_for(rotkehlchen_api_server, 'assetupdatesresource'),
            json={'async_query': async_query},
        )
        if async_query:
            task_id = assert_ok_async_response(response)
            outcome = wait_for_async_task(rotkehlchen_api_server, task_id)
            result = outcome['result']
            assert outcome['message'] == ''
        else:
            result = assert_proper_response_with_result(response)
        assert result['local'] == 999999990
        assert result['remote'] == 999999991
        assert result['new_changes'] == 2

        # Perform the update without conflict resolution -> expect a conflict
        response = requests.post(
            api_url_for(rotkehlchen_api_server, 'assetupdatesresource'),
            json={'async_query': async_query},
        )
        if async_query:
            task_id = assert_ok_async_response(response)
            outcome = wait_for_async_task(rotkehlchen_api_server, task_id)
            assert outcome['message'] == 'Found conflicts during assets upgrade'
            result = outcome['result']
        else:
            result = assert_proper_response_with_result(
                response,
                message='Found conflicts during assets upgrade',
                status_code=HTTPStatus.CONFLICT,
            )

        # Make sure that nothing was committed
        assert globaldb.get_setting_value(ASSETS_VERSION_KEY, None) == 999999990
        assert len(globaldb.get_all_asset_data(mapping=False)) == start_assets_num
        with pytest.raises(UnknownAsset):
            Asset('121-ada-FADS-as')
        errors = rotki.msg_aggregator.consume_errors()
        warnings = rotki.msg_aggregator.consume_warnings()
        assert len(errors) == 0, f'Found errors: {errors}'
        assert len(warnings) == 0, f'Found warnings: {warnings}'

        # See that we get a conflict
        expected_result = [{
            'identifier': '_ceth_0xa74476443119A942dE498590Fe1f2454d7D4aC0d',
            'local': {
                'name': 'Golem',
                'symbol': 'GNT',
                'asset_type': 'ethereum token',
                'started': 1478810650,
                'forked': None,
                'swapped_for': '_ceth_0x7DD9c5Cba05E151C895FDe1CF355C9A1D5DA6429',
                'ethereum_address': '0xa74476443119A942dE498590Fe1f2454d7D4aC0d',
                'decimals': 18,
                'cryptocompare': None,
                'coingecko': 'golem',
                'protocol': None,
            },
            'remote': {
                'name': 'Golem',
                'symbol': 'GNT',
                'asset_type': 'ethereum token',
                'started': 1478810650,
                'forked': None,
                'swapped_for': '_ceth_0xA8d35739EE92E69241A2Afd9F513d41021A07972',
                'ethereum_address': '0xa74476443119A942dE498590Fe1f2454d7D4aC0d',
                'decimals': 18,
                'cryptocompare': None,
                'coingecko': 'golem',
                'protocol': None,
            },
        }]
        assert result == expected_result

        # now try the update again but specify the conflicts resolution
        conflicts = {'_ceth_0xa74476443119A942dE498590Fe1f2454d7D4aC0d': 'remote'}
        response = requests.post(
            api_url_for(rotkehlchen_api_server, 'assetupdatesresource'),
            json={'async_query': async_query, 'conflicts': conflicts},
        )
        if async_query:
            task_id = assert_ok_async_response(response)
            outcome = wait_for_async_task(rotkehlchen_api_server, task_id)
            assert outcome['message'] == ''
            result = outcome['result']
        else:
            result = assert_proper_response_with_result(
                response,
                message='',
                status_code=HTTPStatus.OK,
            )

        # check new asset was added and conflict was ignored with an error due to
        # inability to do anything with the missing swapped_for
        assert result is True
        assert globaldb.get_setting_value(ASSETS_VERSION_KEY, None) == 999999991
        gnt = EthereumToken('0xa74476443119A942dE498590Fe1f2454d7D4aC0d')
        assert gnt.identifier == strethaddress_to_identifier('0xa74476443119A942dE498590Fe1f2454d7D4aC0d')  # noqa: E501
        assert gnt.name == 'Golem'
        assert gnt.symbol == 'GNT'
        assert gnt.asset_type == AssetType.ETHEREUM_TOKEN
        assert gnt.started == 1478810650
        assert gnt.forked is None
        assert gnt.swapped_for == A_GLM.identifier
        assert gnt.coingecko == 'golem'
        assert gnt.cryptocompare is None
        assert gnt.ethereum_address == '0xa74476443119A942dE498590Fe1f2454d7D4aC0d'
        assert gnt.decimals == 18
        assert gnt.protocol is None

        new_asset = Asset('121-ada-FADS-as')
        assert new_asset.identifier == '121-ada-FADS-as'
        assert new_asset.name == 'A name'
        assert new_asset.symbol == 'SYMBOL'
        assert new_asset.asset_type == AssetType.COUNTERPARTY_TOKEN
        assert new_asset.started is None
        assert new_asset.forked == 'BTC'
        assert new_asset.swapped_for is None
        assert new_asset.coingecko == ''
        assert new_asset.cryptocompare == ''

        errors = rotki.msg_aggregator.consume_errors()
        warnings = rotki.msg_aggregator.consume_warnings()
        assert len(errors) == 0, f'Found errors: {errors}'
        assert len(warnings) == 1
        assert f'Failed to resolve conflict for {gnt.identifier} in the DB during the v999999991 assets update. Skipping entry' in warnings[0]  # noqa: E501
def test_simple_update(rotkehlchen_api_server, globaldb):
    """Test that the happy case of update works.

    - Test that up_to_version argument works
    - Test that only versions above current local are applied
    - Test that versions with min/max schema mismatch are skipped
    """
    async_query = random.choice([False, True])
    rotki = rotkehlchen_api_server.rest_api.rotkehlchen
    # Three actions separated by `*` lines: add AKB token, add a counterparty
    # asset and rename EUR (with the insert used if EUR is missing locally)
    update_3 = """INSERT INTO ethereum_tokens(address, decimals, protocol) VALUES("0xC2FEC534c461c45533e142f724d0e3930650929c", 18, NULL);INSERT INTO assets(identifier,type, name, symbol,started, swapped_for, coingecko, cryptocompare, details_reference) VALUES("_ceth_0xC2FEC534c461c45533e142f724d0e3930650929c", "C", "AKB token", "AKB",123, NULL, NULL, "AIDU", "0xC2FEC534c461c45533e142f724d0e3930650929c");
*
INSERT INTO assets(identifier,type,name,symbol,started, swapped_for, coingecko, cryptocompare, details_reference) VALUES("121-ada-FADS-as", "F","A name","SYMBOL",NULL, NULL,"", "", "121-ada-FADS-as");INSERT INTO common_asset_details(asset_id, forked) VALUES("121-ada-FADS-as", "BTC");
*
UPDATE assets SET name="Ευρώ" WHERE identifier="EUR";
INSERT INTO assets(identifier,type,name,symbol,started, swapped_for, coingecko, cryptocompare, details_reference) VALUES("EUR", "A","Ευρώ","EUR",NULL, NULL,NULL,NULL, "EUR");INSERT INTO common_asset_details(asset_id, forked) VALUES("EUR", NULL);
"""  # noqa: E501
    update_patch = mock_asset_updates(
        original_requests_get=requests.get,
        latest=999999996,
        updates={
            "999999991": {
                "changes": 1,
                "min_schema_version": GLOBAL_DB_VERSION,
                "max_schema_version": GLOBAL_DB_VERSION,
            },
            "999999992": {
                "changes": 1,
                "min_schema_version": GLOBAL_DB_VERSION,
                "max_schema_version": GLOBAL_DB_VERSION,
            },
            "999999993": {
                "changes": 3,
                "min_schema_version": GLOBAL_DB_VERSION,
                "max_schema_version": GLOBAL_DB_VERSION,
            },
            # 999999994 has an impossible schema range so it must be skipped
            "999999994": {
                "changes": 5,
                "min_schema_version": GLOBAL_DB_VERSION + 1,
                "max_schema_version": GLOBAL_DB_VERSION - 1,
            },
            "999999995": {
                "changes": 5,
                "min_schema_version": GLOBAL_DB_VERSION,
                "max_schema_version": GLOBAL_DB_VERSION,
            },
            "999999996": {
                "changes": 5,
                "min_schema_version": GLOBAL_DB_VERSION,
                "max_schema_version": GLOBAL_DB_VERSION,
            },
        },
        sql_actions={"999999991": "", "999999992": "", "999999993": update_3, "999999994": "", "999999995": ""},  # noqa: E501
    )
    globaldb.add_setting_value(ASSETS_VERSION_KEY, 999999992)

    with update_patch:
        # Query pending updates: local is at 992, remote latest is 996
        response = requests.get(
            api_url_for(rotkehlchen_api_server, 'assetupdatesresource'),
            json={'async_query': async_query},
        )
        if async_query:
            task_id = assert_ok_async_response(response)
            outcome = wait_for_async_task(rotkehlchen_api_server, task_id)
            result = outcome['result']
            assert outcome['message'] == ''
        else:
            result = assert_proper_response_with_result(response)
        assert result['local'] == 999999992
        assert result['remote'] == 999999996
        # changes from 993 + 995 + 996 (3 + 5 + 5); 994 is skipped (schema mismatch)
        assert result['new_changes'] == 13

        # Apply the updates, but only up to version 999999995
        response = requests.post(
            api_url_for(rotkehlchen_api_server, 'assetupdatesresource'),
            json={'async_query': async_query, 'up_to_version': 999999995},
        )
        if async_query:
            task_id = assert_ok_async_response(response)
            outcome = wait_for_async_task(rotkehlchen_api_server, task_id)
            assert outcome['message'] == ''
            result = outcome['result']
        else:
            result = assert_proper_response_with_result(response)

        errors = rotki.msg_aggregator.consume_errors()
        warnings = rotki.msg_aggregator.consume_warnings()
        assert len(errors) == 0, f'Found errors: {errors}'
        assert len(warnings) == 1
        assert 'Skipping assets update 999999994 since it requires a min schema of' in warnings[0]
        assert result is True
        assert globaldb.get_setting_value(ASSETS_VERSION_KEY, None) == 999999995

        new_token = EthereumToken('0xC2FEC534c461c45533e142f724d0e3930650929c')
        assert new_token.identifier == strethaddress_to_identifier('0xC2FEC534c461c45533e142f724d0e3930650929c')  # noqa: E501
        assert new_token.name == 'AKB token'
        assert new_token.symbol == 'AKB'
        assert new_token.asset_type == AssetType.ETHEREUM_TOKEN
        assert new_token.started == 123
        assert new_token.forked is None
        assert new_token.swapped_for is None
        assert new_token.coingecko is None
        assert new_token.cryptocompare == 'AIDU'
        assert new_token.ethereum_address == '0xC2FEC534c461c45533e142f724d0e3930650929c'
        assert new_token.decimals == 18
        assert new_token.protocol is None

        new_asset = Asset('121-ada-FADS-as')
        assert new_asset.identifier == '121-ada-FADS-as'
        assert new_asset.name == 'A name'
        assert new_asset.symbol == 'SYMBOL'
        assert new_asset.asset_type == AssetType.COUNTERPARTY_TOKEN
        assert new_asset.started is None
        assert new_asset.forked == 'BTC'
        assert new_asset.swapped_for is None
        assert new_asset.coingecko == ''
        assert new_asset.cryptocompare == ''
        assert Asset('EUR').name == 'Ευρώ'
def test_query_blockchain_balances_async(
        rotkehlchen_api_server,
        ethereum_accounts,
        btc_accounts,
        number_of_eth_accounts,
):
    """Test that the query blockchain balances endpoint works when queried asynchronously"""
    # Disable caching of query results
    rotki = rotkehlchen_api_server.rest_api.rotkehlchen
    rotki.blockchain.cache_ttl_secs = 0
    setup = setup_balances(rotki, ethereum_accounts=ethereum_accounts, btc_accounts=btc_accounts)

    # First query only ETH and token balances
    response = requests.get(api_url_for(
        rotkehlchen_api_server,
        "named_blockchain_balances_resource",
        blockchain='ETH',
    ), json={'async_query': True})
    task_id = assert_ok_async_response(response)
    with setup.etherscan_patch:
        outcome = wait_for_async_task(rotkehlchen_api_server, task_id)
    assert_eth_balances_result(
        rotki=rotki,
        json_data=outcome,
        eth_accounts=ethereum_accounts,
        eth_balances=setup.eth_balances,
        token_balances=setup.token_balances,
        also_btc=False,
    )

    # Then query only BTC balances
    response = requests.get(api_url_for(
        rotkehlchen_api_server,
        "named_blockchain_balances_resource",
        blockchain='BTC',
    ), json={'async_query': True})
    task_id = assert_ok_async_response(response)
    with setup.bitcoin_patch:
        outcome = wait_for_async_task(rotkehlchen_api_server, task_id)
    assert_btc_balances_result(
        json_data=outcome,
        btc_accounts=btc_accounts,
        btc_balances=setup.btc_balances,
        also_eth=False,
    )

    # Finally query all balances
    response = requests.get(api_url_for(
        rotkehlchen_api_server,
        "blockchainbalancesresource",
    ), json={'async_query': True})
    task_id = assert_ok_async_response(response)
    with setup.etherscan_patch, setup.bitcoin_patch:
        outcome = wait_for_async_task(rotkehlchen_api_server, task_id)
    assert_eth_balances_result(
        rotki=rotki,
        json_data=outcome,
        eth_accounts=ethereum_accounts,
        eth_balances=setup.eth_balances,
        token_balances=setup.token_balances,
        also_btc=True,
    )
    assert_btc_balances_result(
        json_data=outcome,
        btc_accounts=btc_accounts,
        btc_balances=setup.btc_balances,
        also_eth=True,
    )
def test_update_conflicts(rotkehlchen_api_server, globaldb):
    """Test that conflicts in an asset update are handled properly"""
    async_query = random.choice([False, True])
    rotki = rotkehlchen_api_server.rest_api.rotkehlchen
    # Four actions separated by `*` lines: a new asset, a DAI edit that
    # conflicts with the packaged DB, a DASH edit that conflicts, and a token
    # that conflicts with a locally-added user asset
    update_1 = """INSERT INTO assets(identifier,type,name,symbol,started, swapped_for, coingecko, cryptocompare, details_reference) VALUES("121-ada-FADS-as", "F","A name","SYMBOL",NULL, NULL,"", "", "121-ada-FADS-as");INSERT INTO common_asset_details(asset_id, forked) VALUES("121-ada-FADS-as", "BTC");
*
INSERT INTO ethereum_tokens(address, decimals, protocol) VALUES("0x6B175474E89094C44Da98b954EedeAC495271d0F", 8, "maker");INSERT INTO assets(identifier,type, name, symbol,started, swapped_for, coingecko, cryptocompare, details_reference) VALUES("_ceth_0x6B175474E89094C44Da98b954EedeAC495271d0F", "C", "New Multi Collateral DAI", "NDAI", 1573672677, NULL, "dai", NULL, "0x6B175474E89094C44Da98b954EedeAC495271d0F");
*
INSERT INTO assets(identifier,type,name,symbol,started, swapped_for, coingecko, cryptocompare, details_reference) VALUES("DASH", "B","Dash","DASH",1337, NULL, "dash-coingecko", NULL, "DASH");INSERT INTO common_asset_details(asset_id, forked) VALUES("DASH", "BTC");
*
INSERT INTO ethereum_tokens(address, decimals, protocol) VALUES("0x1B175474E89094C44Da98b954EedeAC495271d0F", 18, NULL);
INSERT INTO assets(identifier,type, name, symbol,started, swapped_for, coingecko, cryptocompare, details_reference) VALUES("_ceth_0x1B175474E89094C44Da98b954EedeAC495271d0F", "C", "Conflicting token", "CTK", 1573672677, NULL, "ctk", NULL, "0x1B175474E89094C44Da98b954EedeAC495271d0F");
*
"""  # noqa: E501
    globaldb.add_asset(  # add a conflicting token
        asset_id='_ceth_0x1B175474E89094C44Da98b954EedeAC495271d0F',
        asset_type=AssetType.ETHEREUM_TOKEN,
        data=EthereumToken.initialize(
            address=ChecksumEthAddress('0x1B175474E89094C44Da98b954EedeAC495271d0F'),
            decimals=12,
            name='Conflicting token',
            symbol='CTK',
            started=None,
            swapped_for=None,
            coingecko='ctk',
            cryptocompare=None,
            protocol=None,
            underlying_tokens=None,
        ),
    )
    globaldb.add_user_owned_assets([Asset('_ceth_0x1B175474E89094C44Da98b954EedeAC495271d0F')])
    update_patch = mock_asset_updates(
        original_requests_get=requests.get,
        latest=999999991,
        updates={"999999991": {
            "changes": 3,
            "min_schema_version": GLOBAL_DB_VERSION,
            "max_schema_version": GLOBAL_DB_VERSION,
        }},
        sql_actions={"999999991": update_1},
    )
    globaldb.add_setting_value(ASSETS_VERSION_KEY, 999999990)
    start_assets_num = len(globaldb.get_all_asset_data(mapping=False))

    with update_patch:
        # Check for updates first
        response = requests.get(
            api_url_for(rotkehlchen_api_server, 'assetupdatesresource'),
            json={'async_query': async_query},
        )
        if async_query:
            task_id = assert_ok_async_response(response)
            outcome = wait_for_async_task(rotkehlchen_api_server, task_id)
            result = outcome['result']
            assert outcome['message'] == ''
        else:
            result = assert_proper_response_with_result(response)
        assert result['local'] == 999999990
        assert result['remote'] == 999999991
        assert result['new_changes'] == 3

        # Apply without conflict resolution -> expect conflicts to be reported
        response = requests.post(
            api_url_for(rotkehlchen_api_server, 'assetupdatesresource'),
            json={'async_query': async_query},
        )
        if async_query:
            task_id = assert_ok_async_response(response)
            outcome = wait_for_async_task(rotkehlchen_api_server, task_id)
            assert outcome['message'] == 'Found conflicts during assets upgrade'
            result = outcome['result']
        else:
            result = assert_proper_response_with_result(
                response,
                message='Found conflicts during assets upgrade',
                status_code=HTTPStatus.CONFLICT,
            )

        # Make sure that nothing was committed
        assert globaldb.get_setting_value(ASSETS_VERSION_KEY, None) == 999999990
        assert len(globaldb.get_all_asset_data(mapping=False)) == start_assets_num
        with pytest.raises(UnknownAsset):
            Asset('121-ada-FADS-as')
        errors = rotki.msg_aggregator.consume_errors()
        warnings = rotki.msg_aggregator.consume_warnings()
        assert len(errors) == 0, f'Found errors: {errors}'
        assert len(warnings) == 0, f'Found warnings: {warnings}'

        # See that we get 3 conflicts
        expected_result = [{
            'identifier': '_ceth_0x6B175474E89094C44Da98b954EedeAC495271d0F',
            'local': {
                'name': 'Multi Collateral Dai',
                'symbol': 'DAI',
                'asset_type': 'ethereum token',
                'started': 1573672677,
                'forked': None,
                'swapped_for': None,
                'ethereum_address': '0x6B175474E89094C44Da98b954EedeAC495271d0F',
                'decimals': 18,
                'cryptocompare': None,
                'coingecko': 'dai',
                'protocol': None,
            },
            'remote': {
                'name': 'New Multi Collateral DAI',
                'symbol': 'NDAI',
                'asset_type': 'ethereum token',
                'started': 1573672677,
                'forked': None,
                'swapped_for': None,
                'ethereum_address': '0x6B175474E89094C44Da98b954EedeAC495271d0F',
                'decimals': 8,
                'cryptocompare': None,
                'coingecko': 'dai',
                'protocol': 'maker',
            },
        }, {
            'identifier': 'DASH',
            'local': {
                'name': 'Dash',
                'symbol': 'DASH',
                'asset_type': 'own chain',
                'started': 1390095618,
                'forked': None,
                'swapped_for': None,
                'ethereum_address': None,
                'decimals': None,
                'cryptocompare': None,
                'coingecko': 'dash',
                'protocol': None,
            },
            'remote': {
                'name': 'Dash',
                'symbol': 'DASH',
                'asset_type': 'own chain',
                'started': 1337,
                'forked': 'BTC',
                'swapped_for': None,
                'ethereum_address': None,
                'decimals': None,
                'cryptocompare': None,
                'coingecko': 'dash-coingecko',
                'protocol': None,
            },
        }, {
            'identifier': '_ceth_0x1B175474E89094C44Da98b954EedeAC495271d0F',
            'local': {
                'asset_type': 'ethereum token',
                'coingecko': 'ctk',
                'cryptocompare': None,
                'decimals': 12,
                'ethereum_address': '0x1B175474E89094C44Da98b954EedeAC495271d0F',
                'forked': None,
                'name': 'Conflicting token',
                'protocol': None,
                'started': None,
                'swapped_for': None,
                'symbol': 'CTK',
            },
            'remote': {
                'asset_type': 'ethereum token',
                'coingecko': 'ctk',
                'cryptocompare': None,
                'decimals': 18,
                'ethereum_address': '0x1b175474E89094C44DA98B954EeDEAC495271d0f',
                'forked': None,
                'name': 'Conflicting token',
                'protocol': None,
                'started': 1573672677,
                'swapped_for': None,
                'symbol': 'CTK',
            },
        }]
        assert result == expected_result

        # now try the update again but specify the conflicts resolution
        conflicts = {'_ceth_0x6B175474E89094C44Da98b954EedeAC495271d0F': 'remote', 'DASH': 'local', '_ceth_0x1B175474E89094C44Da98b954EedeAC495271d0F': 'remote'}  # noqa: E501
        response = requests.post(
            api_url_for(rotkehlchen_api_server, 'assetupdatesresource'),
            json={'async_query': async_query, 'conflicts': conflicts},
        )
        if async_query:
            task_id = assert_ok_async_response(response)
            outcome = wait_for_async_task(rotkehlchen_api_server, task_id)
            assert outcome['message'] == ''
            result = outcome['result']
        else:
            result = assert_proper_response_with_result(
                response,
                message='',
                status_code=HTTPStatus.OK,
            )

        cursor = globaldb._conn.cursor()
        # check conflicts were solved as per the given choices and new asset also added
        assert result is True
        assert globaldb.get_setting_value(ASSETS_VERSION_KEY, None) == 999999991
        errors = rotki.msg_aggregator.consume_errors()
        warnings = rotki.msg_aggregator.consume_warnings()
        assert len(errors) == 0, f'Found errors: {errors}'
        assert len(warnings) == 0, f'Found warnings: {warnings}'

        dai = EthereumToken('0x6B175474E89094C44Da98b954EedeAC495271d0F')
        assert dai.identifier == strethaddress_to_identifier('0x6B175474E89094C44Da98b954EedeAC495271d0F')  # noqa: E501
        assert dai.name == 'New Multi Collateral DAI'
        assert dai.symbol == 'NDAI'
        assert dai.asset_type == AssetType.ETHEREUM_TOKEN
        assert dai.started == 1573672677
        assert dai.forked is None
        assert dai.swapped_for is None
        assert dai.coingecko == 'dai'
        assert dai.cryptocompare is None
        assert dai.ethereum_address == '0x6B175474E89094C44Da98b954EedeAC495271d0F'
        assert dai.decimals == 8
        assert dai.protocol == 'maker'
        # make sure data is in both tables
        assert cursor.execute('SELECT COUNT(*) from ethereum_tokens WHERE address="0x6B175474E89094C44Da98b954EedeAC495271d0F";').fetchone()[0] == 1  # noqa: E501
        assert cursor.execute('SELECT COUNT(*) from assets WHERE identifier="_ceth_0x6B175474E89094C44Da98b954EedeAC495271d0F";').fetchone()[0] == 1  # noqa: E501

        dash = Asset('DASH')
        assert dash.identifier == 'DASH'
        assert dash.name == 'Dash'
        assert dash.symbol == 'DASH'
        assert dash.asset_type == AssetType.OWN_CHAIN
        assert dash.started == 1390095618
        assert dash.forked is None
        assert dash.swapped_for is None
        assert dash.coingecko == 'dash'
        assert dash.cryptocompare is None
        assert cursor.execute('SELECT COUNT(*) from common_asset_details WHERE asset_id="DASH";').fetchone()[0] == 1  # noqa: E501
        assert cursor.execute('SELECT COUNT(*) from assets WHERE identifier="DASH";').fetchone()[0] == 1  # noqa: E501

        new_asset = Asset('121-ada-FADS-as')
        assert new_asset.identifier == '121-ada-FADS-as'
        assert new_asset.name == 'A name'
        assert new_asset.symbol == 'SYMBOL'
        assert new_asset.asset_type == AssetType.COUNTERPARTY_TOKEN
        assert new_asset.started is None
        assert new_asset.forked == 'BTC'
        assert new_asset.swapped_for is None
        assert new_asset.coingecko == ''
        assert new_asset.cryptocompare == ''
        assert cursor.execute('SELECT COUNT(*) from common_asset_details WHERE asset_id="121-ada-FADS-as";').fetchone()[0] == 1  # noqa: E501
        assert cursor.execute('SELECT COUNT(*) from assets WHERE identifier="121-ada-FADS-as";').fetchone()[0] == 1  # noqa: E501

        ctk = EthereumToken('0x1B175474E89094C44Da98b954EedeAC495271d0F')
        assert ctk.name == 'Conflicting token'
        assert ctk.symbol == 'CTK'
        assert ctk.asset_type == AssetType.ETHEREUM_TOKEN
        assert ctk.started == 1573672677
        assert ctk.forked is None
        assert ctk.swapped_for is None
        assert ctk.coingecko == 'ctk'
        assert ctk.cryptocompare is None
        assert ctk.ethereum_address == '0x1B175474E89094C44Da98b954EedeAC495271d0F'
        assert ctk.decimals == 18
        assert ctk.protocol is None
        assert cursor.execute('SELECT COUNT(*) from ethereum_tokens WHERE address="0x1B175474E89094C44Da98b954EedeAC495271d0F";').fetchone()[0] == 1  # noqa: E501
        assert cursor.execute('SELECT COUNT(*) from assets WHERE identifier="_ceth_0x1B175474E89094C44Da98b954EedeAC495271d0F";').fetchone()[0] == 1  # noqa: E501
def test_query_compound_history(rotkehlchen_api_server, ethereum_accounts):  # pylint: disable=unused-argument  # noqa: E501
    """Check querying the compound history endpoint works. Uses real data"""
    rotki = rotkehlchen_api_server.rest_api.rotkehlchen
    setup = setup_balances(
        rotki,
        ethereum_accounts=ethereum_accounts,
        eth_balances=['1000000', '2000000', '33000030003', '42323213'],
        token_balances={},
        btc_accounts=None,
        original_queries=['zerion'],
    )
    # Since this test can be a bit slow we don't run both async and sync in the same test run
    # Instead we randomly choose one. Eventually both cases will be covered.
    async_query = random.choice([True, False])

    with ExitStack() as stack:
        # patch ethereum/etherscan to not autodetect tokens
        setup.enter_ethereum_patches(stack)
        response = requests.get(api_url_for(
            rotkehlchen_api_server,
            "compoundhistoryresource",
        ), json={'async_query': async_query})
        if async_query:
            task_id = assert_ok_async_response(response)
            # Timeout of 120 since this test can take a long time
            outcome = wait_for_async_task(rotkehlchen_api_server, task_id, timeout=120)
            assert outcome['message'] == ''
            result = outcome['result']
        else:
            result = assert_proper_response_with_result(response)

    assert len(result) == 5
    expected_events = process_result_list(EXPECTED_EVENTS)
    # Check only 22 first events, since this is how many there were in the time of
    # the writing of the test. Also don't check events for one of the addresses
    # as it's added later, has many events and it's only to see we handle repay correctly
    to_check_events = [
        x for x in result['events']
        if x['address'] != '0x65304d6aff5096472519ca86a6a1fea31cb47Ced'
    ]
    assert to_check_events[:22] == expected_events

    # Check one repay event
    other_events = [
        x for x in result['events']
        if x['address'] == '0x65304d6aff5096472519ca86a6a1fea31cb47Ced'
    ]
    assert other_events[12]['event_type'] == 'repay'
    expected_hash = '0x48a3e2ef8a746383deac34d74f2f0ea0451b2047701fbed4b9d769a782888eea'
    assert other_events[12]['tx_hash'] == expected_hash
    assert other_events[12]['value']['amount'] == '0.55064402'

    # Check interest profit mappings
    profit_0 = result['interest_profit']['0x2B888954421b424C5D3D9Ce9bB67c9bD47537d12']
    assert FVal(profit_0['DAI']['amount']) > FVal(9)
    profit_1 = result['interest_profit']['0xC440f3C87DC4B6843CABc413916220D4f4FeD117']
    assert FVal(profit_1['USDC']['amount']) > FVal(2)
    profit_2 = result['interest_profit']['0xF59D4937BF1305856C3a267bB07791507a3377Ee']
    assert FVal(profit_2['DAI']['amount']) > FVal('0.3')

    # Check debt loss mappings
    debt_0 = result['debt_loss']['0xC440f3C87DC4B6843CABc413916220D4f4FeD117']
    assert FVal(debt_0['cUSDC']['amount']) > FVal('84')
    assert FVal(debt_0['ETH']['amount']) > FVal('0.000012422')

    # Check liquidation profit mappings
    lprofit_0 = result['liquidation_profit']['0xC440f3C87DC4B6843CABc413916220D4f4FeD117']
    assert FVal(lprofit_0['ETH']['amount']) > FVal('0.000012')

    # Check rewards mappings
    rewards_0 = result['rewards']['0xC440f3C87DC4B6843CABc413916220D4f4FeD117']
    assert FVal(rewards_0['COMP']['amount']) > FVal('0.000036')
    rewards_1 = result['rewards']['0xF59D4937BF1305856C3a267bB07791507a3377Ee']
    assert FVal(rewards_1['COMP']['amount']) > FVal('0.003613')
def test_get_balances(
        rotkehlchen_api_server,
        ethereum_accounts,  # pylint: disable=unused-argument
        rotki_premium_credentials,
        start_with_valid_premium,
):
    """Check querying the uniswap balances endpoint works. Uses real data

    Checks the functionality both for the graph queries (when premium)
    and simple onchain queries (without premium)

    THIS IS SUPER FREAKING SLOW. BE WARNED.
    """
    async_query = random.choice([False, True])
    rotki = rotkehlchen_api_server.rest_api.rotkehlchen
    premium = None
    if start_with_valid_premium:
        premium = Premium(rotki_premium_credentials)
    # Set module premium attribute
    rotki.chain_manager.uniswap.premium = premium

    response = requests.get(
        api_url_for(rotkehlchen_api_server, 'uniswapbalancesresource'),
        json={'async_query': async_query},
    )
    if async_query:
        task_id = assert_ok_async_response(response)
        outcome = wait_for_async_task(
            server=rotkehlchen_api_server,
            task_id=task_id,
            timeout=ASYNC_TASK_WAIT_TIMEOUT * 10,
        )
        assert outcome['message'] == ''
        result = outcome['result']
    else:
        result = assert_proper_response_with_result(response)

    if len(result) != 1:
        test_warnings.warn(
            UserWarning(f'Test account {LP_HOLDER_ADDRESS} has no uniswap balances'),
        )
        return

    address_balances = result[LP_HOLDER_ADDRESS]
    for lp in address_balances:
        # LiquidityPool attributes
        assert lp['address'].startswith('0x')
        assert len(lp['assets']) == 2
        if start_with_valid_premium:
            assert lp['total_supply'] is not None
        else:
            assert lp['total_supply'] is None
        assert lp['user_balance']['amount']
        assert lp['user_balance']['usd_value']

        # LiquidityPoolAsset attributes
        for lp_asset in lp['assets']:
            lp_asset_type = type(lp_asset['asset'])
            assert lp_asset_type in (str, dict)
            # Unknown asset, at least contains token address
            if lp_asset_type is dict:
                assert lp_asset['asset']['ethereum_address'].startswith('0x')
            # Known asset, contains identifier
            else:
                assert not lp_asset['asset'].startswith('0x')
            if start_with_valid_premium:
                assert lp_asset['total_amount'] is not None
            else:
                assert lp_asset['total_amount'] is None
            assert lp_asset['usd_price']
            assert len(lp_asset['user_balance']) == 2
            assert lp_asset['user_balance']['amount']
            assert lp_asset['user_balance']['usd_value']
def test_query_asset_movements(rotkehlchen_api_server_with_exchanges):
    """Test that using the asset movements query endpoint works fine"""
    async_query = random.choice([False, True])
    server = rotkehlchen_api_server_with_exchanges
    setup = prepare_rotki_for_history_processing_test(server.rest_api.rotkehlchen)

    # query asset movements of one specific exchange
    with setup.polo_patch:
        response = requests.get(
            api_url_for(server, "assetmovementsresource"),
            json={'location': 'poloniex', 'async_query': async_query},
        )
    if async_query:
        task_id = assert_ok_async_response(response)
        outcome = wait_for_async_task(rotkehlchen_api_server_with_exchanges, task_id)
        result = outcome['result']
    else:
        result = assert_proper_response_with_result(response)
    assert result['entries_found'] == 4
    assert result['entries_limit'] == FREE_ASSET_MOVEMENTS_LIMIT
    assert_poloniex_asset_movements(result['entries'], deserialized=True)

    # query asset movements of all exchanges
    with setup.polo_patch:
        response = requests.get(
            api_url_for(server, "assetmovementsresource"),
            json={'async_query': async_query},
        )
    if async_query:
        task_id = assert_ok_async_response(response)
        outcome = wait_for_async_task(rotkehlchen_api_server_with_exchanges, task_id)
        result = outcome['result']
    else:
        result = assert_proper_response_with_result(response)
    movements = result['entries']
    assert_poloniex_asset_movements([x for x in movements if x['location'] == 'poloniex'], True)
    assert_kraken_asset_movements([x for x in movements if x['location'] == 'kraken'], True)

    def assert_okay(response):
        """Helper function for DRY checking below assertions"""
        if async_query:
            task_id = assert_ok_async_response(response)
            outcome = wait_for_async_task(rotkehlchen_api_server_with_exchanges, task_id)
            result = outcome['result']
        else:
            result = assert_proper_response_with_result(response)
        movements = result['entries']
        assert_poloniex_asset_movements(
            to_check_list=[x for x in movements if x['location'] == 'poloniex'],
            deserialized=True,
            movements_to_check=(1, 2),
        )
        assert_kraken_asset_movements(
            to_check_list=[x for x in movements if x['location'] == 'kraken'],
            deserialized=True,
            movements_to_check=(0, 1, 2),
        )

    # and now query them in a specific time range excluding some asset movements
    data = {'from_timestamp': 1439994442, 'to_timestamp': 1458994442, 'async_query': async_query}
    with setup.polo_patch:
        response = requests.get(api_url_for(server, "assetmovementsresource"), json=data)
    assert_okay(response)

    # do the same but with query args. This serves as test of from/to timestamp with query args
    with setup.polo_patch:
        response = requests.get(
            api_url_for(server, "assetmovementsresource") + '?' + urlencode(data))
    assert_okay(response)
def test_multiple_balance_queries_not_concurrent(
        rotkehlchen_api_server_with_exchanges,
        ethereum_accounts,
        btc_accounts,
        number_of_eth_accounts,
):
    """Test multiple different balance query requests happening concurrently

    This tests that if multiple balance query requests happen concurrently we
    do not end up doing them multiple times, but reuse the results thanks to cache.
    """
    rotki = rotkehlchen_api_server_with_exchanges.rest_api.rotkehlchen
    setup = setup_balances(rotki, ethereum_accounts, btc_accounts)
    # Wrap (not replace) the underlying query functions so the real code
    # still runs but call counts can be inspected afterwards.
    e = patch.object(
        rotki.chain_manager.ethchain,
        'get_multieth_balance',
        wraps=rotki.chain_manager.ethchain.get_multieth_balance,
    )
    binance = rotki.exchange_manager.connected_exchanges['binance']
    b = patch.object(binance, 'api_query_dict', wraps=binance.api_query_dict)

    # Test all balances request by requesting to not save the data
    with setup.poloniex_patch, setup.binance_patch, setup.etherscan_patch, setup.alethio_patch, setup.bitcoin_patch, e as eth, b as bn:  # noqa: E501
        # Fire three async queries back to back so they overlap in flight
        response = requests.get(
            api_url_for(
                rotkehlchen_api_server_with_exchanges,
                "allbalancesresource",
            ),
            json={'async_query': True},
        )
        task_id_all = assert_ok_async_response(response)
        response = requests.get(api_url_for(
            rotkehlchen_api_server_with_exchanges,
            "named_exchanges_balances_resource",
            name='binance',
        ), json={'async_query': True})
        task_id_one_exchange = assert_ok_async_response(response)
        response = requests.get(api_url_for(
            rotkehlchen_api_server_with_exchanges,
            "blockchainbalancesresource",
        ), json={'async_query': True})
        task_id_blockchain = assert_ok_async_response(response)

        outcome_all = wait_for_async_task(rotkehlchen_api_server_with_exchanges, task_id_all)
        outcome_one_exchange = wait_for_async_task(
            rotkehlchen_api_server_with_exchanges,
            task_id_one_exchange,
        )
        outcome_blockchain = wait_for_async_task(
            rotkehlchen_api_server_with_exchanges,
            task_id_blockchain,
        )
        # A single underlying call proves the cache served the overlapping requests
        assert eth.call_count == 1, 'blockchain balance call should not happen concurrently'
        assert bn.call_count == 1, 'binance balance call should not happen concurrently'

        assert_all_balances(
            data=outcome_all,
            db=rotki.data.db,
            expected_data_in_db=True,
            setup=setup,
        )
        assert_binance_balances_result(outcome_one_exchange['result'])
        assert_eth_balances_result(
            rotki=rotki,
            json_data=outcome_blockchain,
            eth_accounts=ethereum_accounts,
            eth_balances=setup.eth_balances,
            token_balances=setup.token_balances,
            also_btc=True,
        )
def test_query_eth2_info(rotkehlchen_api_server, ethereum_accounts):
    """This test uses real data and queries the eth2 deposit contract logs"""
    async_query = random.choice([False, True])
    rotki = rotkehlchen_api_server.rest_api.rotkehlchen
    setup = setup_balances(
        rotki,
        ethereum_accounts=ethereum_accounts,
        btc_accounts=[],
        original_queries=['logs', 'transactions', 'blocknobytime', 'beaconchain'],
    )

    def _query_eth2(resource_name, timeout_factor):
        """Query the given eth2 resource under the blockchain patches and
        unwrap the (possibly async) result."""
        with ExitStack() as stack:
            setup.enter_blockchain_patches(stack)
            response = requests.get(
                api_url_for(rotkehlchen_api_server, resource_name),
                json={'async_query': async_query},
            )
            if not async_query:
                return assert_proper_response_with_result(response)
            task_id = assert_ok_async_response(response)
            outcome = wait_for_async_task(
                rotkehlchen_api_server,
                task_id,
                timeout=ASYNC_TASK_WAIT_TIMEOUT * timeout_factor,
            )
            assert outcome['message'] == ''
            return outcome['result']

    details = _query_eth2('eth2stakedetailsresource', 5)
    deposits = _query_eth2('eth2stakedepositsresource', 10)

    expected_pubkey = '0xb016e31f633a21fbe42a015152399361184f1e2c0803d89823c224994af74a561c4ad8cfc94b18781d589d03e952cd5b'  # noqa: E501
    assert deposits[0] == {
        'from_address': '0xfeF0E7635281eF8E3B705e9C5B86e1d3B0eAb397',
        'log_index': 22,
        'pubkey': expected_pubkey,
        'timestamp': 1604506685,
        'tx_hash': '0xd9eca1c2a0c5ff2f25071713432b21cc4d0ff2e8963edc63a48478e395e08db1',
        'deposit_index': 9,
        'value': {'amount': '32', 'usd_value': '32'},
        'withdrawal_credentials': '0x004c7691c2085648f394ffaef851f3b1d51b95f7263114bc923fc5338f5fc499',  # noqa: E501
    }
    first_detail = details[0]
    assert FVal(first_detail['balance']['amount']) >= ZERO
    assert FVal(first_detail['balance']['usd_value']) >= ZERO
    assert first_detail['eth1_depositor'] == '0xfeF0E7635281eF8E3B705e9C5B86e1d3B0eAb397'  # noqa: E501
    assert first_detail['index'] == 9
    assert first_detail['public_key'] == expected_pubkey
    for duration in ('1d', '1w', '1m', '1y'):
        performance = first_detail[f'performance_{duration}']
        # Can't assert for positive since they may go offline for a day and the test will fail
        # https://twitter.com/LefterisJP/status/1361091757274972160
        assert FVal(performance['amount']) is not None
        assert FVal(performance['usd_value']) is not None
def test_add_and_query_manually_tracked_balances(
        rotkehlchen_api_server,
        ethereum_accounts,
):
    """Test that adding and querying manually tracked balances via the API works fine

    Also regression-tests https://github.com/rotki/rotki/issues/942 by checking
    that an all-balances query persists the manual balances to the DB.
    """
    async_query = random.choice([False, True])
    rotki = rotkehlchen_api_server.rest_api.rotkehlchen
    setup = setup_balances(rotki,
                           ethereum_accounts=ethereum_accounts,
                           btc_accounts=None)
    _populate_tags(rotkehlchen_api_server)
    response = requests.get(
        api_url_for(
            rotkehlchen_api_server,
            "manuallytrackedbalancesresource",
        ),
        json={'async_query': async_query},
    )
    if async_query:
        task_id = assert_ok_async_response(response)
        outcome = wait_for_async_task(rotkehlchen_api_server, task_id)
        result = outcome['result']
    else:
        result = assert_proper_response_with_result(response)
    assert result['balances'] == [], 'In the beginning we should have no entries'

    balances = _populate_initial_balances(rotkehlchen_api_server)

    # now query and make sure the added balances are returned
    response = requests.get(
        api_url_for(
            rotkehlchen_api_server,
            "manuallytrackedbalancesresource",
        ),
        json={'async_query': async_query},
    )
    if async_query:
        task_id = assert_ok_async_response(response)
        outcome = wait_for_async_task(rotkehlchen_api_server, task_id)
        result = outcome['result']
    else:
        result = assert_proper_response_with_result(response)
    assert_balances_match(expected_balances=balances,
                          returned_balances=result['balances'])

    now = ts_now()
    # Also now test for https://github.com/rotki/rotki/issues/942 by querying for all balances
    # causing all balances to be saved and making sure the manual balances also got saved
    with ExitStack() as stack:
        setup.enter_ethereum_patches(stack)
        response = requests.get(
            api_url_for(
                rotkehlchen_api_server,
                "allbalancesresource",
            ),
            json={'async_query': async_query},
        )
        if async_query:
            task_id = assert_ok_async_response(response)
            outcome = wait_for_async_task(rotkehlchen_api_server, task_id)
            result = outcome['result']
        else:
            result = assert_proper_response_with_result(response)

    # the manually tracked balances should appear in the all-balances result
    assert result['BTC']['amount'] == '1.425'
    assert result['XMR']['amount'] == '50.315'
    assert result['BNB']['amount'] == '155'
    # Check DB to make sure a save happened
    assert rotki.data.db.get_last_balance_save_time() >= now
    assert set(rotki.data.db.query_owned_assets()) == {
        'BTC', 'XMR', 'BNB', 'ETH', 'RDN'
    }
def test_query_eth2_deposits_details_and_stats(rotkehlchen_api_server, ethereum_accounts):
    """This test uses real data and queries the eth2 details, deposits and daily stats"""
    async_query = random.choice([False, True])
    rotki = rotkehlchen_api_server.rest_api.rotkehlchen
    setup = setup_balances(
        rotki,
        ethereum_accounts=ethereum_accounts,
        btc_accounts=[],
        original_queries=['logs', 'transactions', 'blocknobytime', 'beaconchain'],
    )
    with ExitStack() as stack:
        setup.enter_blockchain_patches(stack)
        response = requests.get(
            api_url_for(
                rotkehlchen_api_server,
                'eth2stakedetailsresource',
            ),
            json={'async_query': async_query},
        )
        if async_query:
            task_id = assert_ok_async_response(response)
            outcome = wait_for_async_task(
                rotkehlchen_api_server,
                task_id,
                timeout=ASYNC_TASK_WAIT_TIMEOUT * 5,
            )
            assert outcome['message'] == ''
            details = outcome['result']
        else:
            details = assert_proper_response_with_result(response)

    with ExitStack() as stack:
        setup.enter_blockchain_patches(stack)
        response = requests.get(
            api_url_for(
                rotkehlchen_api_server,
                'eth2stakedepositsresource',
            ),
            json={'async_query': async_query},
        )
        if async_query:
            task_id = assert_ok_async_response(response)
            outcome = wait_for_async_task(
                rotkehlchen_api_server,
                task_id,
                timeout=ASYNC_TASK_WAIT_TIMEOUT * 10,
            )
            assert outcome['message'] == ''
            deposits = outcome['result']
        else:
            deposits = assert_proper_response_with_result(response)

    expected_pubkey = '0xb016e31f633a21fbe42a015152399361184f1e2c0803d89823c224994af74a561c4ad8cfc94b18781d589d03e952cd5b'  # noqa: E501
    assert deposits[0] == {
        'from_address': '0xfeF0E7635281eF8E3B705e9C5B86e1d3B0eAb397',
        'tx_index': 15,
        'pubkey': expected_pubkey,
        'timestamp': 1604506685,
        'tx_hash': '0xd9eca1c2a0c5ff2f25071713432b21cc4d0ff2e8963edc63a48478e395e08db1',
        'value': {'amount': '32', 'usd_value': '32'},
        'withdrawal_credentials': '0x004c7691c2085648f394ffaef851f3b1d51b95f7263114bc923fc5338f5fc499',  # noqa: E501
    }
    assert FVal(details[0]['balance']['amount']) >= ZERO
    assert FVal(details[0]['balance']['usd_value']) >= ZERO
    assert details[0]['eth1_depositor'] == '0xfeF0E7635281eF8E3B705e9C5B86e1d3B0eAb397'  # noqa: E501
    assert details[0]['index'] == 9
    assert details[0]['public_key'] == expected_pubkey
    for duration in ('1d', '1w', '1m', '1y'):
        performance = details[0][f'performance_{duration}']
        # Can't assert for positive since they may go offline for a day and the test will fail
        # https://twitter.com/LefterisJP/status/1361091757274972160
        # NOTE(review): FVal(...) is never None; these asserts only verify the
        # values parse as FVal without raising -- confirm that is the intent.
        assert FVal(performance['amount']) is not None
        assert FVal(performance['usd_value']) is not None

    # for daily stats let's have 3 validators
    new_index_1 = 43948
    new_index_2 = 23948
    response = requests.put(
        api_url_for(
            rotkehlchen_api_server,
            'eth2validatorsresource',
        ),
        json={'validator_index': new_index_1},
    )
    assert_simple_ok_response(response)
    response = requests.put(
        api_url_for(
            rotkehlchen_api_server,
            'eth2validatorsresource',
        ),
        json={'validator_index': new_index_2},
    )
    assert_simple_ok_response(response)

    # Query deposits again after including manually input validator
    with ExitStack() as stack:
        setup.enter_blockchain_patches(stack)
        response = requests.get(
            api_url_for(
                rotkehlchen_api_server,
                'eth2stakedepositsresource',
            ),
            json={'async_query': async_query},
        )
        if async_query:
            task_id = assert_ok_async_response(response)
            outcome = wait_for_async_task(
                rotkehlchen_api_server,
                task_id,
                timeout=ASYNC_TASK_WAIT_TIMEOUT * 10,
            )
            assert outcome['message'] == ''
            deposits = outcome['result']
        else:
            deposits = assert_proper_response_with_result(response)
    assert len(deposits) == 3
    warnings = rotki.msg_aggregator.consume_warnings()
    errors = rotki.msg_aggregator.consume_errors()
    assert len(warnings) == 0
    assert len(errors) == 0

    # Now query eth2 details also including manually input validators to see they work
    with ExitStack() as stack:
        setup.enter_blockchain_patches(stack)
        response = requests.get(
            api_url_for(
                rotkehlchen_api_server,
                'eth2stakedetailsresource',
            ),
            json={'async_query': async_query},
        )
        if async_query:
            task_id = assert_ok_async_response(response)
            outcome = wait_for_async_task(
                rotkehlchen_api_server,
                task_id,
                timeout=ASYNC_TASK_WAIT_TIMEOUT * 5,
            )
            assert outcome['message'] == ''
            details = outcome['result']
        else:
            details = assert_proper_response_with_result(response)

    # The 2 new validators along with their depositor details should be there
    assert len(details) == 3
    assert details[0]['index'] == 9  # already checked above
    assert details[1]['index'] == new_index_2
    assert details[1]['eth1_depositor'] == '0x234EE9e35f8e9749A002fc42970D570DB716453B'
    assert details[2]['index'] == new_index_1
    assert details[2]['eth1_depositor'] == '0xc2288B408Dc872A1546F13E6eBFA9c94998316a2'
    warnings = rotki.msg_aggregator.consume_warnings()
    errors = rotki.msg_aggregator.consume_errors()
    assert len(warnings) == 0
    assert len(errors) == 0

    # query daily stats, first without cache -- requesting all
    json = {'only_cache': False}
    response = requests.post(
        api_url_for(
            rotkehlchen_api_server,
            'eth2dailystatsresource',
        ),
        json=json,
    )
    result = assert_proper_response_with_result(response)
    total_stats = len(result['entries'])
    assert total_stats == result['entries_total']
    assert total_stats == result['entries_found']
    full_sum_pnl = FVal(result['sum_pnl'])
    full_sum_usd_value = FVal(result['sum_usd_value'])
    calculated_sum_pnl = ZERO
    calculated_sum_usd_value = ZERO
    for entry in result['entries']:
        calculated_sum_pnl += FVal(entry['pnl']['amount'])
        calculated_sum_usd_value += FVal(entry['pnl']['usd_value'])
    assert full_sum_pnl.is_close(calculated_sum_pnl)
    assert full_sum_usd_value.is_close(calculated_sum_usd_value)

    # filter by validator_index
    queried_validators = [new_index_1, 9]
    json = {'only_cache': True, 'validators': queried_validators}
    response = requests.post(
        api_url_for(
            rotkehlchen_api_server,
            'eth2dailystatsresource',
        ),
        json=json,
    )
    result = assert_proper_response_with_result(response)
    assert result['entries_total'] == total_stats
    assert result['entries_found'] <= total_stats
    assert all(x['validator_index'] in queried_validators for x in result['entries'])

    # filter by validator_index and timestamp
    queried_validators = [new_index_1, 9]
    from_ts = 1613779200
    to_ts = 1632182400
    json = {'only_cache': True, 'validators': queried_validators, 'from_timestamp': from_ts, 'to_timestamp': to_ts}  # noqa: E501
    response = requests.post(
        api_url_for(
            rotkehlchen_api_server,
            'eth2dailystatsresource',
        ),
        json=json,
    )
    result = assert_proper_response_with_result(response)
    assert result['entries_total'] == total_stats
    assert result['entries_found'] <= total_stats
    assert len(result['entries']) == result['entries_found']
    full_sum_pnl = FVal(result['sum_pnl'])
    full_sum_usd_value = FVal(result['sum_usd_value'])
    calculated_sum_pnl = ZERO
    calculated_sum_usd_value = ZERO
    next_page_times = []
    for idx, entry in enumerate(result['entries']):
        calculated_sum_pnl += FVal(entry['pnl']['amount'])
        calculated_sum_usd_value += FVal(entry['pnl']['usd_value'])
        assert entry['validator_index'] in queried_validators
        time = entry['timestamp']
        assert time >= from_ts
        assert time <= to_ts
        # remember timestamps of entries 5-9 so the paginated query below
        # (offset=5, limit=5) can be checked against them
        if 5 <= idx <= 9:
            next_page_times.append(time)
        if idx >= result['entries_found'] - 1:
            continue
        # entries should be in descending timestamp order
        assert entry['timestamp'] >= result['entries'][idx + 1]['timestamp']
    assert full_sum_pnl.is_close(calculated_sum_pnl)
    assert full_sum_usd_value.is_close(calculated_sum_usd_value)

    # filter by validator_index and timestamp and add pagination
    json = {'only_cache': True, 'validators': queried_validators, 'from_timestamp': from_ts, 'to_timestamp': to_ts, 'limit': 5, 'offset': 5}  # noqa: E501
    response = requests.post(
        api_url_for(
            rotkehlchen_api_server,
            'eth2dailystatsresource',
        ),
        json=json,
    )
    result = assert_proper_response_with_result(response)
    assert result['entries_total'] == total_stats
    assert result['entries_found'] <= total_stats
    assert FVal(result['sum_pnl']) == full_sum_pnl, 'pagination should show same sum'
    assert FVal(result['sum_usd_value']) == full_sum_usd_value, 'pagination should show same sum'
    assert len(result['entries']) == 5
    for idx, entry in enumerate(result['entries']):
        assert entry['validator_index'] in queried_validators
        time = entry['timestamp']
        assert time >= from_ts
        assert time <= to_ts
        if idx <= 4:
            assert time == next_page_times[idx]
def test_remove_blockchain_accounts_async(
        rotkehlchen_api_server,
        ethereum_accounts,
        btc_accounts,
        number_of_eth_accounts,
):
    """A simpler version of the above test for removing blockchain accounts for async

    The main purpose of this test is to see that querying the endpoint asynchronously also works"""
    # Disable caching of query results
    rotki = rotkehlchen_api_server.rest_api.rotkehlchen
    rotki.blockchain.cache_ttl_secs = 0

    # Test by having balances queried before removing an account
    removed_eth_accounts = [ethereum_accounts[0], ethereum_accounts[2]]
    eth_accounts_after_removal = [ethereum_accounts[1], ethereum_accounts[3]]
    all_eth_balances = ['1000000', '2000000', '3000000', '4000000']
    token_balances = {'RDN': ['0', '250000000', '450000000', '0']}
    eth_balances_after_removal = ['2000000', '4000000']
    token_balances_after_removal = {'RDN': ['250000000', '0']}
    setup = setup_balances(
        rotki,
        ethereum_accounts=ethereum_accounts,
        btc_accounts=btc_accounts,
        eth_balances=all_eth_balances,
        token_balances=token_balances,
    )
    # Prime the balances (the response itself is not inspected here; the query
    # is done for its side effect of populating balance state)
    with setup.etherscan_patch, setup.bitcoin_patch:
        response = requests.get(
            api_url_for(
                rotkehlchen_api_server,
                "blockchainbalancesresource",
            ))

    # Re-create the mocks so the removal query sees fresh patches
    setup = setup_balances(
        rotki,
        ethereum_accounts=ethereum_accounts,
        btc_accounts=btc_accounts,
        eth_balances=all_eth_balances,
        token_balances=token_balances,
    )
    # The application has started with 4 ethereum accounts. Remove two and see that balances match
    with setup.etherscan_patch:
        response = requests.delete(api_url_for(
            rotkehlchen_api_server,
            "blockchainsaccountsresource",
            blockchain='ETH',
        ), json={'accounts': removed_eth_accounts, 'async_query': True})
        task_id = assert_ok_async_response(response)
        outcome = wait_for_async_task(rotkehlchen_api_server, task_id)
    assert_eth_balances_result(
        rotki=rotki,
        json_data=outcome,
        eth_accounts=eth_accounts_after_removal,
        eth_balances=eth_balances_after_removal,
        token_balances=token_balances_after_removal,
        also_btc=True,  # We queried all balances at the start
    )
    # Also make sure they are removed from the DB
    accounts = rotki.data.db.get_blockchain_accounts()
    assert len(accounts.eth) == 2
    assert all(acc in accounts.eth for acc in eth_accounts_after_removal)
    assert len(accounts.btc) == 2
    assert all(acc in accounts.btc for acc in btc_accounts)

    # Now try to query all balances to make sure the result is the stored
    with setup.etherscan_patch, setup.bitcoin_patch:
        response = requests.get(
            api_url_for(
                rotkehlchen_api_server,
                "blockchainbalancesresource",
            ))
    assert_proper_response(response)
    json_data = response.json()
    assert json_data['message'] == ''
    assert_eth_balances_result(
        rotki=rotki,
        json_data=json_data,
        eth_accounts=eth_accounts_after_removal,
        eth_balances=eth_balances_after_removal,
        token_balances=token_balances_after_removal,
        also_btc=True,
    )

    # Now we will try to remove a BTC account. Setup the mocking infrastructure again
    all_btc_accounts = [UNIT_BTC_ADDRESS1, UNIT_BTC_ADDRESS2]
    btc_accounts_after_removal = [UNIT_BTC_ADDRESS2]
    setup = setup_balances(
        rotki,
        ethereum_accounts=eth_accounts_after_removal,
        btc_accounts=all_btc_accounts,
        eth_balances=eth_balances_after_removal,
        token_balances=token_balances_after_removal,
        btc_balances=['3000000', '5000000'],
    )
    # remove the new BTC account
    with setup.bitcoin_patch:
        response = requests.delete(api_url_for(
            rotkehlchen_api_server,
            "blockchainsaccountsresource",
            blockchain='BTC',
        ), json={'accounts': [UNIT_BTC_ADDRESS1], 'async_query': True})
        task_id = assert_ok_async_response(response)
        outcome = wait_for_async_task(rotkehlchen_api_server, task_id)
    assert_btc_balances_result(
        json_data=outcome,
        btc_accounts=btc_accounts_after_removal,
        btc_balances=['5000000'],
        also_eth=True,
    )
    # Also make sure it's removed from the DB
    accounts = rotki.data.db.get_blockchain_accounts()
    assert len(accounts.eth) == 2
    assert all(acc in accounts.eth for acc in eth_accounts_after_removal)
    assert len(accounts.btc) == 1
    assert all(acc in accounts.btc for acc in btc_accounts_after_removal)

    # Now try to query all balances to make sure the result is also stored
    with setup.etherscan_patch, setup.bitcoin_patch:
        response = requests.get(
            api_url_for(
                rotkehlchen_api_server,
                "blockchainbalancesresource",
            ))
    assert_proper_response(response)
    json_data = response.json()
    assert json_data['message'] == ''
    assert_btc_balances_result(
        json_data=json_data,
        btc_accounts=btc_accounts_after_removal,
        btc_balances=['5000000'],
        also_eth=True,
    )
def test_add_blockchain_accounts_async(
        rotkehlchen_api_server,
        ethereum_accounts,
        btc_accounts,
        number_of_eth_accounts,
):
    """A simpler version of the above test for adding blockchain accounts for async

    The main purpose of this test is to see that querying the endpoint asynchronously also works"""
    # Disable caching of query results
    rotki = rotkehlchen_api_server.rest_api.rotkehlchen
    rotki.blockchain.cache_ttl_secs = 0

    # Test by having balances queried before adding an account
    eth_balances = ['1000000', '2000000']
    token_balances = {'RDN': ['0', '4000000']}
    setup = setup_balances(
        rotki,
        ethereum_accounts=ethereum_accounts,
        btc_accounts=btc_accounts,
        eth_balances=eth_balances,
        token_balances=token_balances,
    )
    new_eth_accounts = [make_ethereum_address(), make_ethereum_address()]
    all_eth_accounts = ethereum_accounts + new_eth_accounts
    eth_balances = ['1000000', '2000000', '3000000', '4000000']
    token_balances = {'RDN': ['0', '4000000', '0', '250000000']}
    # Re-create the mocks so they also cover the two new accounts
    setup = setup_balances(
        rotki,
        ethereum_accounts=all_eth_accounts,
        btc_accounts=btc_accounts,
        eth_balances=eth_balances,
        token_balances=token_balances,
    )
    # The application has started only with 2 ethereum accounts. Let's add two more
    with setup.etherscan_patch:
        response = requests.put(api_url_for(
            rotkehlchen_api_server,
            "blockchainsaccountsresource",
            blockchain='ETH',
        ), json={'accounts': new_eth_accounts, 'async_query': True})
        task_id = assert_ok_async_response(response)
        outcome = wait_for_async_task(rotkehlchen_api_server, task_id)
    assert_eth_balances_result(
        rotki=rotki,
        json_data=outcome,
        eth_accounts=all_eth_accounts,
        eth_balances=setup.eth_balances,
        token_balances=setup.token_balances,
        also_btc=False,  # All blockchain assets have not been queried yet
    )
    # Also make sure they are added in the DB
    accounts = rotki.data.db.get_blockchain_accounts()
    assert len(accounts.eth) == 4
    assert all(acc in accounts.eth for acc in all_eth_accounts)
    assert len(accounts.btc) == 2
    assert all(acc in accounts.btc for acc in btc_accounts)

    # Now try to query all balances to make sure the result is the stored
    with setup.etherscan_patch, setup.bitcoin_patch:
        response = requests.get(
            api_url_for(
                rotkehlchen_api_server,
                "blockchainbalancesresource",
            ))
    assert_proper_response(response)
    json_data = response.json()
    assert json_data['message'] == ''
    assert_eth_balances_result(
        rotki=rotki,
        json_data=json_data,
        eth_accounts=all_eth_accounts,
        eth_balances=setup.eth_balances,
        token_balances=setup.token_balances,
        also_btc=True,
    )

    # Now we will try to add a new BTC account. Setup the mocking infrastructure again
    all_btc_accounts = btc_accounts + [UNIT_BTC_ADDRESS3]
    setup = setup_balances(
        rotki,
        ethereum_accounts=all_eth_accounts,
        btc_accounts=all_btc_accounts,
        eth_balances=eth_balances,
        token_balances=token_balances,
        btc_balances=['3000000', '5000000', '600000000'],
    )
    # add the new BTC account
    with setup.bitcoin_patch:
        response = requests.put(api_url_for(
            rotkehlchen_api_server,
            "blockchainsaccountsresource",
            blockchain='BTC',
        ), json={'accounts': [UNIT_BTC_ADDRESS3], 'async_query': True})
        task_id = assert_ok_async_response(response)
        outcome = wait_for_async_task(rotkehlchen_api_server, task_id)
    assert_btc_balances_result(
        json_data=outcome,
        btc_accounts=all_btc_accounts,
        btc_balances=setup.btc_balances,
        also_eth=True,
    )
    # Also make sure it's added in the DB
    accounts = rotki.data.db.get_blockchain_accounts()
    assert len(accounts.eth) == 4
    assert all(acc in accounts.eth for acc in all_eth_accounts)
    assert len(accounts.btc) == 3
    assert all(acc in accounts.btc for acc in all_btc_accounts)

    # Now try to query all balances to make sure the result is also stored
    with setup.etherscan_patch, setup.bitcoin_patch:
        response = requests.get(
            api_url_for(
                rotkehlchen_api_server,
                "blockchainbalancesresource",
            ))
    assert_proper_response(response)
    json_data = response.json()
    assert json_data['message'] == ''
    assert_btc_balances_result(
        json_data=json_data,
        btc_accounts=all_btc_accounts,
        btc_balances=setup.btc_balances,
        also_eth=True,
    )
def test_query_transactions(rotkehlchen_api_server):
    """Test that querying the ethereum transactions endpoint works as expected

    This test uses real data. Found an ethereum address that has very few
    transactions and hopefully won't have more. If it does we can adjust the test.

    NOTE(review): a later chunk of this source defines another function named
    ``test_query_transactions``; if both live in the same module the later
    definition shadows this one and it never runs -- confirm they belong to
    separate test files.
    """
    async_query = random.choice([False, True])
    rotki = rotkehlchen_api_server.rest_api.rotkehlchen
    # Check that we get all transactions
    response = requests.get(
        api_url_for(
            rotkehlchen_api_server,
            'ethereumtransactionsresource',
        ),
        json={'async_query': async_query},
    )
    if async_query:
        task_id = assert_ok_async_response(response)
        outcome = wait_for_async_task(rotkehlchen_api_server, task_id)
        assert outcome['message'] == ''
        result = outcome['result']
    else:
        result = assert_proper_response_with_result(response)
    expected_result = EXPECTED_AFB7_TXS + EXPECTED_4193_TXS
    # transactions are returned newest-first
    expected_result.sort(key=lambda x: x['timestamp'])
    expected_result.reverse()
    assert result['entries'] == expected_result
    assert result['entries_found'] == len(expected_result)
    assert result['entries_limit'] == FREE_ETH_TX_LIMIT

    # Check that transactions per address and in a specific time range can be
    # queried and that this is from the DB and not etherscan
    def mock_etherscan_get(url, *args, **kwargs):  # pylint: disable=unused-argument
        return MockResponse(200, "{}")
    etherscan_patch = patch.object(rotki.etherscan.session,
                                   'get',
                                   wraps=mock_etherscan_get)
    with etherscan_patch as mock_call:
        response = requests.get(
            api_url_for(
                rotkehlchen_api_server,
                'per_address_ethereum_transactions_resource',
                address='0xaFB7ed3beBE50E0b62Fa862FAba93e7A46e59cA7',
            ),
            json={
                'async_query': async_query,
                "from_timestamp": 1461399856,
                "to_timestamp": 1494458860,
            },
        )
        if async_query:
            task_id = assert_ok_async_response(response)
            outcome = wait_for_async_task(rotkehlchen_api_server, task_id)
            assert outcome['message'] == ''
            result = outcome['result']
        else:
            result = assert_proper_response_with_result(response)

    # zero calls proves the filtered query was served from the DB
    assert mock_call.call_count == 0
    assert result['entries'] == EXPECTED_AFB7_TXS[2:4][::-1]
def test_get_events_history_filtering_by_timestamp(
        rotkehlchen_api_server,
        ethereum_accounts,  # pylint: disable=unused-argument
):
    """Test the events balances from 1627401169 to 1627401170 (both included)."""
    # Call time range
    query_start = 1627401169
    query_end = 1627401170
    async_query = random.choice([False, True])
    rotki = rotkehlchen_api_server.rest_api.rotkehlchen
    setup = setup_balances(
        rotki,
        ethereum_accounts=ethereum_accounts,
        eth_balances=['33000030003'],
        token_balances={},
        btc_accounts=None,
        original_queries=['zerion', 'logs', 'blocknobytime'],
    )
    # Force insert address' last used query range, for avoiding query all
    rotki.data.db.update_used_query_range(
        name=f'{SUSHISWAP_EVENTS_PREFIX}_{TEST_EVENTS_ADDRESS_1}',
        start_ts=Timestamp(0),
        end_ts=query_start,
    )
    with ExitStack() as stack:
        # patch ethereum/etherscan to not autodetect tokens
        setup.enter_ethereum_patches(stack)
        response = requests.get(
            api_url_for(
                rotkehlchen_api_server,
                'sushiswapeventshistoryresource',
            ),
            json={
                'async_query': async_query,
                'from_timestamp': query_start,
                'to_timestamp': query_end,
            },
        )
        if not async_query:
            result = assert_proper_response_with_result(response)
        else:
            task_id = assert_ok_async_response(response)
            outcome = wait_for_async_task(rotkehlchen_api_server, task_id, timeout=120)
            assert outcome['message'] == ''
            result = outcome['result']

    events_balances = result[TEST_EVENTS_ADDRESS_1]
    assert len(events_balances) == 1
    assert EXPECTED_EVENTS_BALANCES_1[0].serialize() == events_balances[0]

    # Make sure they end up in the DB
    sushi_event_types = [EventType.MINT_SUSHISWAP, EventType.BURN_SUSHISWAP]
    assert len(rotki.data.db.get_amm_events(sushi_event_types)) != 0
    # test sushiswap data purging from the db works
    response = requests.delete(api_url_for(
        rotkehlchen_api_server,
        'namedethereummoduledataresource',
        module_name='sushiswap',
    ))
    assert_simple_ok_response(response)
    assert len(rotki.data.db.get_amm_events(sushi_event_types)) == 0
def test_get_events(
        rotkehlchen_api_server,
        ethereum_accounts,  # pylint: disable=unused-argument
        rotki_premium_credentials,  # pylint: disable=unused-argument
        start_with_valid_premium,  # pylint: disable=unused-argument
):
    """Query the AdEx history endpoint and verify the returned staking events,
    that they are persisted in the DB, and that module data purging works."""
    async_query = random.choice([False, True])
    rotki = rotkehlchen_api_server.rest_api.rotkehlchen
    # Set module premium is required for calling `get_balances()`
    premium = None
    if start_with_valid_premium:
        premium = Premium(rotki_premium_credentials)
    rotki.chain_manager.adex.premium = premium
    setup = setup_balances(
        rotki,
        ethereum_accounts=ethereum_accounts,
        btc_accounts=None,
        original_queries=['zerion', 'logs', 'blocknobytime'],
    )
    with ExitStack() as stack:
        # patch ethereum/etherscan to not autodetect tokens
        setup.enter_ethereum_patches(stack)
        response = requests.get(
            api_url_for(rotkehlchen_api_server, 'adexhistoryresource'),
            json={'async_query': async_query, 'to_timestamp': 1611747322},
        )
        if async_query:
            task_id = assert_ok_async_response(response)
            outcome = wait_for_async_task(rotkehlchen_api_server, task_id)
            assert outcome['message'] == ''
            result = outcome['result']
        else:
            result = assert_proper_response_with_result(response)

    identity_address = '0x2a6c38D16BFdc7b4a20f1F982c058F07BDCe9204'
    tom_pool_id = '0x2ce0c96383fb229d9776f33846e983a956a7d95844fac57b180ed0071d93bb28'
    bond_id = '0x540cab9883923c01e657d5da4ca5674b6e4626b4a148224635495502d674c7c5'
    channel_id = '0x30d87bab0ef1e7f8b4c3b894ca2beed41bbd54c481f31e5791c1e855c9dbf4ba'
    result = result[ADEX_TEST_ADDR]
    expected_events = [
        Bond(
            tx_hash='0x9989f47c6c0a761f98f910ac24e2438d858be96c12124a13be4bb4b3150c55ea',
            address=ADEX_TEST_ADDR,
            identity_address=identity_address,
            timestamp=1604366004,
            bond_id=bond_id,
            pool_id=tom_pool_id,
            value=Balance(FVal(100000), FVal(200000)),
            nonce=0,
            slashed_at=0,
        ),
        ChannelWithdraw(
            tx_hash='0xa9ee91af823c0173fc5ada908ff9fe3f4d7c84a2c9da795f0889b3f4ace75b13',
            address=ADEX_TEST_ADDR,
            identity_address=identity_address,
            timestamp=1607453764,
            channel_id=channel_id,
            pool_id=tom_pool_id,
            value=Balance(FVal('5056.894263641728544592'), FVal('0')),
            token=None,
        ),
        Unbond(
            tx_hash='0xa9ee91af823c0173fc5ada908ff9fe3f4d7c84a2c9da795f0889b3f4ace75b13',
            address=ADEX_TEST_ADDR,
            identity_address=identity_address,
            timestamp=1607453764,
            bond_id=bond_id,
            pool_id=tom_pool_id,
            value=Balance(FVal(100000), FVal(200000)),
        )
    ]
    assert len(result['events']) == 8
    # only the first events are pinned exactly; the rest are just counted
    assert result['events'][:len(expected_events)] == [
        x.serialize() for x in expected_events
    ]
    assert 'staking_details' in result
    # Make sure events end up in the DB
    assert len(rotki.data.db.get_adex_events()) != 0
    # test adex data purging from the db works
    response = requests.delete(
        api_url_for(
            rotkehlchen_api_server,
            'namedethereummoduledataresource',
            module_name='adex',
        ))
    assert_simple_ok_response(response)
    assert len(rotki.data.db.get_adex_events()) == 0
def test_query_transactions(rotkehlchen_api_server):
    """Query the ethereum transactions endpoint and verify its behavior.

    This test uses real data. It checks the full transaction listing, marking
    specific transactions as ignored for accounting, and the per-address
    time-range query (which must be answered from the DB, not etherscan).
    """
    async_query = random.choice([False, True])
    rotki = rotkehlchen_api_server.rest_api.rotkehlchen

    # Check that we get all transactions
    response = requests.get(
        api_url_for(
            rotkehlchen_api_server,
            'ethereumtransactionsresource',
        ),
        json={'async_query': async_query},
    )
    if async_query:
        task_id = assert_ok_async_response(response)
        outcome = wait_for_async_task(rotkehlchen_api_server, task_id)
        assert outcome['message'] == ''
        result = outcome['result']
    else:
        result = assert_proper_response_with_result(response)

    expected_result = EXPECTED_AFB7_TXS + EXPECTED_4193_TXS
    expected_result.sort(key=lambda x: x['timestamp'])
    expected_result.reverse()

    # Make sure that all of the transactions we expect are there and in order
    # There can be more transactions (since the address can make more)
    # but this check ignores them
    entries = [x['entry'] for x in result['entries']]
    assert all(x['ignored_in_accounting'] is False for x in result['entries']), 'by default nothing should be ignored'  # noqa: E501
    last_idx = 0
    for expected_entry in expected_result:
        assert expected_entry in entries
        idx = entries.index(expected_entry)
        if last_idx != 0:
            assert idx == last_idx + 1
        last_idx = idx
    assert result['entries_found'] >= len(expected_result)
    assert result['entries_limit'] == FREE_ETH_TX_LIMIT

    # now let's ignore two transactions
    ignored_ids = [
        tx['tx_hash'] + tx['from_address'] + str(tx['nonce'])
        for tx in (EXPECTED_AFB7_TXS[2], EXPECTED_AFB7_TXS[3])
    ]
    response = requests.put(
        api_url_for(
            rotkehlchen_api_server,
            "ignoredactionsresource",
        ),
        json={'action_type': 'ethereum transaction', 'action_ids': ignored_ids},
    )
    result = assert_proper_response_with_result(response)
    assert result == {'ethereum transaction': ignored_ids}

    # Check that transactions per address and in a specific time range can be
    # queried and that this is from the DB and not etherscan
    def fake_etherscan_get(url, *args, **kwargs):  # pylint: disable=unused-argument
        return MockResponse(200, "{}")

    etherscan_patch = patch.object(rotki.etherscan.session, 'get', wraps=fake_etherscan_get)
    with etherscan_patch as mocked_get:
        response = requests.get(
            api_url_for(
                rotkehlchen_api_server,
                'per_address_ethereum_transactions_resource',
                address='0xaFB7ed3beBE50E0b62Fa862FAba93e7A46e59cA7',
            ),
            json={
                'async_query': async_query,
                "from_timestamp": 1461399856,
                "to_timestamp": 1494458860,
            },
        )
        if async_query:
            task_id = assert_ok_async_response(response)
            outcome = wait_for_async_task(rotkehlchen_api_server, task_id)
            assert outcome['message'] == ''
            result = outcome['result']
        else:
            result = assert_proper_response_with_result(response)
        # zero calls proves the answer came from the DB
        assert mocked_get.call_count == 0

    entries = [x['entry'] for x in result['entries']]
    assert entries == EXPECTED_AFB7_TXS[2:4][::-1]
    msg = 'the transactions we ignored have not been ignored for accounting'
    assert all(x['ignored_in_accounting'] is True for x in result['entries']), msg
def test_multiple_balance_queries_not_concurrent(
        rotkehlchen_api_server_with_exchanges,
        ethereum_accounts,
        btc_accounts,
        separate_blockchain_calls,
):
    """Test multiple different balance query requests happening concurrently

    This tests that if multiple balance query requests happen concurrently we
    do not end up doing them multiple times, but reuse the results thanks to cache.

    Try running both all blockchain balances in one call and each blockchain call separately.
    """
    rotki = rotkehlchen_api_server_with_exchanges.rest_api.rotkehlchen
    setup = setup_balances(rotki, ethereum_accounts, btc_accounts)

    # Wrap (not replace) the underlying query functions so call counts can be checked
    multieth_balance_patch = patch.object(
        rotki.chain_manager.ethereum,
        'get_multieth_balance',
        wraps=rotki.chain_manager.ethereum.get_multieth_balance,
    )
    btc_balances_patch = patch(
        'rotkehlchen.chain.manager.get_bitcoin_addresses_balances',
        wraps=get_bitcoin_addresses_balances,
    )
    binance = rotki.exchange_manager.connected_exchanges['binance']
    binance_querydict_patch = patch.object(binance, 'api_query_dict', wraps=binance.api_query_dict)

    # Test all balances request by requesting to not save the data
    with ExitStack() as stack:
        setup.enter_all_patches(stack)
        eth = stack.enter_context(multieth_balance_patch)
        btc = stack.enter_context(btc_balances_patch)
        bn = stack.enter_context(binance_querydict_patch)
        # Fire off all queries as async tasks before waiting on any of them,
        # so that they actually run concurrently
        response = requests.get(
            api_url_for(
                rotkehlchen_api_server_with_exchanges,
                "allbalancesresource",
            ), json={'async_query': True},
        )
        task_id_all = assert_ok_async_response(response)
        response = requests.get(api_url_for(
            rotkehlchen_api_server_with_exchanges,
            "named_exchanges_balances_resource",
            name='binance',
        ), json={'async_query': True})
        task_id_one_exchange = assert_ok_async_response(response)
        if separate_blockchain_calls:
            response = requests.get(api_url_for(
                rotkehlchen_api_server_with_exchanges,
                "blockchainbalancesresource",
            ), json={'async_query': True, 'blockchain': 'ETH'})
            task_id_blockchain_eth = assert_ok_async_response(response)
            response = requests.get(api_url_for(
                rotkehlchen_api_server_with_exchanges,
                "blockchainbalancesresource",
            ), json={'async_query': True, 'blockchain': 'BTC'})
            task_id_blockchain_btc = assert_ok_async_response(response)
        else:
            response = requests.get(api_url_for(
                rotkehlchen_api_server_with_exchanges,
                "blockchainbalancesresource",
            ), json={'async_query': True})
            task_id_blockchain = assert_ok_async_response(response)

        outcome_all = wait_for_async_task_with_result(
            rotkehlchen_api_server_with_exchanges,
            task_id_all,
            timeout=ASYNC_TASK_WAIT_TIMEOUT * 2,
        )
        outcome_one_exchange = wait_for_async_task(
            rotkehlchen_api_server_with_exchanges,
            task_id_one_exchange,
            timeout=ASYNC_TASK_WAIT_TIMEOUT * 2,
        )
        if separate_blockchain_calls:
            outcome_eth = wait_for_async_task_with_result(
                rotkehlchen_api_server_with_exchanges,
                task_id_blockchain_eth,
                timeout=ASYNC_TASK_WAIT_TIMEOUT * 2,
            )
            outcome_btc = wait_for_async_task_with_result(
                rotkehlchen_api_server_with_exchanges,
                task_id_blockchain_btc,
                timeout=ASYNC_TASK_WAIT_TIMEOUT * 2,
            )
        else:
            outcome_blockchain = wait_for_async_task_with_result(
                rotkehlchen_api_server_with_exchanges,
                task_id_blockchain,
                timeout=ASYNC_TASK_WAIT_TIMEOUT * 2,
            )
        # The cache must have been reused, so each wrapped query fires the
        # minimum number of times despite multiple concurrent requests.
        assert eth.call_count == 1, 'eth balance query should only fire once'
        assert btc.call_count == 1, 'btc balance query should only happen once'
        # Bugfix: the message used to say "2 calls" while asserting 3
        assert bn.call_count == 3, 'binance balance query should do 3 calls'

    assert_all_balances(
        result=outcome_all,
        db=rotki.data.db,
        expected_data_in_db=True,
        setup=setup,
    )
    assert_binance_balances_result(outcome_one_exchange['result'])
    if not separate_blockchain_calls:
        # The single blockchain query answers for both chains
        outcome_eth = outcome_blockchain
        outcome_btc = outcome_blockchain

    assert_eth_balances_result(
        rotki=rotki,
        result=outcome_eth,
        eth_accounts=ethereum_accounts,
        eth_balances=setup.eth_balances,
        token_balances=setup.token_balances,
        also_btc=not separate_blockchain_calls,
    )
    assert_btc_balances_result(
        result=outcome_btc,
        btc_accounts=btc_accounts,
        btc_balances=setup.btc_balances,
        also_eth=not separate_blockchain_calls,
    )
def test_exchange_query_trades(rotkehlchen_api_server_with_exchanges):
    """Test that using the exchange trades query endpoint works fine"""
    async_query = random.choice([False, True])
    server = rotkehlchen_api_server_with_exchanges
    setup = mock_history_processing_and_exchanges(server.rest_api.rotkehlchen)

    def get_result(response):
        """Resolve a (possibly async) response into its result payload"""
        if async_query:
            task_id = assert_ok_async_response(response)
            outcome = wait_for_async_task(rotkehlchen_api_server_with_exchanges, task_id)
            return outcome['result']
        return assert_proper_response_with_result(response)

    def assert_okay(response):
        """Helper function for DRY checking below assertions"""
        result = get_result(response)
        trades = result['entries']
        assert_binance_trades_result([x['entry'] for x in trades if x['entry']['location'] == 'binance'])  # noqa: E501
        assert_poloniex_trades_result(
            trades=[x['entry'] for x in trades if x['entry']['location'] == 'poloniex'],
            trades_to_check=(2,),
        )

    # query trades of one specific exchange
    with setup.binance_patch:
        response = requests.get(
            api_url_for(
                server,
                "tradesresource",
            ),
            json={'location': 'binance', 'async_query': async_query},
        )
        result = get_result(response)
    assert result['entries_found'] > 0
    assert result['entries_limit'] == FREE_TRADES_LIMIT
    assert_binance_trades_result([x['entry'] for x in result['entries']])

    # query trades of all exchanges
    with setup.binance_patch, setup.polo_patch:
        response = requests.get(
            api_url_for(server, "tradesresource"),
            json={'async_query': async_query},
        )
        result = get_result(response)
    trades = result['entries']
    assert_binance_trades_result([x['entry'] for x in trades if x['entry']['location'] == 'binance'])  # noqa: E501
    assert_poloniex_trades_result([x['entry'] for x in trades if x['entry']['location'] == 'poloniex'])  # noqa: E501

    # and now query them in a specific time range excluding two of poloniex's trades
    data = {'from_timestamp': 1499865548, 'to_timestamp': 1539713118, 'async_query': async_query}
    with setup.binance_patch, setup.polo_patch:
        response = requests.get(api_url_for(server, "tradesresource"), json=data)
        assert_okay(response)
    # do the same but with query args. This serves as test of from/to timestamp with query args
    with setup.binance_patch, setup.polo_patch:
        response = requests.get(api_url_for(server, "tradesresource") + '?' + urlencode(data))
        assert_okay(response)
def test_query_yearn_vault_history(rotkehlchen_api_server, ethereum_accounts):
    """Check querying the yearn vaults history endpoint works. Uses real data.

    The query is run twice so that the second iteration exercises the path
    that loads the already-saved events from the DB. Afterwards the
    yearn_vaults module data is purged and the DB is checked to be empty.
    """
    async_query = random.choice([True, False])
    rotki = rotkehlchen_api_server.rest_api.rotkehlchen
    setup = setup_balances(
        rotki,
        ethereum_accounts=ethereum_accounts,
        btc_accounts=None,
        original_queries=['zerion', 'logs', 'blocknobytime'],
    )
    for _ in range(2):  # Run 2 times to make sure that loading data from DB the 2nd time works fine
        with ExitStack() as stack:
            # patch ethereum/etherscan to not autodetect tokens (not needed with infura)
            setup.enter_ethereum_patches(stack)
            response = requests.get(api_url_for(
                rotkehlchen_api_server,
                "yearnvaultshistoryresource",
            ), json={'async_query': async_query})
            if async_query:
                task_id = assert_ok_async_response(response)
                # real chain queries can be slow, hence the long timeout
                outcome = wait_for_async_task(rotkehlchen_api_server, task_id, timeout=600)
                assert outcome['message'] == ''
                result = outcome['result']
            else:
                result = assert_proper_response_with_result(response)

        # Make sure some data was saved in the DB after first call
        events = rotki.data.db.get_yearn_vaults_events(
            TEST_ACC1,
            YEARN_VAULTS['yyDAI+yUSDC+yUSDT+yTUSD'],
        )
        assert len(events) >= 11

        result = result[TEST_ACC1]
        check_vault_history('YALINK Vault', EXPECTED_HISTORY, result)
        check_vault_history('YCRV Vault', EXPECTED_HISTORY, result)
        check_vault_history('YSRENCURVE Vault', EXPECTED_HISTORY, result)
        check_vault_history('YUSDC Vault', EXPECTED_HISTORY, result)
        check_vault_history('YUSDT Vault', EXPECTED_HISTORY, result)
        check_vault_history('YYFI Vault', EXPECTED_HISTORY, result)

    # Make sure events end up in the DB
    # test yearn vault data purging from the db works
    response = requests.delete(api_url_for(
        rotkehlchen_api_server,
        'ethereummoduledataresource',
        module_name='yearn_vaults',
    ))
    assert_simple_ok_response(response)
    events = rotki.data.db.get_yearn_vaults_events(
        TEST_ACC1,
        YEARN_VAULTS['yyDAI+yUSDC+yUSDT+yTUSD'],
    )
    assert len(events) == 0
def test_query_asset_movements(rotkehlchen_api_server_with_exchanges):
    """Test that using the asset movements query endpoint works fine"""
    async_query = random.choice([False, True])
    server = rotkehlchen_api_server_with_exchanges
    setup = prepare_rotki_for_history_processing_test(server.rest_api.rotkehlchen)

    def get_result(response):
        """Resolve a (possibly async) response into its result payload"""
        if async_query:
            task_id = assert_ok_async_response(response)
            outcome = wait_for_async_task(rotkehlchen_api_server_with_exchanges, task_id)
            return outcome['result']
        return assert_proper_response_with_result(response)

    def assert_okay(response):
        """Helper function for DRY checking below assertions"""
        result = get_result(response)
        movements = result['entries']
        assert_poloniex_asset_movements(
            to_check_list=[x['entry'] for x in movements if x['entry']['location'] == 'poloniex'],
            deserialized=True,
            movements_to_check=(1, 2),
        )
        msg = 'poloniex asset movements should have now been ignored for accounting'
        assert all(x['ignored_in_accounting'] is True for x in movements if x['entry']['location'] == 'poloniex'), msg  # noqa: E501
        assert_kraken_asset_movements(
            to_check_list=[x['entry'] for x in movements if x['entry']['location'] == 'kraken'],
            deserialized=True,
            movements_to_check=(0, 1, 2),
        )

    # query asset movements of one specific exchange
    with setup.polo_patch:
        response = requests.get(
            api_url_for(
                server,
                "assetmovementsresource",
            ),
            json={'location': 'poloniex', 'async_query': async_query},
        )
        result = get_result(response)
    assert result['entries_found'] == 4
    assert result['entries_limit'] == FREE_ASSET_MOVEMENTS_LIMIT
    poloniex_ids = [x['entry']['identifier'] for x in result['entries']]
    assert_poloniex_asset_movements([x['entry'] for x in result['entries']], deserialized=True)
    assert all(x['ignored_in_accounting'] is False for x in result['entries']), 'ignored should be false'  # noqa: E501

    # now let's ignore all poloniex action ids
    response = requests.put(
        api_url_for(
            rotkehlchen_api_server_with_exchanges,
            "ignoredactionsresource",
        ),
        json={'action_type': 'asset movement', 'action_ids': poloniex_ids},
    )
    result = assert_proper_response_with_result(response)
    assert set(result['asset movement']) == set(poloniex_ids)

    # query asset movements of all exchanges
    with setup.polo_patch:
        response = requests.get(
            api_url_for(server, "assetmovementsresource"),
            json={'async_query': async_query},
        )
        result = get_result(response)
    movements = result['entries']
    assert_poloniex_asset_movements([x['entry'] for x in movements if x['entry']['location'] == 'poloniex'], True)  # noqa: E501
    assert_kraken_asset_movements([x['entry'] for x in movements if x['entry']['location'] == 'kraken'], True)  # noqa: E501

    # and now query them in a specific time range excluding some asset movements
    data = {'from_timestamp': 1439994442, 'to_timestamp': 1458994442, 'async_query': async_query}
    with setup.polo_patch:
        response = requests.get(api_url_for(server, "assetmovementsresource"), json=data)
        assert_okay(response)
    # do the same but with query args. This serves as test of from/to timestamp with query args
    with setup.polo_patch:
        response = requests.get(api_url_for(server, "assetmovementsresource") + '?' + urlencode(data))
        assert_okay(response)
def test_get_events_history_filtering_by_timestamp_case1(
        rotkehlchen_api_server,
        ethereum_accounts,  # pylint: disable=unused-argument
        rotki_premium_credentials,  # pylint: disable=unused-argument
        start_with_valid_premium,  # pylint: disable=unused-argument
):
    """Test the events balances from 1604273256 to 1604283808 (both included).

    LPs involved by the address within this time range: 1, $BASED-WETH

    By calling the endpoint with a specific time range:
      - Not all the events are queried.
      - The events balances do not factorise the current balances in the
        protocol (meaning the response amounts should be assertable).
    """
    # Call time range
    query_start_ts = 1604273256
    query_end_ts = 1604283808
    async_query = random.choice([False, True])
    rotki = rotkehlchen_api_server.rest_api.rotkehlchen
    setup = setup_balances(
        rotki,
        ethereum_accounts=ethereum_accounts,
        eth_balances=['33000030003'],
        token_balances={},
        btc_accounts=None,
        original_queries=['zerion', 'logs', 'blocknobytime'],
    )
    # Force insert address' last used query range, for avoiding query all
    rotki.data.db.update_used_query_range(
        name=f'{UNISWAP_EVENTS_PREFIX}_{TEST_EVENTS_ADDRESS_1}',
        start_ts=Timestamp(0),
        end_ts=query_start_ts,
    )
    with ExitStack() as stack:
        # patch ethereum/etherscan to not autodetect tokens
        setup.enter_ethereum_patches(stack)
        response = requests.get(
            api_url_for(
                rotkehlchen_api_server,
                'uniswapeventshistoryresource',
            ),
            json={
                'async_query': async_query,
                'from_timestamp': query_start_ts,
                'to_timestamp': query_end_ts,
            },
        )
        if async_query:
            task_id = assert_ok_async_response(response)
            outcome = wait_for_async_task(rotkehlchen_api_server, task_id, timeout=120)
            assert outcome['message'] == ''
            result = outcome['result']
        else:
            result = assert_proper_response_with_result(response)

    events_balances = result[TEST_EVENTS_ADDRESS_1]
    assert len(events_balances) == 1
    assert events_balances[0] == EXPECTED_EVENTS_BALANCES_1[0].serialize()
def test_query_aave_balances(rotkehlchen_api_server, ethereum_accounts):
    """Check querying the aave balances endpoint works. Uses real data.

    TODO: Here we should use a test account for which we will know what
    balances it has and we never modify
    """
    async_query = random.choice([False, True])
    rotki = rotkehlchen_api_server.rest_api.rotkehlchen
    setup = setup_balances(
        rotki,
        ethereum_accounts=ethereum_accounts,
        btc_accounts=None,
        original_queries=['zerion'],
    )
    with ExitStack() as stack:
        # patch ethereum/etherscan to not autodetect tokens
        setup.enter_ethereum_patches(stack)
        response = requests.get(api_url_for(
            rotkehlchen_api_server,
            "aavebalancesresource",
        ), json={'async_query': async_query})
        if async_query:
            task_id = assert_ok_async_response(response)
            outcome = wait_for_async_task(rotkehlchen_api_server, task_id)
            assert outcome['message'] == ''
            result = outcome['result']
        else:
            result = assert_proper_response_with_result(response)

    if len(result) == 0:
        # Real-data test: nothing to assert if the accounts hold no balances
        test_warnings.warn(UserWarning(
            f'Test account {AAVE_BALANCESV1_TEST_ACC} and {AAVE_BALANCESV2_TEST_ACC} have no aave balances',  # noqa: E501
        ))
        return

    def _assert_valid_entries(balances: Dict[str, Any]) -> None:
        """Sanity-check the lending/borrowing structure of one account's balances.

        Bugfix: this used to read ``v1_balances['lending']`` from the enclosing
        scope instead of the ``balances`` parameter, so the lending entries of
        the v2 account were never actually checked.
        """
        lending = balances['lending']
        for entry in lending.values():
            assert len(entry) == 2
            assert len(entry['balance']) == 2
            assert 'amount' in entry['balance']
            assert 'usd_value' in entry['balance']
            assert '%' in entry['apy']
        borrowing = balances['borrowing']
        for entry in borrowing.values():
            assert len(entry) == 3
            assert len(entry['balance']) == 2
            assert 'amount' in entry['balance']
            assert 'usd_value' in entry['balance']
            assert '%' in entry['variable_apr']
            assert '%' in entry['stable_apr']

    v1_balances = result.get(AAVE_BALANCESV1_TEST_ACC)
    if v1_balances:
        _assert_valid_entries(v1_balances)
    else:
        test_warnings.warn(UserWarning(
            f'Test account {AAVE_BALANCESV1_TEST_ACC} has no aave v1 balances',
        ))
    v2_balances = result.get(AAVE_BALANCESV2_TEST_ACC)
    if v2_balances:
        _assert_valid_entries(v2_balances)
    else:
        test_warnings.warn(UserWarning(
            f'Test account {AAVE_BALANCESV2_TEST_ACC} has no aave v2 balances',
        ))