def test_add_delete_xpub(rotkehlchen_api_server):
    """Test adding and deleting a BTC xpub together with its derived addresses.

    This test uses real world data (queries actual BTC balances)

    Test data from here:
    https://github.com/LedgerHQ/bitcoin-keychain-svc/blob/744736af1819cdab0a46ea7faf834008aeade6b1/integration/p2pkh_keychain_test.go#L40-L95
    """
    # Disable caching of query results
    rotki = rotkehlchen_api_server.rest_api.rotkehlchen
    rotki.chain_manager.cache_ttl_secs = 0
    async_query = random.choice([False, True])

    tag1 = {
        'name': 'ledger',
        'description': 'My ledger accounts',
        'background_color': 'ffffff',
        'foreground_color': '000000',
    }
    response = requests.put(
        api_url_for(
            rotkehlchen_api_server,
            'tagsresource',
        ), json=tag1,
    )
    # FIX: the original never asserted on the first tag creation response, so a
    # failure to create the 'ledger' tag went undetected until later assertions.
    assert_proper_response(response)
    tag2 = {
        'name': 'public',
        'description': 'My public accounts',
        'background_color': 'ffffff',
        'foreground_color': '000000',
    }
    response = requests.put(
        api_url_for(
            rotkehlchen_api_server,
            'tagsresource',
        ), json=tag2,
    )
    assert_proper_response(response)

    # Add an xpub that derives addresses with balances
    # NOTE: '# noqa: E501' (colon attached) is the correct flake8 form; the
    # previous '# noqa : E501' acted as a bare noqa suppressing all codes.
    xpub1 = 'xpub6DCi5iJ57ZPd5qPzvTm5hUt6X23TJdh9H4NjNsNbt7t7UuTMJfawQWsdWRFhfLwkiMkB1rQ4ZJWLB9YBnzR7kbs9N8b2PsKZgKUHQm1X4or'  # noqa: E501
    xpub1_label = 'ledger_test_xpub'
    xpub1_tags = ['ledger', 'public']
    json_data = {
        'async_query': async_query,
        'xpub': xpub1,
        'label': xpub1_label,
        'tags': xpub1_tags,
    }
    response = requests.put(api_url_for(
        rotkehlchen_api_server,
        'btcxpubresource',
    ), json=json_data)
    if async_query:
        task_id = assert_ok_async_response(response)
        outcome = wait_for_async_task_with_result(rotkehlchen_api_server, task_id, timeout=180)
    else:
        outcome = assert_proper_response_with_result(response)
    _check_xpub_addition_outcome(outcome, xpub1)

    # Make sure that adding existing xpub fails
    json_data = {
        'async_query': False,
        'xpub': xpub1,
        'label': xpub1_label,
        'tags': xpub1_tags,
    }
    response = requests.put(api_url_for(
        rotkehlchen_api_server,
        'btcxpubresource',
    ), json=json_data)
    assert_error_response(
        response=response,
        contained_in_msg=f'Xpub {xpub1} with derivation path None is already tracked',
        status_code=HTTPStatus.BAD_REQUEST,
    )

    # Add an xpub with no derived addresses
    xpub2 = 'xpub68V4ZQQ62mea7ZUKn2urQu47Bdn2Wr7SxrBxBDDwE3kjytj361YBGSKDT4WoBrE5htrSB8eAMe59NPnKrcAbiv2veN5GQUmfdjRddD1Hxrk'  # noqa: E501
    xpub2_label = None
    xpub2_tags = None
    json_data = {
        'async_query': async_query,
        'xpub': xpub2,
        'label': xpub2_label,
        'tags': xpub2_tags,
    }
    response = requests.put(api_url_for(
        rotkehlchen_api_server,
        'btcxpubresource',
    ), json=json_data)
    if async_query:
        task_id = assert_ok_async_response(response)
        outcome = wait_for_async_task_with_result(rotkehlchen_api_server, task_id, timeout=180)
    else:
        outcome = assert_proper_response_with_result(response)
    # xpub2 derives no addresses, so the tracked balances are still only xpub1's
    _check_xpub_addition_outcome(outcome, xpub1)

    # Also make sure that blockchain account data endpoint returns everything correctly
    response = requests.get(api_url_for(
        rotkehlchen_api_server,
        'blockchainsaccountsresource',
        blockchain='BTC',
    ))
    outcome = assert_proper_response_with_result(response)
    assert len(outcome['standalone']) == 2
    for entry in outcome['standalone']:
        assert entry['address'] in (UNIT_BTC_ADDRESS1, UNIT_BTC_ADDRESS2)
        assert entry['tags'] is None
        assert entry['label'] is None
    assert len(outcome['xpubs']) == 2
    for entry in outcome['xpubs']:
        assert len(entry) == 5
        if entry['xpub'] == xpub1:
            for address_data in entry['addresses']:
                assert address_data['address'] in EXPECTED_XPUB_ADDESSES
                assert address_data['label'] is None
                assert address_data['tags'] == xpub1_tags
        else:
            assert entry['xpub'] == xpub2
            assert entry['addresses'] is None
            assert entry['label'] is None
            assert entry['tags'] is None

    # Now delete the xpub and make sure all derived addresses are gone
    json_data = {
        'async_query': async_query,
        'xpub': xpub1,
        'derivation_path': None,
    }
    response = requests.delete(api_url_for(
        rotkehlchen_api_server,
        'btcxpubresource',
    ), json=json_data)
    if async_query:
        task_id = assert_ok_async_response(response)
        outcome = wait_for_async_task_with_result(rotkehlchen_api_server, task_id, timeout=180)
    else:
        outcome = assert_proper_response_with_result(response)
    btc = outcome['per_account']['BTC']
    assert len(btc['standalone']) == 2
    assert UNIT_BTC_ADDRESS1 in btc['standalone']
    assert UNIT_BTC_ADDRESS2 in btc['standalone']
    assert 'xpubs' not in btc
    totals = outcome['totals']['assets']
    assert totals['BTC']['amount'] is not None
    assert totals['BTC']['usd_value'] is not None

    # Also make sure all mappings are gone from the DB
    cursor = rotki.data.db.conn.cursor()
    result = cursor.execute('SELECT object_reference from tag_mappings;').fetchall()
    assert len(result) == 0, 'all tag mappings should have been deleted'
    result = cursor.execute('SELECT * from xpub_mappings WHERE xpub=?', (xpub1,)).fetchall()
    assert len(result) == 0, 'all xpub mappings should have been deleted'
def test_query_transactions_from_to_address(
        rotkehlchen_api_server,
        ethereum_accounts,
):
    """Make sure that if a transaction is just being sent to an address it's also returned."""
    start_ts = 0
    end_ts = 1598453214
    rotki = rotkehlchen_api_server.rest_api.rotkehlchen
    db = rotki.data.db
    # (hash, from, to, nonce) for the three fixture transactions. All other
    # fields are identical across them, so build the objects in one pass.
    tx_params = [
        (b'1', ethereum_accounts[0], make_ethereum_address(), 0),
        (b'2', ethereum_accounts[0], ethereum_accounts[1], 1),
        (b'3', make_ethereum_address(), ethereum_accounts[0], 55),
    ]
    transactions = [EthereumTransaction(
        tx_hash=tx_hash,
        timestamp=0,
        block_number=0,
        from_address=from_addr,
        to_address=to_addr,
        value=1,
        gas=1,
        gas_price=1,
        gas_used=1,
        input_data=b'',
        nonce=nonce,
    ) for tx_hash, from_addr, to_addr, nonce in tx_params]
    db.add_ethereum_transactions(transactions, from_etherscan=True)
    # Also make sure to update query ranges so as not to query etherscan at all
    for address in ethereum_accounts:
        DBQueryRanges(db).update_used_query_range(
            location_string=f'ethtxs_{address}',
            start_ts=start_ts,
            end_ts=end_ts,
            ranges_to_query=[],
        )

    # account[0] appears in all 3 transactions, account[1] only in the second
    expected_entries = {ethereum_accounts[0]: 3, ethereum_accounts[1]: 1}
    # Check that we get all transactions correctly even if we query two times
    for _ in range(2):
        for address in ethereum_accounts:
            response = requests.get(
                api_url_for(
                    rotkehlchen_api_server,
                    'ethereumtransactionsresource',
                ), json={
                    'from_timestamp': start_ts,
                    'to_timestamp': end_ts,
                    'address': address,
                },
            )
            result = assert_proper_response_with_result(response)
            assert len(result['entries']) == expected_entries[address]
            assert result['entries_found'] == 3
def test_transaction_same_hash_same_nonce_two_tracked_accounts(
        rotkehlchen_api_server,
        ethereum_accounts,
):
    """Make sure that if we track two addresses and they send one transaction
    to each other it's not counted as duplicate in the DB but is returned
    when queried by each of the two addresses"""
    rotki = rotkehlchen_api_server.rest_api.rotkehlchen

    def mock_etherscan_transaction_response(etherscan: Etherscan, eth_accounts):
        # Patches the etherscan session so that both tracked accounts see the
        # same tx (same hash/nonce), and account[1] additionally sees a second
        # unrelated outgoing tx.
        def mocked_request_dict(url, *_args, **_kwargs):
            # tx sent from eth_accounts[0] to eth_accounts[1]: returned for both accounts
            addr1_tx = f"""{{"blockNumber":"1","timeStamp":"1","hash":"0x9c81f44c29ff0226f835cd0a8a2f2a7eca6db52a711f8211b566fd15d3e0e8d4","nonce":"0","blockHash":"0xd3cabad6adab0b52ea632c386ea19403680571e682c62cb589b5abcd76de2159","transactionIndex":"0","from":"{eth_accounts[0]}","to":"{eth_accounts[1]}","value":"1","gas":"2000000","gasPrice":"10000000000000","isError":"0","txreceipt_status":"","input":"0x","contractAddress":"","cumulativeGasUsed":"1436963","gasUsed":"1436963","confirmations":"1"}}"""  # noqa: E501
            addr2_txs = f"""{addr1_tx}, {{"blockNumber":"2","timeStamp":"2","hash":"0x1c81f54c29ff0226f835cd0a2a2f2a7eca6db52a711f8211b566fd15d3e0e8d4","nonce":"1","blockHash":"0xd1cabad2adab0b56ea632c386ea19403680571e682c62cb589b5abcd76de2159","transactionIndex":"0","from":"{eth_accounts[1]}","to":"{make_ethereum_address()}","value":"1","gas":"2000000","gasPrice":"10000000000000","isError":"0","txreceipt_status":"","input":"0x","contractAddress":"","cumulativeGasUsed":"1436963","gasUsed":"1436963","confirmations":"1"}}"""  # noqa: E501
            if '=txlistinternal&' in url:
                # don't return any internal transactions
                payload = '{"status":"1","message":"OK","result":[]}'
            elif '=txlist&' in url:
                if eth_accounts[0] in url:
                    tx_str = addr1_tx
                elif eth_accounts[1] in url:
                    tx_str = addr2_txs
                else:
                    raise AssertionError(
                        'Requested etherscan transactions for unknown address in tests',
                    )
                payload = f'{{"status":"1","message":"OK","result":[{tx_str}]}}'
            elif '=getblocknobytime&' in url:
                # we don't really care about this so just return whatever
                payload = '{"status":"1","message":"OK","result": "1"}'
            return MockResponse(200, payload)

        return patch.object(etherscan.session, 'get', wraps=mocked_request_dict)

    with mock_etherscan_transaction_response(rotki.etherscan, ethereum_accounts):
        # Check that we get transaction both when we query all accounts and each one individually
        response = requests.get(
            api_url_for(
                rotkehlchen_api_server,
                'ethereumtransactionsresource',
            ),
        )
        result = assert_proper_response_with_result(response)
        # the shared tx must appear only once here -> 2 total, not 3
        assert len(result['entries']) == 2
        assert result['entries_found'] == 2

        # account[0] only participates in the shared tx
        response = requests.get(
            api_url_for(
                rotkehlchen_api_server,
                'per_address_ethereum_transactions_resource',
                address=ethereum_accounts[0],
            ),
        )
        result = assert_proper_response_with_result(response)
        assert len(result['entries']) == 1
        assert result['entries_found'] == 2
        # account[1] sees both the shared tx and its own second tx
        response = requests.get(
            api_url_for(
                rotkehlchen_api_server,
                'per_address_ethereum_transactions_resource',
                address=ethereum_accounts[1],
            ),
        )
        result = assert_proper_response_with_result(response)
        assert len(result['entries']) == 2
        assert result['entries_found'] == 2
def test_simple_update(rotkehlchen_api_server, globaldb):
    """Test that the happy case of update works.

    - Test that up_to_version argument works
    - Test that only versions above current local are applied
    - Test that versions with min/max schema mismatch are skipped
    """
    async_query = random.choice([False, True])
    rotki = rotkehlchen_api_server.rest_api.rotkehlchen
    # Raw SQL payload for mock update 999999993: adds one ethereum token, one
    # counterparty asset, and renames EUR. Individual updates are separated by
    # '*' lines, matching the assets-update file format the updater parses.
    update_3 = """INSERT INTO ethereum_tokens(address, decimals, protocol) VALUES("0xC2FEC534c461c45533e142f724d0e3930650929c", 18, NULL);INSERT INTO assets(identifier,type, name, symbol,started, swapped_for, coingecko, cryptocompare, details_reference) VALUES("_ceth_0xC2FEC534c461c45533e142f724d0e3930650929c", "C", "AKB token", "AKB",123, NULL, NULL, "AIDU", "0xC2FEC534c461c45533e142f724d0e3930650929c");
*
INSERT INTO assets(identifier,type,name,symbol,started, swapped_for, coingecko, cryptocompare, details_reference) VALUES("121-ada-FADS-as", "F","A name","SYMBOL",NULL, NULL,"", "", "121-ada-FADS-as");INSERT INTO common_asset_details(asset_id, forked) VALUES("121-ada-FADS-as", "BTC");
*
UPDATE assets SET name="Ευρώ" WHERE identifier="EUR";
INSERT INTO assets(identifier,type,name,symbol,started, swapped_for, coingecko, cryptocompare, details_reference) VALUES("EUR", "A","Ευρώ","EUR",NULL, NULL,NULL,NULL, "EUR");INSERT INTO common_asset_details(asset_id, forked) VALUES("EUR", NULL);
    """  # noqa: E501
    update_patch = mock_asset_updates(
        original_requests_get=requests.get,
        latest=999999996,
        updates={
            "999999991": {
                "changes": 1,
                "min_schema_version": GLOBAL_DB_VERSION,
                "max_schema_version": GLOBAL_DB_VERSION,
            },
            "999999992": {
                "changes": 1,
                "min_schema_version": GLOBAL_DB_VERSION,
                "max_schema_version": GLOBAL_DB_VERSION,
            },
            "999999993": {
                "changes": 3,
                "min_schema_version": GLOBAL_DB_VERSION,
                "max_schema_version": GLOBAL_DB_VERSION,
            },
            # deliberately impossible schema range -> must be skipped with a warning
            "999999994": {
                "changes": 5,
                "min_schema_version": GLOBAL_DB_VERSION + 1,
                "max_schema_version": GLOBAL_DB_VERSION - 1,
            },
            "999999995": {
                "changes": 5,
                "min_schema_version": GLOBAL_DB_VERSION,
                "max_schema_version": GLOBAL_DB_VERSION,
            },
            "999999996": {
                "changes": 5,
                "min_schema_version": GLOBAL_DB_VERSION,
                "max_schema_version": GLOBAL_DB_VERSION,
            },
        },
        sql_actions={"999999991": "", "999999992": "", "999999993": update_3, "999999994": "", "999999995": ""},  # noqa: E501
    )
    # Pretend the local DB is already at 999999992, so only later versions apply
    globaldb.add_setting_value(ASSETS_VERSION_KEY, 999999992)
    with update_patch:
        response = requests.get(
            api_url_for(
                rotkehlchen_api_server,
                'assetupdatesresource',
            ),
            json={'async_query': async_query},
        )
        if async_query:
            task_id = assert_ok_async_response(response)
            outcome = wait_for_async_task(
                rotkehlchen_api_server,
                task_id,
            )
            result = outcome['result']
            assert outcome['message'] == ''
        else:
            result = assert_proper_response_with_result(response)
        assert result['local'] == 999999992
        assert result['remote'] == 999999996
        # 3 + 5 + 5 changes from updates 999999993/995/996; 999999994 is skipped
        assert result['new_changes'] == 13
        response = requests.post(
            api_url_for(
                rotkehlchen_api_server,
                'assetupdatesresource',
            ),
            json={
                'async_query': async_query,
                'up_to_version': 999999995},
        )
        if async_query:
            task_id = assert_ok_async_response(response)
            outcome = wait_for_async_task(
                rotkehlchen_api_server,
                task_id,
            )
            assert outcome['message'] == ''
            result = outcome['result']
        else:
            result = assert_proper_response_with_result(response)
        errors = rotki.msg_aggregator.consume_errors()
        warnings = rotki.msg_aggregator.consume_warnings()
        assert len(errors) == 0, f'Found errors: {errors}'
        # exactly one warning: the schema-mismatched update 999999994
        assert len(warnings) == 1
        assert 'Skipping assets update 999999994 since it requires a min schema of' in warnings[0]
        assert result is True
        # up_to_version was honored: we stop at 999999995, not the remote latest
        assert globaldb.get_setting_value(ASSETS_VERSION_KEY, None) == 999999995
        # Check the new ethereum token introduced by update 999999993
        new_token = EthereumToken('0xC2FEC534c461c45533e142f724d0e3930650929c')
        assert new_token.identifier == strethaddress_to_identifier('0xC2FEC534c461c45533e142f724d0e3930650929c')  # noqa: E501
        assert new_token.name == 'AKB token'
        assert new_token.symbol == 'AKB'
        assert new_token.asset_type == AssetType.ETHEREUM_TOKEN
        assert new_token.started == 123
        assert new_token.forked is None
        assert new_token.swapped_for is None
        assert new_token.coingecko is None
        assert new_token.cryptocompare == 'AIDU'
        assert new_token.ethereum_address == '0xC2FEC534c461c45533e142f724d0e3930650929c'
        assert new_token.decimals == 18
        assert new_token.protocol is None
        # Check the new counterparty asset introduced by update 999999993
        new_asset = Asset('121-ada-FADS-as')
        assert new_asset.identifier == '121-ada-FADS-as'
        assert new_asset.name == 'A name'
        assert new_asset.symbol == 'SYMBOL'
        assert new_asset.asset_type == AssetType.COUNTERPARTY_TOKEN
        assert new_asset.started is None
        assert new_asset.forked == 'BTC'
        assert new_asset.swapped_for is None
        assert new_asset.coingecko == ''
        assert new_asset.cryptocompare == ''
        # Check the EUR rename applied by update 999999993
        assert Asset('EUR').name == 'Ευρώ'
def test_query_transactions(rotkehlchen_api_server):
    """Test that querying the ethereum transactions endpoint works as expected

    This test uses real data. Found an ethereum address that has very few
    transactions and hopefully won't have more. If it does we can adjust the test.
    """
    async_query = random.choice([False, True])
    rotki = rotkehlchen_api_server.rest_api.rotkehlchen

    def _resolve(response):
        """Turn a sync or async API response into its result payload."""
        if async_query:
            task_id = assert_ok_async_response(response)
            outcome = wait_for_async_task(rotkehlchen_api_server, task_id)
            assert outcome['message'] == ''
            return outcome['result']
        return assert_proper_response_with_result(response)

    # Check that we get all transactions
    response = requests.get(
        api_url_for(
            rotkehlchen_api_server,
            'ethereumtransactionsresource',
        ), json={'async_query': async_query},
    )
    result = _resolve(response)
    # all known transactions, most recent first
    expected_result = list(reversed(sorted(
        EXPECTED_AFB7_TXS + EXPECTED_4193_TXS,
        key=lambda tx: tx['timestamp'],
    )))
    assert result['entries'] == expected_result
    assert result['entries_found'] == len(expected_result)
    assert result['entries_limit'] == FREE_ETH_TX_LIMIT

    # Check that transactions per address and in a specific time range can be
    # queried and that this is from the DB and not etherscan
    def mock_etherscan_get(url, *args, **kwargs):  # pylint: disable=unused-argument
        return MockResponse(200, "{}")
    etherscan_patch = patch.object(rotki.etherscan.session, 'get', wraps=mock_etherscan_get)
    with etherscan_patch as mock_call:
        response = requests.get(
            api_url_for(
                rotkehlchen_api_server,
                'per_address_ethereum_transactions_resource',
                address='0xaFB7ed3beBE50E0b62Fa862FAba93e7A46e59cA7',
            ), json={
                'async_query': async_query,
                'from_timestamp': 1461399856,
                'to_timestamp': 1494458860,
            },
        )
        result = _resolve(response)
        # everything must come from the DB -- etherscan untouched
        assert mock_call.call_count == 0
        assert result['entries'] == EXPECTED_AFB7_TXS[2:4][::-1]
def test_exchange_query_trades(rotkehlchen_api_server_with_exchanges):
    """Test that using the exchange trades query endpoint works fine"""
    async_query = random.choice([False, True])
    server = rotkehlchen_api_server_with_exchanges
    setup = mock_history_processing_and_exchanges(server.rest_api.rotkehlchen)

    def _resolve(response):
        """Turn a sync or async API response into its result payload."""
        if async_query:
            task_id = assert_ok_async_response(response)
            outcome = wait_for_async_task(rotkehlchen_api_server_with_exchanges, task_id)
            return outcome['result']
        return assert_proper_response_with_result(response)

    def assert_okay(response):
        """Helper function for DRY checking below assertions"""
        trades = _resolve(response)['entries']
        assert_binance_trades_result(
            [x['entry'] for x in trades if x['entry']['location'] == 'binance'],
        )
        assert_poloniex_trades_result(
            trades=[x['entry'] for x in trades if x['entry']['location'] == 'poloniex'],
            trades_to_check=(2,),
        )

    # query trades of one specific exchange
    with setup.binance_patch:
        response = requests.get(
            api_url_for(
                server,
                "tradesresource",
            ), json={'location': 'binance', 'async_query': async_query},
        )
        result = _resolve(response)
        assert result['entries_found'] > 0
        assert result['entries_limit'] == FREE_TRADES_LIMIT
        assert_binance_trades_result([x['entry'] for x in result['entries']])

    # query trades of all exchanges
    with setup.binance_patch, setup.polo_patch:
        response = requests.get(
            api_url_for(server, "tradesresource"),
            json={'async_query': async_query},
        )
        trades = _resolve(response)['entries']
        assert_binance_trades_result([x['entry'] for x in trades if x['entry']['location'] == 'binance'])  # noqa: E501
        assert_poloniex_trades_result([x['entry'] for x in trades if x['entry']['location'] == 'poloniex'])  # noqa: E501

    # and now query them in a specific time range excluding two of poloniex's trades
    data = {'from_timestamp': 1499865548, 'to_timestamp': 1539713118, 'async_query': async_query}
    with setup.binance_patch, setup.polo_patch:
        response = requests.get(api_url_for(server, "tradesresource"), json=data)
        assert_okay(response)
    # do the same but with query args. This serves as test of from/to timestamp with query args
    with setup.binance_patch, setup.polo_patch:
        response = requests.get(
            api_url_for(server, "tradesresource") + '?' + urlencode(data))
        assert_okay(response)
def test_query_asset_movements_over_limit(
        rotkehlchen_api_server_with_exchanges,
        start_with_valid_premium,
):
    """Test that using the asset movements query endpoint works fine"""
    start_ts = 0
    end_ts = 1598453214
    server = rotkehlchen_api_server_with_exchanges
    rotki = server.rest_api.rotkehlchen
    # Make sure online kraken is not queried by setting query ranges
    rotki.data.db.update_used_query_range(
        name='kraken_asset_movements',
        start_ts=start_ts,
        end_ts=end_ts,
    )
    polo_entries_num = 4
    # Set a ton of kraken asset movements in the DB, enough to exceed the free limit
    kraken_entries_num = FREE_ASSET_MOVEMENTS_LIMIT + 50
    movements = []
    for idx in range(kraken_entries_num):
        movements.append(AssetMovement(
            location=Location.KRAKEN,
            category=AssetMovementCategory.DEPOSIT,
            address=None,
            transaction_id=None,
            timestamp=idx,
            asset=A_BTC,
            amount=FVal(idx * 100),
            fee_asset=A_BTC,
            fee=FVal(idx),
            link='',
        ))
    rotki.data.db.add_asset_movements(movements)
    all_movements_num = kraken_entries_num + polo_entries_num
    setup = prepare_rotki_for_history_processing_test(server.rest_api.rotkehlchen)

    # Check that querying movements with/without limits works even if we query two times
    for _ in range(2):
        # query asset movements of polo which has less movements than the limit
        with setup.polo_patch:
            response = requests.get(
                api_url_for(
                    server,
                    "assetmovementsresource",
                ), json={'location': 'poloniex'},
            )
            result = assert_proper_response_with_result(response)
            assert result['entries_found'] == all_movements_num
            assert result['entries_limit'] == -1 if start_with_valid_premium else FREE_ASSET_MOVEMENTS_LIMIT  # noqa: E501
            assert_poloniex_asset_movements([x['entry'] for x in result['entries']], deserialized=True)  # noqa: E501

        # now query kraken which has a ton of DB entries
        response = requests.get(
            api_url_for(server, "assetmovementsresource"),
            json={'location': 'kraken'},
        )
        result = assert_proper_response_with_result(response)
        if start_with_valid_premium:
            # premium: no cap, everything returned
            assert len(result['entries']) == kraken_entries_num
            assert result['entries_limit'] == -1
        else:
            # free tier: poloniex already consumed part of the overall limit
            assert len(result['entries']) == FREE_ASSET_MOVEMENTS_LIMIT - polo_entries_num
            assert result['entries_limit'] == FREE_ASSET_MOVEMENTS_LIMIT
        assert result['entries_found'] == all_movements_num
def test_two_vaults_same_account_same_collateral(rotkehlchen_api_server, ethereum_accounts):
    """Check that no events are duplicated between vaults for same collateral by same account

    Test for vaults side of https://github.com/rotki/rotki/issues/1032
    """
    rotki = rotkehlchen_api_server.rest_api.rotkehlchen
    proxies_mapping = {
        # proxy for 8632 and 8543
        ethereum_accounts[0]: '0xAe9996b76bdAa003ace6D66328A6942565f5768d',
    }
    mock_proxies(rotki, proxies_mapping, 'makerdao_vaults')
    response = requests.get(
        api_url_for(
            rotkehlchen_api_server,
            "makerdaovaultsresource",
        ))
    vaults = assert_proper_response_with_result(response)
    # Expected serialization of the two repaid vaults (both empty ETH-A)
    vault_8543 = {
        'identifier': 8543,
        'owner': ethereum_accounts[0],
        'collateral_type': 'ETH-A',
        'collateral_asset': 'ETH',
        'collateral': {
            'amount': '0',
            'usd_value': '0',
        },
        'debt': {
            'amount': '0',
            'usd_value': '0',
        },
        'collateralization_ratio': None,
        'liquidation_ratio': '145.00%',
        'liquidation_price': None,
        'stability_fee': '0.00%',
    }
    vault_8632 = {
        'identifier': 8632,
        'owner': ethereum_accounts[0],
        'collateral_type': 'ETH-A',
        'collateral_asset': 'ETH',
        'collateral': {
            'amount': '0',
            'usd_value': '0',
        },
        'debt': {
            'amount': '0',
            'usd_value': '0',
        },
        'collateralization_ratio': None,
        'liquidation_ratio': '145.00%',
        'liquidation_price': None,
        'stability_fee': '0.00%',
    }
    assert len(vaults) == 2
    assert_serialized_dicts_equal(vaults[0], vault_8543, ignore_keys=['stability_fee'])
    assert_serialized_dicts_equal(vaults[1], vault_8632, ignore_keys=VAULT_IGNORE_KEYS)
    response = requests.get(
        api_url_for(
            rotkehlchen_api_server,
            "makerdaovaultdetailsresource",
        ))
    # Expected event history per vault -- the point of the test is that each
    # event appears only under its own vault, never duplicated across the two
    vault_8543_details = {
        'identifier': 8543,
        'collateral_asset': 'ETH',
        'creation_ts': 1587910979,
        'total_interest_owed': '0',
        'total_liquidated': {
            'amount': '0',
            'usd_value': '0',
        },
        'events': [{
            'event_type': 'deposit',
            'value': {
                'amount': '1',
                'usd_value': '197.78',
            },
            'timestamp': 1587910979,
            'tx_hash': '0xf59858df4e42cdc2aecfebdcf38e1df841866c6a9eb3adb6bde9a844564a3bb6',
        }, {
            'event_type': 'generate',
            'value': {
                'amount': '80',
                'usd_value': '81.2',
            },
            'timestamp': 1587910979,
            'tx_hash': '0xf59858df4e42cdc2aecfebdcf38e1df841866c6a9eb3adb6bde9a844564a3bb6',
        }, {
            'event_type': 'payback',
            'value': {
                'amount': '80',
                'usd_value': '80.24',
            },
            'timestamp': 1589989097,
            'tx_hash': '0x52396f7d20db54e2e9e716698b643a39815ff149a6cccbe9c7597dc9e06bb9d3',
        }, {
            'event_type': 'deposit',
            'value': {
                'amount': '3.5',
                'usd_value': '734.475',
            },
            'timestamp': 1589993538,
            'tx_hash': '0x3c3942dc40fe68303098d91e765ceecaed4664bba0ef8f8e684b6f0e61968c6c',
        }, {
            'event_type': 'withdraw',
            'value': {
                'amount': '4.5',
                'usd_value': '893.52',
            },
            'timestamp': 1590043499,
            'tx_hash': '0xbcd4158f0089404f6ab5378517762cddc13d21c9d2fcf3fd45cf1cf4b656242c',
        }],
    }
    vault_8632_details = {
        'identifier': 8632,
        'collateral_asset': 'ETH',
        'creation_ts': 1588174425,
        'total_interest_owed': '0',
        'total_liquidated': {
            'amount': '0',
            'usd_value': '0',
        },
        'events': [{
            'event_type': 'deposit',
            'value': {
                'amount': '2.4',
                'usd_value': '517.32',
            },
            'timestamp': 1588174425,
            'tx_hash': '0xdb677a4257b5bdb305c278102d7b2460408bb7a3981414b994f4dd80a737ac2a',
        }, {
            'event_type': 'generate',
            'value': {
                'amount': '192',
                'usd_value': '194.688',
            },
            'timestamp': 1588174425,
            'tx_hash': '0xdb677a4257b5bdb305c278102d7b2460408bb7a3981414b994f4dd80a737ac2a',
        }, {
            'event_type': 'payback',
            'value': {
                'amount': '192',
                'usd_value': '192.192',
            },
            'timestamp': 1590042891,
            'tx_hash': '0x488a937677030cc810d0062001c08c944ecf6329b24a45ae9480bada8147bf75',
        }, {
            'event_type': 'deposit',
            'value': {
                'amount': '4.4',
                'usd_value': '873.664',
            },
            'timestamp': 1590043699,
            'tx_hash': '0x712ddb654b878bcb30c5344d7c18f7f796fe94abd6e5b8a22b2da0a6c99bb425',
        }, {
            'event_type': 'generate',
            'value': {
                'amount': '429.79',
                'usd_value': '430.21979',
            },
            'timestamp': 1590044118,
            'tx_hash': '0x36bfa27e157c03393a8816f6c1e3e990474f8f7473413810d87e2f4981d58044',
        }],
    }
    details = assert_proper_response_with_result(response)
    assert len(details) == 2
    assert_serialized_dicts_equal(details[0], vault_8543_details, ignore_keys=['stability_fee'])
    assert_serialized_dicts_equal(
        details[1],
        vault_8632_details,
        ignore_keys=[
            'total_interest_owed',
            'total_liquidated_amount',
            'total_liquidated_usd',
            'stability_fee',
        ],
        # Checking only the first 5 events, since that's how many we had when the test was written
        length_list_keymap={'events': 5},
    )
def test_query_vaults_usdc_strange(rotkehlchen_api_server, ethereum_accounts):
    """Strange case of a USDC vault that is not queried correctly

    https://oasis.app/borrow/7538?network=mainnet
    """
    rotki = rotkehlchen_api_server.rest_api.rotkehlchen
    proxies_mapping = {
        ethereum_accounts[0]: '0x15fEaFd4358b8C03c889D6661b0CA1Be3389792F',  # 7538
    }
    mock_proxies(rotki, proxies_mapping, 'makerdao_vaults')
    response = requests.get(
        api_url_for(
            rotkehlchen_api_server,
            "makerdaovaultsresource",
        ))
    # That proxy has 3 vaults. We only want to test 7538, which is closed/repaid so just keep that
    vaults = [
        x for x in assert_proper_response_with_result(response)
        if x['identifier'] == 7538
    ]
    # Expected state of the (repaid, hence all-zero) vault 7538
    vault_7538 = MakerdaoVault(
        identifier=7538,
        owner=ethereum_accounts[0],
        collateral_type='USDC-A',
        urn='0x70E58566C7baB6faaFE03fbA69DF45Ef4f48223B',
        collateral_asset=A_USDC,
        collateral=Balance(ZERO, ZERO),
        debt=Balance(ZERO, ZERO),
        collateralization_ratio=None,
        liquidation_ratio=FVal(1.1),
        liquidation_price=None,
    )
    expected_vaults = [vault_7538.serialize()]
    assert_serialized_lists_equal(expected_vaults, vaults)
    # And also make sure that the internal mapping will only query details of 7538
    makerdao_vaults = rotki.chain_manager.get_module('makerdao_vaults')
    makerdao_vaults.vault_mappings = {ethereum_accounts[0]: [vault_7538]}
    response = requests.get(
        api_url_for(
            rotkehlchen_api_server,
            "makerdaovaultdetailsresource",
        ))
    # Full expected event history for vault 7538
    vault_7538_details = {
        'identifier': 7538,
        'collateral_asset': A_USDC.identifier,
        'creation_ts': 1585145754,
        'total_interest_owed': '0.0005943266',
        'total_liquidated': {
            'amount': '0',
            'usd_value': '0',
        },
        'events': [{
            'event_type': 'deposit',
            'value': {
                'amount': '250.12',
                'usd_value': '250.12',
            },
            'timestamp': 1588664698,
            'tx_hash': '0x9ba4a6187fa2c49ba327e7c923846a08a1e972017ec41d3f9f66ef524f7dde59',
        }, {
            'event_type': 'generate',
            'value': {
                'amount': '25',
                'usd_value': '25',
            },
            'timestamp': 1588664698,
            'tx_hash': '0x9ba4a6187fa2c49ba327e7c923846a08a1e972017ec41d3f9f66ef524f7dde59',
        }, {
            'event_type': 'payback',
            'value': {
                'amount': '25.000248996',
                'usd_value': '25.000248996',
            },
            'timestamp': 1588696496,
            'tx_hash': '0x8bd960e7eb8b9e2b81d2446d1844dd63f94636c7800ea5e3b4d926ea0244c66c',
        }, {
            'event_type': 'deposit',
            'value': {
                'amount': '0.0113',
                'usd_value': '0.0113',
            },
            'timestamp': 1588720248,
            'tx_hash': '0x678c4da562173c102473f1904ff293a767ebac9ec6c7d728ef2fd41acf00a13a',
        }],
    }
    details = assert_proper_response_with_result(response)
    expected_details = [vault_7538_details]
    assert_serialized_lists_equal(expected_details, details)
def test_query_vaults_wbtc(rotkehlchen_api_server, ethereum_accounts):
    """Check vault info and details for a vault with WBTC as collateral"""
    rotki = rotkehlchen_api_server.rest_api.rotkehlchen
    proxies_mapping = {
        ethereum_accounts[0]: '0x9684e6C1c7B79868839b27F88bA6d5A176367075',  # 8913
    }
    mock_proxies(rotki, proxies_mapping, 'makerdao_vaults')
    response = requests.get(
        api_url_for(
            rotkehlchen_api_server,
            "makerdaovaultsresource",
        ))
    # That proxy has 3 vaults. We only want to test 8913, which is closed/repaid so just keep that
    vaults = [
        x for x in assert_proper_response_with_result(response)
        if x['identifier'] == 8913
    ]
    # Expected state of the (repaid, hence all-zero) vault 8913
    vault_8913 = MakerdaoVault(
        identifier=8913,
        owner=ethereum_accounts[0],
        collateral_type='WBTC-A',
        urn='0x37f7B3C82A9Edc13FdCcE66E7d500b3698A13294',
        collateral_asset=A_WBTC,
        collateral=Balance(ZERO, ZERO),
        debt=Balance(ZERO, ZERO),
        collateralization_ratio=None,
        liquidation_ratio=FVal(1.45),
        liquidation_price=None,
        stability_fee=FVal(0.02),
    )
    expected_vaults = [vault_8913.serialize()]
    assert_serialized_lists_equal(expected_vaults, vaults, ignore_keys=['stability_fee'])
    # And also make sure that the internal mapping will only query details of 8913
    makerdao_vaults = rotki.chain_manager.get_module('makerdao_vaults')
    makerdao_vaults.vault_mappings = {ethereum_accounts[0]: [vault_8913]}
    response = requests.get(
        api_url_for(
            rotkehlchen_api_server,
            "makerdaovaultdetailsresource",
        ))
    # Expected first events of vault 8913 (only a prefix is checked below)
    vault_8913_details = {
        'identifier': 8913,
        'collateral_asset': A_WBTC.identifier,
        'creation_ts': 1588664698,
        'total_interest_owed': '0.1903819198',
        'total_liquidated': {
            'amount': '0',
            'usd_value': '0',
        },
        'events': [{
            'event_type': 'deposit',
            'value': {
                'amount': '0.011',
                'usd_value': '87.06599',
            },
            'timestamp': 1588664698,
            'tx_hash': '0x9ba4a6187fa2c49ba327e7c923846a08a1e972017ec41d3f9f66ef524f7dde59',
        }, {
            'event_type': 'generate',
            'value': {
                'amount': '25',
                'usd_value': '25.15',
            },
            'timestamp': 1588664698,
            'tx_hash': '0x9ba4a6187fa2c49ba327e7c923846a08a1e972017ec41d3f9f66ef524f7dde59',
        }, {
            'event_type': 'payback',
            'value': {
                'amount': '25.000248996',
                'usd_value': '25.15025',
            },
            'timestamp': 1588696496,
            'tx_hash': '0x8bd960e7eb8b9e2b81d2446d1844dd63f94636c7800ea5e3b4d926ea0244c66c',
        }, {
            'event_type': 'deposit',
            'value': {
                'amount': '0.0113',
                'usd_value': '89.440517',
            },
            'timestamp': 1588720248,
            'tx_hash': '0x678c4da562173c102473f1904ff293a767ebac9ec6c7d728ef2fd41acf00a13a',
        }],
        # way too many events in the vault, so no need to check them all
    }
    details = assert_proper_response_with_result(response)
    assert len(details) == 1
    assert_serialized_dicts_equal(
        details[0],
        vault_8913_details,
        # Checking only the first 4 events
        length_list_keymap={'events': 4},
    )
def test_query_vaults_usdc(rotkehlchen_api_server, ethereum_accounts):
    """Check vault info and details for a vault with USDC as collateral"""
    rotki = rotkehlchen_api_server.rest_api.rotkehlchen
    # Map the test account to the known DSProxy that owns vault 7588
    proxies_mapping = {
        ethereum_accounts[0]: '0xBE79958661741079679aFf75DbEd713cE71a979d',  # 7588
    }
    mock_proxies(rotki, proxies_mapping, 'makerdao_vaults')
    response = requests.get(
        api_url_for(
            rotkehlchen_api_server,
            "makerdaovaultsresource",
        ))
    vaults = assert_proper_response_with_result(response)
    # Expected serialized form of the closed vault: zero collateral and debt
    vault_7588 = MakerdaoVault(
        identifier=7588,
        owner=ethereum_accounts[0],
        collateral_type='USDC-A',
        urn='0x56D88244073B2fC17af5B1E6088936D5bAaDc37B',
        collateral_asset=A_USDC,
        collateral=Balance(ZERO, ZERO),
        debt=Balance(ZERO, ZERO),
        collateralization_ratio=None,
        liquidation_ratio=FVal('1.03'),
        liquidation_price=None,
        stability_fee=FVal('0.04'),
    )
    expected_vaults = [vault_7588.serialize()]
    # fee/ratio parameters can change on-chain over time, so they are not pinned
    assert_serialized_lists_equal(
        expected_vaults,
        vaults,
        ignore_keys=['stability_fee', 'liquidation_ratio'],
    )
    response = requests.get(
        api_url_for(
            rotkehlchen_api_server,
            "makerdaovaultdetailsresource",
        ))
    vault_7588_details = {
        'identifier': 7588,
        'collateral_asset': A_USDC.identifier,
        'creation_ts': 1585286480,
        'total_interest_owed': '0.00050636718',
        'total_liquidated': {
            'amount': '0',
            'usd_value': '0',
        },
        'events': [{
            'event_type': 'deposit',
            'value': {
                'amount': '45',
                'usd_value': '45',
            },
            'timestamp': 1585286480,
            'tx_hash': '0x8b553dd0e8ee5385ec91105bf911143666d9df0ecd84c04f288278f7658aa7d6',
        }, {
            'event_type': 'generate',
            'value': {
                'amount': '20',
                'usd_value': '20.46',
            },
            'timestamp': 1585286480,
            'tx_hash': '0x8b553dd0e8ee5385ec91105bf911143666d9df0ecd84c04f288278f7658aa7d6',
        }, {
            'event_type': 'generate',
            'value': {
                'amount': '15.99',
                'usd_value': '16.35777',
            },
            'timestamp': 1585286769,
            'tx_hash': '0xdb861c893a51e4649ff3740cd3658cd4c9b1d048d3b8b4d117f4319bd60aee01',
        }, {
            'event_type': 'payback',
            'value': {
                'amount': '35.990506367',
                'usd_value': '36.818288',
            },
            'timestamp': 1585290263,
            'tx_hash': '0xdd7825fe4a93c6f1ffa25a91b6da2396c229fe16b17242ad5c0bf7962928b2ec',
        }, {
            'event_type': 'withdraw',
            'value': {
                'amount': '45',
                'usd_value': '45',
            },
            'timestamp': 1585290300,
            'tx_hash': '0x97462ebba7ce2467787bf6de25a25c24e538cf8a647919112c5f048b6a293408',
        }],
    }
    details = assert_proper_response_with_result(response)
    expected_details = [vault_7588_details]
    assert_serialized_lists_equal(expected_details, details, ignore_keys=['liquidation_ratio'])
def test_query_vaults_details_liquidation(rotkehlchen_api_server, ethereum_accounts):
    """Check vault details of a vault with liquidations

    Also use three accounts, two of which have vaults associated with them to
    test that vaults for multiple accounts get detected
    """
    rotki = rotkehlchen_api_server.rest_api.rotkehlchen
    # accounts[1] deliberately has no proxy, so it should yield no vaults
    proxies_mapping = {
        ethereum_accounts[0]: '0x689D4C2229717f877A644A0aAd742D67E5D0a2FB',
        ethereum_accounts[2]: '0x420F88De6dadA0a77Db7b9EdBe3A0C614346031E',
    }
    mock_proxies(rotki, proxies_mapping, 'makerdao_vaults')
    response = requests.get(
        api_url_for(
            rotkehlchen_api_server,
            "makerdaovaultsresource",
        ))
    vaults = assert_proper_response_with_result(response)
    vault_6021 = {
        'identifier': 6021,
        'owner': ethereum_accounts[2],
        'collateral_type': 'ETH-A',
        'collateral_asset': 'ETH',
        'collateral': {
            'amount': ZERO,
            'usd_value': ZERO,
        },
        'debt': {
            'amount': ZERO,
            'usd_value': ZERO,
        },
        'collateralization_ratio': None,
        'liquidation_ratio': '145.00%',
        'liquidation_price': None,
        'stability_fee': '0.00%',
    }
    vault_8015_with_owner = VAULT_8015.copy()
    vault_8015_with_owner['owner'] = ethereum_accounts[0]
    assert_serialized_dicts_equal(vault_6021, vaults[0], ignore_keys=['stability_fee'])
    assert_serialized_dicts_equal(
        vault_8015_with_owner,
        vaults[1],
        ignore_keys=VAULT_IGNORE_KEYS,
    )
    assert len(vaults) == 2
    response = requests.get(
        api_url_for(
            rotkehlchen_api_server,
            "makerdaovaultdetailsresource",
        ))
    # All 7 events of the liquidated vault are pinned, including the 3 liquidations
    vault_6021_details = {
        'identifier': 6021,
        'collateral_asset': 'ETH',
        'creation_ts': 1582699808,
        'total_interest_owed': '-11078.655097848869',
        'total_liquidated': {
            'amount': '141.7',
            'usd_value': '19191.848',
        },
        'events': [{
            'event_type': 'deposit',
            'value': {
                'amount': '140',
                'usd_value': '31322.2',
            },
            'timestamp': 1582699808,
            'tx_hash': '0x3246ef91fd3d6e1f7c5766de4fa1f0991ba67d92e518447ba8207fe98569c309',
        }, {
            'event_type': 'generate',
            'value': {
                'amount': '14000',
                'usd_value': '14028',
            },
            'timestamp': 1582699808,
            'tx_hash': '0x3246ef91fd3d6e1f7c5766de4fa1f0991ba67d92e518447ba8207fe98569c309',
        }, {
            'event_type': 'deposit',
            'value': {
                'amount': '1.7',
                'usd_value': '331.262',
            },
            'timestamp': 1583958747,
            'tx_hash': '0x65ac798cb9f22068e43fd9ef8303a31e436989062ae87e25650cc44c7788ab62',
        }, {
            'event_type': 'payback',
            'value': {
                'amount': '2921.344902',
                'usd_value': '2927.187591',
            },
            'timestamp': 1584024065,
            'tx_hash': '0x6e44d22d6898ee012369787cd75ea6fb9ace6f995cd157675f370e8ba4a7b9ad',
        }, {
            'event_type': 'liquidation',
            'value': {
                'amount': '50',
                'usd_value': '6772',
            },
            'timestamp': 1584061534,
            'tx_hash': '0xb02050d914ab40f59a9e07eb4f8161ce36eb97cea9c189b027eb1ceeac83a516',
        }, {
            'event_type': 'liquidation',
            'value': {
                'amount': '50',
                'usd_value': '6772',
            },
            'timestamp': 1584061897,
            'tx_hash': '0x678f31d49dd70d76c0ce441343c0060dc600f4c8dbb4cee2b08c6b451b6097cd',
        }, {
            'event_type': 'liquidation',
            'value': {
                'amount': '41.7',
                'usd_value': '5647.848',
            },
            'timestamp': 1584061977,
            'tx_hash': '0xded0f9de641087692555d92a7fa94fa9fa7abf22744b2d16c20a66c5e48a8edf',
        }],
    }
    details = assert_proper_response_with_result(response)
    assert len(details) == 2
    assert_serialized_dicts_equal(vault_6021_details, details[0], ignore_keys=['stability_fee'])
    assert_serialized_dicts_equal(
        VAULT_8015_DETAILS,
        details[1],
        length_list_keymap={'events': 7},
        ignore_keys=['total_interest_owed'],
    )
    # interest accrues over time, so the live value may only grow past the pinned one
    assert FVal(details[1]['total_interest_owed']) >= FVal(
        VAULT_8015_DETAILS['total_interest_owed'],
    )
def test_ignored_assets_modification(rotkehlchen_api_server_with_exchanges):
    """Test that using the ignored assets endpoint to modify the ignored assets list works fine"""
    rotki = rotkehlchen_api_server_with_exchanges.rest_api.rotkehlchen

    # add three assets to ignored assets
    kick_token_id = KICK_TOKEN.identifier
    ignored_assets = [A_GNO.identifier, A_RDN.identifier, 'XMR']
    response = requests.put(
        api_url_for(
            rotkehlchen_api_server_with_exchanges,
            'ignoredassetsresource',
        ),
        json={'assets': ignored_assets},
    )
    result = assert_proper_response_with_result(response)
    # KICK_TOKEN is ignored by default, so it is part of the expected set too
    expected_ignored_assets = set(ignored_assets + [KICK_TOKEN.identifier])
    assert expected_ignored_assets <= set(result)

    # check they are there
    assert set(rotki.data.db.get_ignored_assets()) >= expected_ignored_assets
    # Query for ignored assets and check that the response returns them
    response = requests.get(
        api_url_for(
            rotkehlchen_api_server_with_exchanges,
            'ignoredassetsresource',
        ),
    )
    result = assert_proper_response_with_result(response)
    assert expected_ignored_assets <= set(result)

    # remove 3 assets from ignored assets
    response = requests.delete(
        api_url_for(
            rotkehlchen_api_server_with_exchanges,
            'ignoredassetsresource',
        ),
        json={'assets': [A_GNO.identifier, 'XMR', kick_token_id]},
    )
    assets_after_deletion = {A_RDN.identifier}
    result = assert_proper_response_with_result(response)
    assert assets_after_deletion <= set(result)

    # check that the changes are reflected
    assert set(rotki.data.db.get_ignored_assets()) >= assets_after_deletion
    # Query for ignored assets and check that the response returns them
    response = requests.get(
        api_url_for(
            rotkehlchen_api_server_with_exchanges,
            'ignoredassetsresource',
        ),
    )
    result = assert_proper_response_with_result(response)
    assert assets_after_deletion <= set(result)

    # Fetch remote assets to be ignored
    response = requests.post(
        api_url_for(
            rotkehlchen_api_server_with_exchanges,
            'ignoredassetsresource',
        ),
    )
    result = assert_proper_response_with_result(response)
    # BUG FIX: the endpoint returns the list of ignored assets (as every other
    # call to this resource above shows). `assert result >= 1` compared a list
    # against an int, which raises TypeError in Python 3 — assert on the
    # list's length instead.
    assert len(result) >= 1
    assert len(rotki.data.db.get_ignored_assets()) > len(assets_after_deletion)

    # Simulate remote error from cryptoscamdb
    with mock_cryptoscamdb_request():
        response = requests.post(
            api_url_for(
                rotkehlchen_api_server_with_exchanges,
                'ignoredassetsresource',
            ),
        )
    assert response.status_code == HTTPStatus.BAD_GATEWAY
def test_delete_external_service(rotkehlchen_api_server): """Tests that delete external service credentials works""" # Add some data and see that the response shows they are added expected_result = { 'etherscan': { 'api_key': 'key1' }, 'cryptocompare': { 'api_key': 'key2' }, } data = { 'services': [ { 'name': 'etherscan', 'api_key': 'key1' }, { 'name': 'cryptocompare', 'api_key': 'key2' }, ] } response = requests.put( api_url_for(rotkehlchen_api_server, "externalservicesresource"), json=data, ) result = assert_proper_response_with_result(response) assert result == expected_result # Now try to delete an entry and see the response shows it's deleted data = {'services': ['etherscan']} del expected_result['etherscan'] response = requests.delete( api_url_for(rotkehlchen_api_server, "externalservicesresource"), json=data, ) result = assert_proper_response_with_result(response) assert result == expected_result # Query again and see that the modified services are returned response = requests.get( api_url_for(rotkehlchen_api_server, "externalservicesresource"), ) result = assert_proper_response_with_result(response) assert result == expected_result # Now try to delete an existing and a non-existing service to make sure # that if the service is not in the DB, deletion is silently ignored data = {'services': ['etherscan', 'cryptocompare']} response = requests.delete( api_url_for(rotkehlchen_api_server, "externalservicesresource"), json=data, ) result = assert_proper_response_with_result(response) assert result == {} # Query again and see that the modified services are returned response = requests.get( api_url_for(rotkehlchen_api_server, "externalservicesresource"), ) result = assert_proper_response_with_result(response) assert result == {}
def test_replace_asset_edge_cases(rotkehlchen_api_server, globaldb):
    """Test that the edge cases/errors are treated properly in the replace assets endpoint"""
    rotki = rotkehlchen_api_server.rest_api.rotkehlchen
    cursor = rotki.data.db.conn.cursor()

    # Test that completely unknown source asset returns error
    notexisting_id = 'boo-boo-ga-ga'
    response = requests.put(
        api_url_for(
            rotkehlchen_api_server,
            'assetsreplaceresource',
        ),
        json={'source_identifier': notexisting_id, 'target_asset': 'ICP'},
    )
    assert_error_response(
        response=response,
        contained_in_msg=f'Unknown asset {notexisting_id} provided',
        status_code=HTTPStatus.CONFLICT,
    )

    # Test that trying to replace an asset that's used as a foreign key elsewhere in
    # the global DB does not work, error is returned and no changes happen
    # in the global DB and in the user DB
    glm_id = strethaddress_to_identifier('0x7DD9c5Cba05E151C895FDe1CF355C9A1D5DA6429')
    balances: List[Dict[str, Any]] = [{
        'asset': glm_id,
        'label': 'ICP account',
        'amount': '50.315',
        'location': 'blockchain',
    }]
    # track a manual balance in GLM so the user DB references the asset
    response = requests.put(
        api_url_for(
            rotkehlchen_api_server,
            'manuallytrackedbalancesresource',
        ),
        json={'async_query': False, 'balances': balances},
    )
    assert_proper_response_with_result(response)
    global_cursor = globaldb.conn.cursor()

    def assert_db() -> None:
        # GLM must still be referenced from user_owned_assets, as a swapped_for
        # target in the global DB, and as an asset in the user DB
        assert global_cursor.execute(
            'SELECT COUNT(*) FROM user_owned_assets WHERE asset_id=?',
            (glm_id,),
        ).fetchone()[0] == 1
        assert global_cursor.execute(
            'SELECT COUNT(*) FROM assets WHERE swapped_for=?',
            (glm_id,),
        ).fetchone()[0] == 1
        assert cursor.execute(
            'SELECT COUNT(*) FROM assets WHERE identifier=?',
            (glm_id,),
        ).fetchone()[0] == 1

    assert_db()
    response = requests.put(
        api_url_for(
            rotkehlchen_api_server,
            'assetsreplaceresource',
        ),
        json={'source_identifier': glm_id, 'target_asset': 'ICP'},
    )
    assert_error_response(
        response=response,
        contained_in_msg='Tried to delete ethereum token with address',
        status_code=HTTPStatus.CONFLICT,
    )
    # nothing should have changed in either DB after the failed replacement
    assert_db()

    # Test non-string source identifier
    response = requests.put(
        api_url_for(
            rotkehlchen_api_server,
            'assetsreplaceresource',
        ),
        json={'source_identifier': 55.1, 'target_asset': 'ICP'},
    )
    assert_error_response(
        response=response,
        contained_in_msg='Not a valid string',
        status_code=HTTPStatus.BAD_REQUEST,
    )
    # Test unknown target asset
    response = requests.put(
        api_url_for(
            rotkehlchen_api_server,
            'assetsreplaceresource',
        ),
        json={'source_identifier': 'ETH', 'target_asset': 'bobobobobo'},
    )
    assert_error_response(
        response=response,
        contained_in_msg='Unknown asset bobobobobo provided',
        status_code=HTTPStatus.BAD_REQUEST,
    )
    # Test invalid target asset
    response = requests.put(
        api_url_for(
            rotkehlchen_api_server,
            'assetsreplaceresource',
        ),
        json={'source_identifier': 'ETH', 'target_asset': 55},
    )
    assert_error_response(
        response=response,
        contained_in_msg='Tried to initialize an asset out of a non-string identifier',
        status_code=HTTPStatus.BAD_REQUEST,
    )
def test_query_history(rotkehlchen_api_server_with_exchanges):
    """Test that the history processing REST API endpoint works. Similar to test_history.py"""
    async_query = random.choice([False, True])
    start_ts = 0
    end_ts = 1601040361
    rotki = rotkehlchen_api_server_with_exchanges.rest_api.rotkehlchen
    setup = prepare_rotki_for_history_processing_test(
        rotki,
        should_mock_history_processing=False,
    )

    # Query history processing to start the history processing
    with ExitStack() as stack:
        # enter all the exchange/chain mock patches that the setup provides
        for manager in setup:
            if manager is None:
                continue
            stack.enter_context(manager)
        response = requests.get(
            api_url_for(rotkehlchen_api_server_with_exchanges, "historyprocessingresource"),
            json={'from_timestamp': start_ts, 'to_timestamp': end_ts, 'async_query': async_query},
        )
        if async_query:
            # keep the patches active while waiting for the async task to finish
            task_id = assert_ok_async_response(response)
            outcome = wait_for_async_task_with_result(
                rotkehlchen_api_server_with_exchanges,
                task_id,
            )
        else:
            outcome = assert_proper_response_with_result(response)

    # Simply check that the results got returned here. The actual correctness of
    # accounting results is checked in other tests such as test_simple_accounting
    assert len(outcome) == 5
    assert outcome['events_limit'] == FREE_PNL_EVENTS_LIMIT
    assert outcome['events_processed'] == 27
    assert outcome['first_processed_timestamp'] == 1428994442
    overview = outcome['overview']
    assert len(overview) == 11
    assert overview["loan_profit"] is not None
    assert overview["margin_positions_profit_loss"] is not None
    assert overview["settlement_losses"] is not None
    assert overview["ethereum_transaction_gas_costs"] is not None
    assert overview["asset_movement_fees"] is not None
    assert overview["general_trade_profit_loss"] is not None
    assert overview["taxable_trade_profit_loss"] is not None
    assert overview["total_taxable_profit_loss"] is not None
    assert overview["total_profit_loss"] is not None
    assert overview["defi_profit_loss"] is not None
    assert overview["ledger_actions_profit_loss"] is not None
    all_events = outcome['all_events']
    assert isinstance(all_events, list)
    # TODO: These events are not actually checked anywhere for correctness
    #       A test should probably be made for their correctness, even though
    #       they are assumed correct if the overview is correct
    assert len(all_events) == 37

    # And now make sure that warnings have also been generated for the query of
    # the unsupported/unknown assets
    warnings = rotki.msg_aggregator.consume_warnings()
    assert len(warnings) == 13
    assert 'poloniex trade with unknown asset NOEXISTINGASSET' in warnings[0]
    assert 'poloniex trade with unsupported asset BALLS' in warnings[1]
    assert 'withdrawal of unknown poloniex asset IDONTEXIST' in warnings[2]
    assert 'withdrawal of unsupported poloniex asset DIS' in warnings[3]
    assert 'deposit of unknown poloniex asset IDONTEXIST' in warnings[4]
    assert 'deposit of unsupported poloniex asset EBT' in warnings[5]
    assert 'poloniex loan with unsupported asset BDC' in warnings[6]
    assert 'poloniex loan with unknown asset NOTEXISTINGASSET' in warnings[7]
    assert 'bittrex trade with unsupported asset PTON' in warnings[8]
    assert 'bittrex trade with unknown asset IDONTEXIST' in warnings[9]
    assert 'kraken trade with unknown asset IDONTEXISTTOO' in warnings[10]
    assert 'unknown kraken asset IDONTEXIST. Ignoring its deposit/withdrawals' in warnings[11]
    msg = 'unknown kraken asset IDONTEXISTEITHER. Ignoring its deposit/withdrawals query'
    assert msg in warnings[12]

    # errors for unprocessable trade pairs and missing acquisitions
    errors = rotki.msg_aggregator.consume_errors()
    assert len(errors) == 4
    assert 'bittrex trade with unprocessable pair %$#%$#%#$%' in errors[0]
    assert 'kraken trade with unprocessable pair IDONTEXISTZEUR' in errors[1]
    assert 'kraken trade with unprocessable pair %$#%$#%$#%$#%$#%' in errors[2]
    assert 'No documented acquisition found for RDN(0x255Aa6DF07540Cb5d3d297f0D0D4D84cb52bc8e6) before' in errors[3]  # noqa: E501
def test_exporting_custom_assets_list(rotkehlchen_api_server, globaldb, with_custom_path): """Test that the endpoint for exporting custom assets works correctly""" eth_address = make_ethereum_address() identifier = ethaddress_to_identifier(eth_address) globaldb.add_asset( asset_id=identifier, asset_type=AssetType.ETHEREUM_TOKEN, data=EthereumToken.initialize( address=eth_address, decimals=18, name='yabirtoken', symbol='YAB', coingecko='YAB', cryptocompare='YAB', ), ) with tempfile.TemporaryDirectory() as path: if with_custom_path: response = requests.put( api_url_for( rotkehlchen_api_server, 'userassetsresource', ), json={ 'action': 'download', 'destination': path }, ) else: response = requests.put( api_url_for( rotkehlchen_api_server, 'userassetsresource', ), json={'action': 'download'}, ) if with_custom_path: result = assert_proper_response_with_result(response) if with_custom_path: assert path in result['file'] zip_file = ZipFile(result['file']) data = json.loads(zip_file.read('assets.json')) assert int(data['version']) == GLOBAL_DB_VERSION assert len(data['assets']) == 1 assert data['assets'][0] == { 'identifier': identifier, 'name': 'yabirtoken', 'decimals': 18, 'symbol': 'YAB', 'asset_type': 'ethereum token', 'started': None, 'forked': None, 'swapped_for': None, 'cryptocompare': 'YAB', 'coingecko': 'YAB', 'protocol': None, 'underlying_tokens': None, 'ethereum_address': eth_address, } else: assert response.status_code == HTTPStatus.OK assert response.headers['Content-Type'] == 'application/zip' # try to download again to see if the database is properly detached response = requests.put( api_url_for( rotkehlchen_api_server, 'userassetsresource', ), json={ 'action': 'download', 'destination': path }, ) result = assert_proper_response_with_result(response)
def test_reverse_ens(rotkehlchen_api_server):
    """Test that we can reverse resolve ENS names"""
    db = DBEns(rotkehlchen_api_server.rest_api.rotkehlchen.data.db)
    db_conn = rotkehlchen_api_server.rest_api.rotkehlchen.data.db.conn
    addrs_1 = [
        to_checksum_address('0x9531c059098e3d194ff87febb587ab07b30b1306'),
        to_checksum_address('0x2b888954421b424c5d3d9ce9bb67c9bd47537d12'),
    ]
    response = requests.post(
        api_url_for(
            rotkehlchen_api_server,
            'reverseensresource',
        ),
        json={'ethereum_addresses': addrs_1},
    )
    result = assert_proper_response_with_result(response)
    expected_resp_1 = {
        addrs_1[0]: 'rotki.eth',
        addrs_1[1]: 'lefteris.eth',
    }
    assert result == expected_resp_1

    # second batch shares one address with the first and adds one address
    # with no ENS name (it is omitted from the response)
    addrs_2 = [
        to_checksum_address('0x9531c059098e3d194ff87febb587ab07b30b1306'),
        to_checksum_address('0xa4b73b39f73f73655e9fdc5d167c21b3fa4a1ed6'),
        to_checksum_address('0x71C7656EC7ab88b098defB751B7401B5f6d8976F'),
    ]
    timestamps_before_request = _get_timestamps(db, addrs_1)
    response = requests.post(
        api_url_for(
            rotkehlchen_api_server,
            'reverseensresource',
        ),
        json={'ethereum_addresses': addrs_2},
    )
    result = assert_proper_response_with_result(response)
    all_addrs = list(set(addrs_1) | set(addrs_2))
    expected_resp_2 = {
        addrs_2[0]: 'rotki.eth',
        addrs_2[1]: 'abc.eth',
    }
    assert result == expected_resp_2
    # the cached entries for the first batch must not have been re-resolved
    timestamps_after_request = _get_timestamps(db, addrs_1)
    assert timestamps_before_request == timestamps_after_request

    # Going to check that after request with ignore_cache ens_mappings will be updated
    db_changes_before = db_conn.total_changes
    # invalid address input should be rejected with a validation error
    response = requests.post(
        api_url_for(
            rotkehlchen_api_server,
            'reverseensresource',
        ),
        json={'ethereum_addresses': ['0xqwerty']},
    )
    assert_error_response(
        response=response,
        contained_in_msg='Given value 0xqwerty is not an ethereum address',
        status_code=HTTPStatus.BAD_REQUEST,
    )
    requests.post(
        api_url_for(
            rotkehlchen_api_server,
            'reverseensresource',
        ),
        json={'ethereum_addresses': all_addrs, 'ignore_cache': True},
    )
    db_changes_after = db_conn.total_changes
    # Check that we have 5 updates because we have 5 rows in ens_mappings table
    assert db_changes_after == 5 + db_changes_before
def test_query_asset_movements(rotkehlchen_api_server_with_exchanges):
    """Test that using the asset movements query endpoint works fine"""
    async_query = random.choice([False, True])
    server = rotkehlchen_api_server_with_exchanges
    setup = prepare_rotki_for_history_processing_test(server.rest_api.rotkehlchen)
    # setup = mock_history_processing_and_exchanges(server.rest_api.rotkehlchen)
    # query asset movements of one specific exchange
    with setup.polo_patch:
        response = requests.get(
            api_url_for(
                server,
                "assetmovementsresource",
            ),
            json={'location': 'poloniex', 'async_query': async_query},
        )
        # keep the poloniex mock active while waiting for an async task
        if async_query:
            task_id = assert_ok_async_response(response)
            outcome = wait_for_async_task(rotkehlchen_api_server_with_exchanges, task_id)
            result = outcome['result']
        else:
            result = assert_proper_response_with_result(response)
    assert result['entries_found'] == 4
    assert result['entries_limit'] == FREE_ASSET_MOVEMENTS_LIMIT
    poloniex_ids = [x['entry']['identifier'] for x in result['entries']]
    assert_poloniex_asset_movements([x['entry'] for x in result['entries']], deserialized=True)
    assert all(x['ignored_in_accounting'] is False for x in result['entries']), 'ignored should be false'  # noqa: E501

    # now let's ignore all poloniex action ids
    response = requests.put(
        api_url_for(
            rotkehlchen_api_server_with_exchanges,
            "ignoredactionsresource",
        ),
        json={'action_type': 'asset movement', 'action_ids': poloniex_ids},
    )
    result = assert_proper_response_with_result(response)
    assert set(result['asset movement']) == set(poloniex_ids)

    # query asset movements of all exchanges
    with setup.polo_patch:
        response = requests.get(
            api_url_for(server, "assetmovementsresource"),
            json={'async_query': async_query},
        )
        if async_query:
            task_id = assert_ok_async_response(response)
            outcome = wait_for_async_task(rotkehlchen_api_server_with_exchanges, task_id)
            result = outcome['result']
        else:
            result = assert_proper_response_with_result(response)
    movements = result['entries']
    assert_poloniex_asset_movements([x['entry'] for x in movements if x['entry']['location'] == 'poloniex'], True)  # noqa: E501
    assert_kraken_asset_movements([x['entry'] for x in movements if x['entry']['location'] == 'kraken'], True)  # noqa: E501

    def assert_okay(response):
        """Helper function for DRY checking below assertions"""
        if async_query:
            task_id = assert_ok_async_response(response)
            outcome = wait_for_async_task(rotkehlchen_api_server_with_exchanges, task_id)
            result = outcome['result']
        else:
            result = assert_proper_response_with_result(response)
        movements = result['entries']
        assert_poloniex_asset_movements(
            to_check_list=[x['entry'] for x in movements if x['entry']['location'] == 'poloniex'],
            deserialized=True,
            movements_to_check=(1, 2),
        )
        msg = 'poloniex asset movements should have now been ignored for accounting'
        assert all(x['ignored_in_accounting'] is True for x in movements if x['entry']['location'] == 'poloniex'), msg  # noqa: E501
        assert_kraken_asset_movements(
            to_check_list=[x['entry'] for x in movements if x['entry']['location'] == 'kraken'],
            deserialized=True,
            movements_to_check=(0, 1, 2),
        )

    # and now query them in a specific time range excluding some asset movements
    data = {'from_timestamp': 1439994442, 'to_timestamp': 1458994442, 'async_query': async_query}
    with setup.polo_patch:
        response = requests.get(api_url_for(server, "assetmovementsresource"), json=data)
        assert_okay(response)
    # do the same but with query args. This serves as test of from/to timestamp with query args
    with setup.polo_patch:
        response = requests.get(
            api_url_for(server, "assetmovementsresource") + '?' + urlencode(data))
        assert_okay(response)
def test_get_events(
        rotkehlchen_api_server,
        ethereum_accounts,  # pylint: disable=unused-argument
        rotki_premium_credentials,  # pylint: disable=unused-argument
        start_with_valid_premium,  # pylint: disable=unused-argument
):
    """Test that querying AdEx history events works and they can be purged from the DB"""
    async_query = random.choice([False, True])
    rotki = rotkehlchen_api_server.rest_api.rotkehlchen
    # Set module premium is required for calling `get_balances()`
    premium = None
    if start_with_valid_premium:
        premium = Premium(rotki_premium_credentials)
    rotki.chain_manager.adex.premium = premium
    setup = setup_balances(
        rotki,
        ethereum_accounts=ethereum_accounts,
        btc_accounts=None,
        original_queries=['zerion', 'logs', 'blocknobytime'],
    )
    with ExitStack() as stack:
        # patch ethereum/etherscan to not autodetect tokens
        setup.enter_ethereum_patches(stack)
        response = requests.get(
            api_url_for(rotkehlchen_api_server, 'adexhistoryresource'),
            json={'async_query': async_query, 'to_timestamp': 1611747322},
        )
        if async_query:
            # keep the patches active while waiting for the async task
            task_id = assert_ok_async_response(response)
            outcome = wait_for_async_task(rotkehlchen_api_server, task_id)
            assert outcome['message'] == ''
            result = outcome['result']
        else:
            result = assert_proper_response_with_result(response)

    identity_address = '0x2a6c38D16BFdc7b4a20f1F982c058F07BDCe9204'
    tom_pool_id = '0x2ce0c96383fb229d9776f33846e983a956a7d95844fac57b180ed0071d93bb28'
    bond_id = '0x540cab9883923c01e657d5da4ca5674b6e4626b4a148224635495502d674c7c5'
    channel_id = '0x30d87bab0ef1e7f8b4c3b894ca2beed41bbd54c481f31e5791c1e855c9dbf4ba'
    result = result[ADEX_TEST_ADDR]
    # only the first three events are pinned exactly; the rest are just counted
    expected_events = [Bond(
        tx_hash='0x9989f47c6c0a761f98f910ac24e2438d858be96c12124a13be4bb4b3150c55ea',
        address=ADEX_TEST_ADDR,
        identity_address=identity_address,
        timestamp=1604366004,
        bond_id=bond_id,
        pool_id=tom_pool_id,
        value=Balance(FVal(100000), FVal(200000)),
        nonce=0,
        slashed_at=0,
    ), ChannelWithdraw(
        tx_hash='0xa9ee91af823c0173fc5ada908ff9fe3f4d7c84a2c9da795f0889b3f4ace75b13',
        address=ADEX_TEST_ADDR,
        identity_address=identity_address,
        timestamp=1607453764,
        channel_id=channel_id,
        pool_id=tom_pool_id,
        value=Balance(FVal('5056.894263641728544592'), FVal('0')),
        token=None,
    ), Unbond(
        tx_hash='0xa9ee91af823c0173fc5ada908ff9fe3f4d7c84a2c9da795f0889b3f4ace75b13',
        address=ADEX_TEST_ADDR,
        identity_address=identity_address,
        timestamp=1607453764,
        bond_id=bond_id,
        pool_id=tom_pool_id,
        value=Balance(FVal(100000), FVal(200000)),
    )]
    assert len(result['events']) == 8
    assert result['events'][:len(expected_events)] == [x.serialize() for x in expected_events]
    assert 'staking_details' in result
    # Make sure events end up in the DB
    assert len(rotki.data.db.get_adex_events()) != 0
    # test adex data purging from the db works
    response = requests.delete(api_url_for(
        rotkehlchen_api_server,
        'namedethereummoduledataresource',
        module_name='adex',
    ))
    assert_simple_ok_response(response)
    assert len(rotki.data.db.get_adex_events()) == 0
def test_update_conflicts(rotkehlchen_api_server, globaldb): """Test that conflicts in an asset update are handled properly""" async_query = random.choice([False, True]) rotki = rotkehlchen_api_server.rest_api.rotkehlchen update_1 = """INSERT INTO assets(identifier,type,name,symbol,started, swapped_for, coingecko, cryptocompare, details_reference) VALUES("121-ada-FADS-as", "F","A name","SYMBOL",NULL, NULL,"", "", "121-ada-FADS-as");INSERT INTO common_asset_details(asset_id, forked) VALUES("121-ada-FADS-as", "BTC"); * INSERT INTO ethereum_tokens(address, decimals, protocol) VALUES("0x6B175474E89094C44Da98b954EedeAC495271d0F", 8, "maker");INSERT INTO assets(identifier,type, name, symbol,started, swapped_for, coingecko, cryptocompare, details_reference) VALUES("_ceth_0x6B175474E89094C44Da98b954EedeAC495271d0F", "C", "New Multi Collateral DAI", "NDAI", 1573672677, NULL, "dai", NULL, "0x6B175474E89094C44Da98b954EedeAC495271d0F"); * INSERT INTO assets(identifier,type,name,symbol,started, swapped_for, coingecko, cryptocompare, details_reference) VALUES("DASH", "B","Dash","DASH",1337, NULL, "dash-coingecko", NULL, "DASH");INSERT INTO common_asset_details(asset_id, forked) VALUES("DASH", "BTC"); * INSERT INTO ethereum_tokens(address, decimals, protocol) VALUES("0x1B175474E89094C44Da98b954EedeAC495271d0F", 18, NULL); INSERT INTO assets(identifier,type, name, symbol,started, swapped_for, coingecko, cryptocompare, details_reference) VALUES("_ceth_0x1B175474E89094C44Da98b954EedeAC495271d0F", "C", "Conflicting token", "CTK", 1573672677, NULL, "ctk", NULL, "0x1B175474E89094C44Da98b954EedeAC495271d0F"); * """ # noqa: E501 globaldb.add_asset( # add a conflicting token asset_id='_ceth_0x1B175474E89094C44Da98b954EedeAC495271d0F', asset_type=AssetType.ETHEREUM_TOKEN, data=EthereumToken.initialize( address=ChecksumEthAddress( '0x1B175474E89094C44Da98b954EedeAC495271d0F'), decimals=12, name='Conflicting token', symbol='CTK', started=None, swapped_for=None, coingecko='ctk', 
cryptocompare=None, protocol=None, underlying_tokens=None, ), ) globaldb.add_user_owned_assets( [Asset('_ceth_0x1B175474E89094C44Da98b954EedeAC495271d0F')]) update_patch = mock_asset_updates( original_requests_get=requests.get, latest=999999991, updates={ "999999991": { "changes": 3, "min_schema_version": GLOBAL_DB_VERSION, "max_schema_version": GLOBAL_DB_VERSION, } }, sql_actions={"999999991": update_1}, ) globaldb.add_setting_value(ASSETS_VERSION_KEY, 999999990) start_assets_num = len(globaldb.get_all_asset_data(mapping=False)) with update_patch: response = requests.get( api_url_for( rotkehlchen_api_server, 'assetupdatesresource', ), json={'async_query': async_query}, ) if async_query: task_id = assert_ok_async_response(response) outcome = wait_for_async_task( rotkehlchen_api_server, task_id, ) result = outcome['result'] assert outcome['message'] == '' else: result = assert_proper_response_with_result(response) assert result['local'] == 999999990 assert result['remote'] == 999999991 assert result['new_changes'] == 3 response = requests.post( api_url_for( rotkehlchen_api_server, 'assetupdatesresource', ), json={'async_query': async_query}, ) if async_query: task_id = assert_ok_async_response(response) outcome = wait_for_async_task( rotkehlchen_api_server, task_id, ) assert outcome[ 'message'] == 'Found conflicts during assets upgrade' result = outcome['result'] else: result = assert_proper_response_with_result( response, message='Found conflicts during assets upgrade', status_code=HTTPStatus.CONFLICT, ) # Make sure that nothing was committed assert globaldb.get_setting_value(ASSETS_VERSION_KEY, None) == 999999990 assert len( globaldb.get_all_asset_data(mapping=False)) == start_assets_num with pytest.raises(UnknownAsset): Asset('121-ada-FADS-as') errors = rotki.msg_aggregator.consume_errors() warnings = rotki.msg_aggregator.consume_warnings() assert len(errors) == 0, f'Found errors: {errors}' assert len(warnings) == 0, f'Found warnings: {warnings}' # See that we 
get 3 conflicts expected_result = [{ 'identifier': '_ceth_0x6B175474E89094C44Da98b954EedeAC495271d0F', 'local': { 'name': 'Multi Collateral Dai', 'symbol': 'DAI', 'asset_type': 'ethereum token', 'started': 1573672677, 'forked': None, 'swapped_for': None, 'ethereum_address': '0x6B175474E89094C44Da98b954EedeAC495271d0F', 'decimals': 18, 'cryptocompare': None, 'coingecko': 'dai', 'protocol': None, }, 'remote': { 'name': 'New Multi Collateral DAI', 'symbol': 'NDAI', 'asset_type': 'ethereum token', 'started': 1573672677, 'forked': None, 'swapped_for': None, 'ethereum_address': '0x6B175474E89094C44Da98b954EedeAC495271d0F', 'decimals': 8, 'cryptocompare': None, 'coingecko': 'dai', 'protocol': 'maker', }, }, { 'identifier': 'DASH', 'local': { 'name': 'Dash', 'symbol': 'DASH', 'asset_type': 'own chain', 'started': 1390095618, 'forked': None, 'swapped_for': None, 'ethereum_address': None, 'decimals': None, 'cryptocompare': None, 'coingecko': 'dash', 'protocol': None, }, 'remote': { 'name': 'Dash', 'symbol': 'DASH', 'asset_type': 'own chain', 'started': 1337, 'forked': 'BTC', 'swapped_for': None, 'ethereum_address': None, 'decimals': None, 'cryptocompare': None, 'coingecko': 'dash-coingecko', 'protocol': None, }, }, { 'identifier': '_ceth_0x1B175474E89094C44Da98b954EedeAC495271d0F', 'local': { 'asset_type': 'ethereum token', 'coingecko': 'ctk', 'cryptocompare': None, 'decimals': 12, 'ethereum_address': '0x1B175474E89094C44Da98b954EedeAC495271d0F', 'forked': None, 'name': 'Conflicting token', 'protocol': None, 'started': None, 'swapped_for': None, 'symbol': 'CTK', }, 'remote': { 'asset_type': 'ethereum token', 'coingecko': 'ctk', 'cryptocompare': None, 'decimals': 18, 'ethereum_address': '0x1b175474E89094C44DA98B954EeDEAC495271d0f', 'forked': None, 'name': 'Conflicting token', 'protocol': None, 'started': 1573672677, 'swapped_for': None, 'symbol': 'CTK', }, }] assert result == expected_result # now try the update again but specify the conflicts resolution conflicts = { 
'_ceth_0x6B175474E89094C44Da98b954EedeAC495271d0F': 'remote', 'DASH': 'local', '_ceth_0x1B175474E89094C44Da98b954EedeAC495271d0F': 'remote' } # noqa: E501 response = requests.post( api_url_for( rotkehlchen_api_server, 'assetupdatesresource', ), json={ 'async_query': async_query, 'conflicts': conflicts }, ) if async_query: task_id = assert_ok_async_response(response) outcome = wait_for_async_task( rotkehlchen_api_server, task_id, ) assert outcome['message'] == '' result = outcome['result'] else: result = assert_proper_response_with_result( response, message='', status_code=HTTPStatus.OK, ) cursor = globaldb._conn.cursor() # check conflicts were solved as per the given choices and new asset also added assert result is True assert globaldb.get_setting_value(ASSETS_VERSION_KEY, None) == 999999991 errors = rotki.msg_aggregator.consume_errors() warnings = rotki.msg_aggregator.consume_warnings() assert len(errors) == 0, f'Found errors: {errors}' assert len(warnings) == 0, f'Found warnings: {warnings}' dai = EthereumToken('0x6B175474E89094C44Da98b954EedeAC495271d0F') assert dai.identifier == strethaddress_to_identifier( '0x6B175474E89094C44Da98b954EedeAC495271d0F') # noqa: E501 assert dai.name == 'New Multi Collateral DAI' assert dai.symbol == 'NDAI' assert dai.asset_type == AssetType.ETHEREUM_TOKEN assert dai.started == 1573672677 assert dai.forked is None assert dai.swapped_for is None assert dai.coingecko == 'dai' assert dai.cryptocompare is None assert dai.ethereum_address == '0x6B175474E89094C44Da98b954EedeAC495271d0F' assert dai.decimals == 8 assert dai.protocol == 'maker' # make sure data is in both tables assert cursor.execute( 'SELECT COUNT(*) from ethereum_tokens WHERE address="0x6B175474E89094C44Da98b954EedeAC495271d0F";' ).fetchone()[0] == 1 # noqa: E501 assert cursor.execute( 'SELECT COUNT(*) from assets WHERE identifier="_ceth_0x6B175474E89094C44Da98b954EedeAC495271d0F";' ).fetchone()[0] == 1 # noqa: E501 dash = Asset('DASH') assert dash.identifier == 'DASH' 
assert dash.name == 'Dash' assert dash.symbol == 'DASH' assert dash.asset_type == AssetType.OWN_CHAIN assert dash.started == 1390095618 assert dash.forked is None assert dash.swapped_for is None assert dash.coingecko == 'dash' assert dash.cryptocompare is None assert cursor.execute( 'SELECT COUNT(*) from common_asset_details WHERE asset_id="DASH";' ).fetchone()[0] == 1 # noqa: E501 assert cursor.execute( 'SELECT COUNT(*) from assets WHERE identifier="DASH";').fetchone( )[0] == 1 # noqa: E501 new_asset = Asset('121-ada-FADS-as') assert new_asset.identifier == '121-ada-FADS-as' assert new_asset.name == 'A name' assert new_asset.symbol == 'SYMBOL' assert new_asset.asset_type == AssetType.COUNTERPARTY_TOKEN assert new_asset.started is None assert new_asset.forked == 'BTC' assert new_asset.swapped_for is None assert new_asset.coingecko == '' assert new_asset.cryptocompare == '' assert cursor.execute( 'SELECT COUNT(*) from common_asset_details WHERE asset_id="121-ada-FADS-as";' ).fetchone()[0] == 1 # noqa: E501 assert cursor.execute( 'SELECT COUNT(*) from assets WHERE identifier="121-ada-FADS-as";' ).fetchone()[0] == 1 # noqa: E501 ctk = EthereumToken('0x1B175474E89094C44Da98b954EedeAC495271d0F') assert ctk.name == 'Conflicting token' assert ctk.symbol == 'CTK' assert ctk.asset_type == AssetType.ETHEREUM_TOKEN assert ctk.started == 1573672677 assert ctk.forked is None assert ctk.swapped_for is None assert ctk.coingecko == 'ctk' assert ctk.cryptocompare is None assert ctk.ethereum_address == '0x1B175474E89094C44Da98b954EedeAC495271d0F' assert ctk.decimals == 18 assert ctk.protocol is None assert cursor.execute( 'SELECT COUNT(*) from ethereum_tokens WHERE address="0x1B175474E89094C44Da98b954EedeAC495271d0F";' ).fetchone()[0] == 1 # noqa: E501 assert cursor.execute( 'SELECT COUNT(*) from assets WHERE identifier="_ceth_0x1B175474E89094C44Da98b954EedeAC495271d0F";' ).fetchone()[0] == 1 # noqa: E501
def test_editing_custom_assets(rotkehlchen_api_server, globaldb):
    """Test that the endpoint for editing a custom asset works

    Adds a custom asset, edits every editable field via PATCH, verifies the
    changes landed in the global DB, and then exercises the error cases:
    unknown identifier, disallowed asset type, unknown forked/swapped_for
    assets and invalid coingecko/cryptocompare identifiers.
    """
    custom1 = {
        'asset_type': 'own chain',
        'name': 'foo token',
        'symbol': 'FOO',
        'started': 5,
    }
    response = requests.put(
        api_url_for(
            rotkehlchen_api_server,
            'allassetsresource',
        ),
        json=custom1,
    )
    result = assert_proper_response_with_result(response)
    custom1_id = result['identifier']
    # Confirm the addition is reflected in the global DB before editing
    data = globaldb.get_asset_data(identifier=custom1_id, form_with_incomplete_data=False)
    assert data.identifier == custom1_id
    assert data.asset_type == AssetType.OWN_CHAIN
    assert data.name == custom1['name']
    assert data.symbol == custom1['symbol']
    assert data.started == custom1['started']

    # Edit all fields of the asset, including changing its asset type
    custom1_v2 = {
        'identifier': custom1_id,
        'asset_type': 'stellar token',
        'name': 'goo token',
        'symbol': 'GOO',
        'started': 6,
        'forked': custom1_id,
        'swapped_for': 'ETH',
        'coingecko': 'internet-computer',
        'cryptocompare': 'ICP',
    }
    response = requests.patch(
        api_url_for(
            rotkehlchen_api_server,
            'allassetsresource',
        ),
        json=custom1_v2,
    )
    result = assert_proper_response_with_result(response)
    assert result is True
    # Every edited field should now be visible in the global DB
    data = globaldb.get_asset_data(identifier=custom1_id, form_with_incomplete_data=False)
    assert data.identifier == custom1_id
    assert data.asset_type == AssetType.STELLAR_TOKEN
    assert data.name == custom1_v2['name']
    assert data.symbol == custom1_v2['symbol']
    assert data.started == custom1_v2['started']
    assert data.forked == custom1_v2['forked']
    assert data.swapped_for == custom1_v2['swapped_for']
    assert data.coingecko == custom1_v2['coingecko']
    assert data.cryptocompare == custom1_v2['cryptocompare']

    # try to edit an asset with a non-existing identifier
    bad_asset = {
        'identifier': 'notexisting',
        'asset_type': 'own chain',
        'name': 'Euro',
        'symbol': 'EUR',
    }
    response = requests.patch(
        api_url_for(
            rotkehlchen_api_server,
            'allassetsresource',
        ),
        json=bad_asset,
    )
    expected_msg = 'Tried to edit non existing asset with identifier notexisting'
    assert_error_response(
        response=response,
        contained_in_msg=expected_msg,
        status_code=HTTPStatus.CONFLICT,
    )
    # try to edit an ethereum token with the custom asset endpoint
    bad_asset = {
        'identifier': 'EUR',
        'asset_type': 'ethereum token',
        'name': 'ethereum Euro',
        'symbol': 'EUR',
    }
    response = requests.patch(
        api_url_for(
            rotkehlchen_api_server,
            'allassetsresource',
        ),
        json=bad_asset,
    )
    expected_msg = 'Asset type ethereum token is not allowed in this endpoint'
    assert_error_response(
        response=response,
        contained_in_msg=expected_msg,
        status_code=HTTPStatus.BAD_REQUEST,
    )
    # try to edit non existing forked and swapped for
    bad_asset = {
        'identifier': 'EUR',
        'asset_type': 'omni token',
        'name': 'Euro',
        'symbol': 'EUR',
        'forked': 'dsadsadsadasd',
        'swapped_for': 'asdsadsad',
    }
    response = requests.patch(
        api_url_for(
            rotkehlchen_api_server,
            'allassetsresource',
        ),
        json=bad_asset,
    )
    expected_msg = 'Unknown asset'
    assert_error_response(
        response=response,
        contained_in_msg=expected_msg,
        status_code=HTTPStatus.BAD_REQUEST,
    )
    # try to edit invalid coingecko
    bad_id = 'dsadsad'
    bad_asset = {
        'identifier': 'EUR',
        'asset_type': 'omni token',
        'name': 'Euro',
        'symbol': 'EUR',
        'coingecko': bad_id,
    }
    response = requests.patch(
        api_url_for(
            rotkehlchen_api_server,
            'allassetsresource',
        ),
        json=bad_asset,
    )
    expected_msg = f'Given coingecko identifier {bad_id} is not valid'
    assert_error_response(
        response=response,
        contained_in_msg=expected_msg,
        status_code=HTTPStatus.BAD_REQUEST,
    )
    # try to add invalid cryptocompare
    bad_id = 'dsadsad'
    bad_asset = {
        'identifier': 'EUR',
        'asset_type': 'omni token',
        'name': 'Euro',
        'symbol': 'EUR',
        'cryptocompare': bad_id,
    }
    response = requests.patch(
        api_url_for(
            rotkehlchen_api_server,
            'allassetsresource',
        ),
        json=bad_asset,
    )
    expected_msg = f'Given cryptocompare identifier {bad_id} isnt valid'
    assert_error_response(
        response=response,
        contained_in_msg=expected_msg,
        status_code=HTTPStatus.BAD_REQUEST,
    )
def test_update_from_early_clean_db(rotkehlchen_api_server, globaldb):
    """
    Test that if the asset upgrade happens from a very early DB that has had
    no assets version key set we still upgrade properly and set the assets version properly.

    The mocked update payload contains one new asset insertion and one UPDATE
    that conflicts with local data (GNT's swapped_for), so the first POST must
    report a conflict and commit nothing, while the second POST with an
    explicit conflict resolution must succeed.
    """
    rotki = rotkehlchen_api_server.rest_api.rotkehlchen
    # Raw SQL actions for mocked remote update v1. The '*' separates the two
    # update entries of the payload. Must stay byte-exact — it is parsed by the
    # asset updater, not by this test.
    update_1 = """INSERT INTO assets(identifier,type,name,symbol,started, swapped_for, coingecko, cryptocompare, details_reference) VALUES("121-ada-FADS-as", "F","A name","SYMBOL",NULL, NULL,"", "", "121-ada-FADS-as");INSERT INTO common_asset_details(asset_id, forked) VALUES("121-ada-FADS-as", "BTC"); * UPDATE assets SET swapped_for="_ceth_0xA8d35739EE92E69241A2Afd9F513d41021A07972" WHERE identifier="_ceth_0xa74476443119A942dE498590Fe1f2454d7D4aC0d"; INSERT INTO ethereum_tokens(address, decimals, protocol) VALUES("0xa74476443119A942dE498590Fe1f2454d7D4aC0d", 18, NULL);INSERT INTO assets(identifier,type, name, symbol,started, swapped_for, coingecko, cryptocompare, details_reference) VALUES("_ceth_0xa74476443119A942dE498590Fe1f2454d7D4aC0d", "C", "Golem", "GNT", 1478810650, "_ceth_0xA8d35739EE92E69241A2Afd9F513d41021A07972", "golem", NULL, "0xa74476443119A942dE498590Fe1f2454d7D4aC0d"); """  # noqa: E501
    update_patch = mock_asset_updates(
        original_requests_get=requests.get,
        latest=1,
        updates={"1": {
            "changes": 2,
            "min_schema_version": GLOBAL_DB_VERSION,
            "max_schema_version": GLOBAL_DB_VERSION,
        }},
        sql_actions={"1": update_1},
    )
    # Emulate an early DB: remove the assets version setting entirely
    cursor = globaldb._conn.cursor()
    cursor.execute(f'DELETE FROM settings WHERE name="{ASSETS_VERSION_KEY}"')
    start_assets_num = len(globaldb.get_all_asset_data(mapping=False))
    with update_patch:
        # With no version key present, local version should be reported as 0
        response = requests.get(
            api_url_for(
                rotkehlchen_api_server,
                'assetupdatesresource',
            ),
        )
        result = assert_proper_response_with_result(response)
        assert result['local'] == 0
        assert result['remote'] == 1
        assert result['new_changes'] == 2

        response = requests.post(
            api_url_for(
                rotkehlchen_api_server,
                'assetupdatesresource',
            ),
        )
        result = assert_proper_response_with_result(
            response,
            message='Found conflicts during assets upgrade',
            status_code=HTTPStatus.CONFLICT,
        )

        # Make sure that nothing was committed
        assert globaldb.get_setting_value(ASSETS_VERSION_KEY, 0) == 0
        assert len(globaldb.get_all_asset_data(mapping=False)) == start_assets_num
        with pytest.raises(UnknownAsset):
            Asset('121-ada-FADS-as')
        errors = rotki.msg_aggregator.consume_errors()
        warnings = rotki.msg_aggregator.consume_warnings()
        assert len(errors) == 0, f'Found errors: {errors}'
        assert len(warnings) == 0, f'Found warnings: {warnings}'
        # See that we get a conflict
        expected_result = [{
            'identifier': '_ceth_0xa74476443119A942dE498590Fe1f2454d7D4aC0d',
            'local': {
                'name': 'Golem',
                'symbol': 'GNT',
                'asset_type': 'ethereum token',
                'started': 1478810650,
                'forked': None,
                'swapped_for': '_ceth_0x7DD9c5Cba05E151C895FDe1CF355C9A1D5DA6429',
                'ethereum_address': '0xa74476443119A942dE498590Fe1f2454d7D4aC0d',
                'decimals': 18,
                'cryptocompare': None,
                'coingecko': 'golem',
                'protocol': None,
            },
            'remote': {
                'name': 'Golem',
                'symbol': 'GNT',
                'asset_type': 'ethereum token',
                'started': 1478810650,
                'forked': None,
                'swapped_for': '_ceth_0xA8d35739EE92E69241A2Afd9F513d41021A07972',
                'ethereum_address': '0xa74476443119A942dE498590Fe1f2454d7D4aC0d',
                'decimals': 18,
                'cryptocompare': None,
                'coingecko': 'golem',
                'protocol': None,
            },
        }]
        assert result == expected_result

        # now try the update again but specify the conflicts resolution
        conflicts = {'_ceth_0xa74476443119A942dE498590Fe1f2454d7D4aC0d': 'remote'}
        response = requests.post(
            api_url_for(
                rotkehlchen_api_server,
                'assetupdatesresource',
            ),
            json={'conflicts': conflicts},
        )
        result = assert_proper_response_with_result(
            response,
            message='',
            status_code=HTTPStatus.OK,
        )
        # check new asset was added and conflict was ignored with an error due to
        # inability to do anything with the missing swapped_for
        assert result is True
        assert globaldb.get_setting_value(ASSETS_VERSION_KEY, 0) == 1
        gnt = EthereumToken('0xa74476443119A942dE498590Fe1f2454d7D4aC0d')
        assert gnt.identifier == strethaddress_to_identifier('0xa74476443119A942dE498590Fe1f2454d7D4aC0d')  # noqa: E501
        assert gnt.name == 'Golem'
        assert gnt.symbol == 'GNT'
        assert gnt.asset_type == AssetType.ETHEREUM_TOKEN
        assert gnt.started == 1478810650
        assert gnt.forked is None
        # GNT keeps the local swapped_for since the remote one was unresolvable
        assert gnt.swapped_for == A_GLM.identifier
        assert gnt.coingecko == 'golem'
        assert gnt.cryptocompare is None
        assert gnt.ethereum_address == '0xa74476443119A942dE498590Fe1f2454d7D4aC0d'
        assert gnt.decimals == 18
        assert gnt.protocol is None

        new_asset = Asset('121-ada-FADS-as')
        assert new_asset.identifier == '121-ada-FADS-as'
        assert new_asset.name == 'A name'
        assert new_asset.symbol == 'SYMBOL'
        assert new_asset.asset_type == AssetType.COUNTERPARTY_TOKEN
        assert new_asset.started is None
        assert new_asset.forked == 'BTC'
        assert new_asset.swapped_for is None
        assert new_asset.coingecko == ''
        assert new_asset.cryptocompare == ''

        errors = rotki.msg_aggregator.consume_errors()
        warnings = rotki.msg_aggregator.consume_warnings()
        assert len(errors) == 0, f'Found errors: {errors}'
        # The unresolvable conflict surfaces as exactly one warning
        assert len(warnings) == 1
        assert f'Failed to resolve conflict for {gnt.identifier} in the DB during the v1 assets update. Skipping entry' in warnings[0]  # noqa: E501
def test_deleting_custom_assets(rotkehlchen_api_server, globaldb):
    """Test that the endpoint for deleting a custom asset works

    Creates three assets where custom2 references custom1 (forked), then
    checks: free deletion works, deletion blocked by a foreign-key style
    constraint fails with CONFLICT, and deleting an unknown identifier is
    rejected.
    """
    custom1 = {
        'asset_type': 'own chain',
        'name': 'foo token',
        'symbol': 'FOO',
        'started': 5,
    }
    response = requests.put(
        api_url_for(
            rotkehlchen_api_server,
            'allassetsresource',
        ),
        json=custom1,
    )
    result = assert_proper_response_with_result(response)
    custom1_id = result['identifier']
    # custom2 forks from custom1, creating the dependency exercised below
    custom2 = {
        'asset_type': 'stellar token',
        'name': 'goo token',
        'symbol': 'GOO',
        'started': 6,
        'forked': custom1_id,
        'swapped_for': 'ETH',
        'coingecko': 'internet-computer',
        'cryptocompare': 'ICP',
    }
    response = requests.put(
        api_url_for(
            rotkehlchen_api_server,
            'allassetsresource',
        ),
        json=custom2,
    )
    result = assert_proper_response_with_result(response)
    custom2_id = result['identifier']
    custom3 = {
        'asset_type': 'own chain',
        'name': 'boo token',
        'symbol': 'BOO',
    }
    response = requests.put(
        api_url_for(
            rotkehlchen_api_server,
            'allassetsresource',
        ),
        json=custom3,
    )
    result = assert_proper_response_with_result(response)
    custom3_id = result['identifier']

    # Delete custom 3 and assert it works
    response = requests.delete(
        api_url_for(
            rotkehlchen_api_server,
            'allassetsresource',
        ),
        json={'identifier': custom3_id},
    )
    result = assert_proper_response_with_result(response)
    assert result is True
    assert globaldb.get_asset_data(identifier=custom3_id, form_with_incomplete_data=False) is None

    # Try to delete custom1 but make sure it fails. It's used by custom2
    response = requests.delete(
        api_url_for(
            rotkehlchen_api_server,
            'allassetsresource',
        ),
        json={'identifier': custom1_id},
    )
    expected_msg = 'Tried to delete asset with name "foo token" and symbol "FOO" but its deletion would violate a constraint so deletion failed'  # noqa: E501
    assert_error_response(
        response=response,
        contained_in_msg=expected_msg,
        status_code=HTTPStatus.CONFLICT,
    )

    # Delete custom 2 and assert it works
    response = requests.delete(
        api_url_for(
            rotkehlchen_api_server,
            'allassetsresource',
        ),
        json={'identifier': custom2_id},
    )
    result = assert_proper_response_with_result(response)
    assert result is True
    assert globaldb.get_asset_data(identifier=custom2_id, form_with_incomplete_data=False) is None

    # now custom 1 should be deletable
    response = requests.delete(
        api_url_for(
            rotkehlchen_api_server,
            'allassetsresource',
        ),
        json={'identifier': custom1_id},
    )
    result = assert_proper_response_with_result(response)
    assert result is True
    assert globaldb.get_asset_data(identifier=custom1_id, form_with_incomplete_data=False) is None

    # Make sure that deleting unknown asset is detected
    response = requests.delete(
        api_url_for(
            rotkehlchen_api_server,
            'allassetsresource',
        ),
        json={'identifier': 'notexisting'},
    )
    expected_msg = 'Tried to delete asset with identifier notexisting but it was not found in the DB'  # noqa: E501
    assert_error_response(
        response=response,
        contained_in_msg=expected_msg,
        status_code=HTTPStatus.CONFLICT,
    )
def test_query_transactions_over_limit(
        rotkehlchen_api_server,
        ethereum_accounts,
        start_with_valid_premium,
):
    """Test the ethereum transactions free-tier limit.

    Seeds the DB with more transactions than FREE_ETH_TX_LIMIT split over two
    accounts and checks that free users get entries capped at the limit while
    premium users (start_with_valid_premium) get everything with no limit.
    """
    start_ts = 0
    end_ts = 1598453214
    rotki = rotkehlchen_api_server.rest_api.rotkehlchen
    db = rotki.data.db

    # total seeded txs: (FREE_ETH_TX_LIMIT - 10) for account 0 + 60 for account 1
    all_transactions_num = FREE_ETH_TX_LIMIT + 50
    transactions = [EthereumTransaction(
        tx_hash=x.to_bytes(2, byteorder='little'),
        timestamp=x,
        block_number=x,
        from_address=ethereum_accounts[0],
        to_address=make_ethereum_address(),
        value=x,
        gas=x,
        gas_price=x,
        gas_used=x,
        input_data=x.to_bytes(2, byteorder='little'),
        nonce=x,
    ) for x in range(FREE_ETH_TX_LIMIT - 10)]
    # second batch: offset the hashes by 500 to keep them unique
    transactions.extend([EthereumTransaction(
        tx_hash=(x + 500).to_bytes(2, byteorder='little'),
        timestamp=x,
        block_number=x,
        from_address=ethereum_accounts[1],
        to_address=make_ethereum_address(),
        value=x,
        gas=x,
        gas_price=x,
        gas_used=x,
        input_data=x.to_bytes(2, byteorder='little'),
        nonce=x,
    ) for x in range(60)])
    db.add_ethereum_transactions(transactions, from_etherscan=True)
    # Also make sure to update query ranges so as not to query etherscan at all
    for address in ethereum_accounts:
        DBQueryRanges(db).update_used_query_range(
            location_string=f'ethtxs_{address}',
            start_ts=start_ts,
            end_ts=end_ts,
            ranges_to_query=[],
        )

    # per-account expected entry counts: free tier caps the second account at 10
    free_expected_entries = [FREE_ETH_TX_LIMIT - 10, 10]
    premium_expected_entries = [FREE_ETH_TX_LIMIT - 10, 60]

    # Check that we get all transactions correctly even if we query two times
    for _ in range(2):
        for idx, address in enumerate(ethereum_accounts):
            response = requests.get(
                api_url_for(
                    rotkehlchen_api_server,
                    'ethereumtransactionsresource',
                ),
                json={'from_timestamp': start_ts, 'to_timestamp': end_ts, 'address': address},
            )
            result = assert_proper_response_with_result(response)
            if start_with_valid_premium:
                assert len(result['entries']) == premium_expected_entries[idx]
                assert result['entries_found'] == all_transactions_num
                # -1 signals "no limit" for premium users
                assert result['entries_limit'] == -1
            else:
                assert len(result['entries']) == free_expected_entries[idx]
                assert result['entries_found'] == all_transactions_num
                assert result['entries_limit'] == FREE_ETH_TX_LIMIT
def test_replace_asset(rotkehlchen_api_server, globaldb, only_in_globaldb):
    """Test that the endpoint for replacing an asset identifier works

    Test for both an asset owned by the user and not (the only_in_globaldb case)

    Creates a custom asset, optionally removes it from the user DB
    (only_in_globaldb), adds a manual balance referencing it, and after the
    replace checks that all user-DB and global-DB references moved to the
    target asset identifier.
    """
    rotki = rotkehlchen_api_server.rest_api.rotkehlchen
    cursor = rotki.data.db.conn.cursor()
    custom1 = {
        'asset_type': 'own chain',
        'name': 'Dfinity token',
        'symbol': 'ICP',
        'started': 5,
    }
    response = requests.put(
        api_url_for(
            rotkehlchen_api_server,
            'allassetsresource',
        ),
        json=custom1,
    )
    result = assert_proper_response_with_result(response)
    custom1_id = result['identifier']

    if only_in_globaldb:
        # Simulate an asset that exists only in the global DB
        cursor.execute('DELETE FROM assets where identifier=?', (custom1_id,))

    balances: List[Dict[str, Any]] = [{
        'asset': custom1_id,
        'label': 'ICP account',
        'amount': '50.315',
        'location': 'blockchain',
        'balance_type': 'asset',
    }]
    expected_balances = deepcopy(balances)
    # mocked price oracle values the asset at 1.5 USD — TODO confirm against fixture
    expected_balances[0]['usd_value'] = str(FVal(balances[0]['amount']) * FVal('1.5'))
    expected_balances[0]['tags'] = None
    expected_balances[0]['id'] = 1

    if not only_in_globaldb:
        response = requests.put(
            api_url_for(
                rotkehlchen_api_server,
                'manuallytrackedbalancesresource',
            ),
            json={'async_query': False, 'balances': balances},
        )
        assert_proper_response_with_result(response)

    # before the replacement. Check that we got a globaldb entry in owned assets
    global_cursor = globaldb.conn.cursor()
    if not only_in_globaldb:
        assert global_cursor.execute(
            'SELECT COUNT(*) FROM user_owned_assets WHERE asset_id=?',
            (custom1_id,),
        ).fetchone()[0] == 1
        # check the custom asset is in user db
        assert cursor.execute(
            'SELECT COUNT(*) FROM assets WHERE identifier=?',
            (custom1_id,),
        ).fetchone()[0] == 1
        # Check that the manual balance is returned
        response = requests.get(
            api_url_for(
                rotkehlchen_api_server,
                'manuallytrackedbalancesresource',
            ),
            json={'async_query': False},
        )
        result = assert_proper_response_with_result(response)
        assert result['balances'] == expected_balances
        assert cursor.execute(
            'SELECT COUNT(*) from manually_tracked_balances WHERE asset=?;',
            (custom1_id,),
        ).fetchone()[0] == 1

    # Perform the identifier replacement custom1_id -> 'ICP'
    response = requests.put(
        api_url_for(
            rotkehlchen_api_server,
            'assetsreplaceresource',
        ),
        json={'source_identifier': custom1_id, 'target_asset': 'ICP'},
    )
    assert_simple_ok_response(response)

    # after the replacement. Check that the manual balance is changed
    if not only_in_globaldb:
        response = requests.get(
            api_url_for(
                rotkehlchen_api_server,
                'manuallytrackedbalancesresource',
            ),
            json={'async_query': False},
        )
        result = assert_proper_response_with_result(response)
        expected_balances[0]['asset'] = 'ICP'
        assert result['balances'] == expected_balances
        # check the previous asset is not in userdb anymore
        assert cursor.execute(
            'SELECT COUNT(*) FROM assets WHERE identifier=?',
            (custom1_id,),
        ).fetchone()[0] == 0
        assert cursor.execute(
            'SELECT COUNT(*) FROM assets WHERE identifier=?',
            ('ICP',),
        ).fetchone()[0] == 1
        assert cursor.execute(
            'SELECT COUNT(*) from manually_tracked_balances WHERE asset=?;',
            (custom1_id,),
        ).fetchone()[0] == 0
        assert cursor.execute(
            'SELECT COUNT(*) from manually_tracked_balances WHERE asset=?;',
            ('ICP',),
        ).fetchone()[0] == 1

    # check the previous asset is not in globaldb owned assets
    assert global_cursor.execute(
        'SELECT COUNT(*) FROM user_owned_assets WHERE asset_id=?',
        (custom1_id,),
    ).fetchone()[0] == 0
    # check the previous asset is not in globaldb
    assert global_cursor.execute(
        'SELECT COUNT(*) FROM assets WHERE identifier=?',
        (custom1_id,),
    ).fetchone()[0] == 0
def test_query_transactions_removed_address(
        rotkehlchen_api_server,
        ethereum_accounts,
):
    """Make sure that if an address is removed so are the transactions from the DB

    Five transactions are seeded; after deleting account[0] only the three
    that also touch account[1] should survive.
    """
    start_ts = 0
    end_ts = 1598453214
    rotki = rotkehlchen_api_server.rest_api.rotkehlchen
    db = rotki.data.db
    transactions = [EthereumTransaction(
        tx_hash=b'1',
        timestamp=0,
        block_number=0,
        from_address=ethereum_accounts[0],
        to_address=make_ethereum_address(),
        value=1,
        gas=1,
        gas_price=1,
        gas_used=1,
        input_data=b'',
        nonce=0,
    ), EthereumTransaction(
        tx_hash=b'2',
        timestamp=0,
        block_number=0,
        from_address=ethereum_accounts[0],
        to_address=make_ethereum_address(),
        value=1,
        gas=1,
        gas_price=1,
        gas_used=1,
        input_data=b'',
        nonce=1,
    ), EthereumTransaction(  # should remain after deleting account[0]
        tx_hash=b'3',
        timestamp=0,
        block_number=0,
        from_address=make_ethereum_address(),
        to_address=ethereum_accounts[1],
        value=1,
        gas=1,
        gas_price=1,
        gas_used=1,
        input_data=b'',
        nonce=55,
    ), EthereumTransaction(  # should remain after deleting account[0]
        tx_hash=b'4',
        timestamp=0,
        block_number=0,
        from_address=ethereum_accounts[1],
        to_address=ethereum_accounts[0],
        value=1,
        gas=1,
        gas_price=1,
        gas_used=1,
        input_data=b'',
        nonce=0,
    ), EthereumTransaction(  # should remain after deleting account[0]
        tx_hash=b'5',
        timestamp=0,
        block_number=0,
        from_address=ethereum_accounts[0],
        to_address=ethereum_accounts[1],
        value=1,
        gas=1,
        gas_price=1,
        gas_used=1,
        input_data=b'',
        nonce=0,
    )]
    db.add_ethereum_transactions(transactions, from_etherscan=True)
    # Also make sure to update query ranges so as not to query etherscan at all
    for address in ethereum_accounts:
        DBQueryRanges(db).update_used_query_range(
            location_string=f'ethtxs_{address}',
            start_ts=start_ts,
            end_ts=end_ts,
            ranges_to_query=[],
        )

    # Now remove the first account (do the mocking to not query etherscan for balances)
    setup = setup_balances(
        rotki,
        ethereum_accounts=ethereum_accounts,
        btc_accounts=[],
        eth_balances=['10000', '10000'],
    )
    with ExitStack() as stack:
        setup.enter_ethereum_patches(stack)
        response = requests.delete(api_url_for(
            rotkehlchen_api_server,
            "blockchainsaccountsresource",
            blockchain='ETH',
        ), json={'accounts': [ethereum_accounts[0]]})
        assert_proper_response_with_result(response)

    # Check that only the 3 remaining transactions from the other account are returned
    response = requests.get(
        api_url_for(
            rotkehlchen_api_server,
            'ethereumtransactionsresource',
        ),
    )
    result = assert_proper_response_with_result(response)
    assert len(result['entries']) == 3
    assert result['entries_found'] == 3
def test_replace_asset_not_in_globaldb(rotkehlchen_api_server, globaldb):
    """Test that the endpoint for replacing an asset identifier works even if
    the source asset identifier is not in the global DB

    The unknown asset is injected directly into the user DB via SQL, since the
    API would refuse to create such a state.
    """
    rotki = rotkehlchen_api_server.rest_api.rotkehlchen
    # Emulate some custom state that can be reached if somehow you end up with a user
    # DB asset that is not in the global DB
    unknown_id = 'foo-boo-goo-doo'
    cursor = rotki.data.db.conn.cursor()
    cursor.execute('INSERT INTO assets VALUES(?)', (unknown_id,))
    # 'A' is the serialized location; presumably 'external' — see the expected
    # 'location': 'external' in the response below
    cursor.execute(
        'INSERT INTO manually_tracked_balances(asset, label, amount, location) '
        'VALUES (?, ?, ?, ?)',
        (unknown_id, 'forgotten balance', '1', 'A'),
    )
    assert cursor.execute(
        'SELECT COUNT(*) FROM assets WHERE identifier=?',
        (unknown_id,),
    ).fetchone()[0] == 1
    # Check that the manual balance is there -- can't query normally due to unknown asset
    assert cursor.execute(
        'SELECT COUNT(*) FROM manually_tracked_balances WHERE asset=?',
        (unknown_id,),
    ).fetchone()[0] == 1

    # now do the replacement
    response = requests.put(
        api_url_for(
            rotkehlchen_api_server,
            'assetsreplaceresource',
        ),
        json={'source_identifier': unknown_id, 'target_asset': 'ICP'},
    )
    assert_simple_ok_response(response)

    # after the replacement. Check that the manual balance is changed and is now queriable
    response = requests.get(
        api_url_for(
            rotkehlchen_api_server,
            'manuallytrackedbalancesresource',
        ),
        json={'async_query': False},
    )
    result = assert_proper_response_with_result(response)
    assert result['balances'] == [{
        'id': 1,
        'asset': 'ICP',
        'label': 'forgotten balance',
        'amount': '1',
        'usd_value': '1.5',
        'tags': None,
        'location': 'external',
        'balance_type': 'asset',
    }]

    # check the previous asset is not in globaldb owned assets
    global_cursor = globaldb.conn.cursor()
    assert global_cursor.execute(
        'SELECT COUNT(*) FROM user_owned_assets WHERE asset_id=?',
        (unknown_id,),
    ).fetchone()[0] == 0
    # check the previous asset is not in globaldb
    assert global_cursor.execute(
        'SELECT COUNT(*) FROM assets WHERE identifier=?',
        (unknown_id,),
    ).fetchone()[0] == 0
    # check the previous asset is not in userdb anymore
    assert cursor.execute(
        'SELECT COUNT(*) FROM assets WHERE identifier=?',
        (unknown_id,),
    ).fetchone()[0] == 0
def test_adding_custom_assets(rotkehlchen_api_server, globaldb):
    """Test that the endpoint for adding a custom asset works

    Adds two valid assets (the second referencing the first via forked) and
    then verifies rejection of: duplicate type/name/symbol combinations,
    ethereum-token type on this endpoint, and unknown forked/swapped_for
    assets.
    """
    custom1 = {
        'asset_type': 'own chain',
        'name': 'foo token',
        'symbol': 'FOO',
        'started': 5,
    }
    response = requests.put(
        api_url_for(
            rotkehlchen_api_server,
            'allassetsresource',
        ),
        json=custom1,
    )
    result = assert_proper_response_with_result(response)
    custom1_id = result['identifier']
    # Verify the addition landed in the global DB with the given values
    data = globaldb.get_asset_data(identifier=custom1_id, form_with_incomplete_data=False)
    assert data.identifier == custom1_id
    assert data.asset_type == AssetType.OWN_CHAIN
    assert data.name == custom1['name']
    assert data.symbol == custom1['symbol']
    assert data.started == custom1['started']

    custom2 = {
        'asset_type': 'stellar token',
        'name': 'goo token',
        'symbol': 'GOO',
        'started': 6,
        'forked': custom1_id,
        'swapped_for': 'ETH',
        'coingecko': 'goo-token',
        'cryptocompare': 'GOO',
    }
    response = requests.put(
        api_url_for(
            rotkehlchen_api_server,
            'allassetsresource',
        ),
        json=custom2,
    )
    result = assert_proper_response_with_result(response)
    custom2_id = result['identifier']
    data = globaldb.get_asset_data(identifier=custom2_id, form_with_incomplete_data=False)
    assert data.identifier == custom2_id
    assert data.asset_type == AssetType.STELLAR_TOKEN
    assert data.name == custom2['name']
    assert data.symbol == custom2['symbol']
    assert data.started == custom2['started']
    assert data.forked == custom2['forked']
    assert data.swapped_for == custom2['swapped_for']
    assert data.coingecko == custom2['coingecko']
    assert data.cryptocompare == custom2['cryptocompare']

    # try to add a token type/name/symbol combo that exists
    bad_asset = {
        'asset_type': 'fiat',
        'name': 'Euro',
        'symbol': 'EUR',
    }
    response = requests.put(
        api_url_for(
            rotkehlchen_api_server,
            'allassetsresource',
        ),
        json=bad_asset,
    )
    expected_msg = 'Failed to add fiat Euro since it already exists. Existing ids: EUR'
    assert_error_response(
        response=response,
        contained_in_msg=expected_msg,
        status_code=HTTPStatus.CONFLICT,
    )
    # try to add an ethereum token with the custom asset endpoint
    bad_asset = {
        'asset_type': 'ethereum token',
        'name': 'Euro',
        'symbol': 'EUR',
    }
    response = requests.put(
        api_url_for(
            rotkehlchen_api_server,
            'allassetsresource',
        ),
        json=bad_asset,
    )
    expected_msg = 'Asset type ethereum token is not allowed in this endpoint'
    assert_error_response(
        response=response,
        contained_in_msg=expected_msg,
        status_code=HTTPStatus.BAD_REQUEST,
    )
    # try to add non existing forked and swapped for
    bad_asset = {
        'asset_type': 'omni token',
        'name': 'Euro',
        'symbol': 'EUR',
        'forked': 'dsadsadsadasd',
        'swapped_for': 'asdsadsad',
    }
    response = requests.put(
        api_url_for(
            rotkehlchen_api_server,
            'allassetsresource',
        ),
        json=bad_asset,
    )
    expected_msg = 'Unknown asset'
    assert_error_response(
        response=response,
        contained_in_msg=expected_msg,
        status_code=HTTPStatus.BAD_REQUEST,
    )
def test_query_transactions(rotkehlchen_api_server):
    """Test that querying the ethereum transactions endpoint works as expected

    This test uses real data.

    Also checks that ignoring transactions works and that per-address,
    time-ranged queries are served from the DB without hitting etherscan.
    """
    async_query = random.choice([False, True])
    rotki = rotkehlchen_api_server.rest_api.rotkehlchen
    # Check that we get all transactions
    response = requests.get(
        api_url_for(
            rotkehlchen_api_server,
            'ethereumtransactionsresource',
        ),
        json={'async_query': async_query},
    )
    if async_query:
        task_id = assert_ok_async_response(response)
        outcome = wait_for_async_task(rotkehlchen_api_server, task_id)
        assert outcome['message'] == ''
        result = outcome['result']
    else:
        result = assert_proper_response_with_result(response)
    expected_result = EXPECTED_AFB7_TXS + EXPECTED_4193_TXS
    # Endpoint returns transactions newest-first
    expected_result.sort(key=lambda x: x['timestamp'])
    expected_result.reverse()

    # Make sure that all of the transactions we expect are there and in order
    # There can be more transactions (since the address can make more)
    # but this check ignores them
    # NOTE(review): the previous_index != 0 guard also skips the ordering check
    # when the preceding expected entry sits at result index 0 — confirm intended
    previous_index = 0
    result_entries = [x['entry'] for x in result['entries']]
    assert all(x['ignored_in_accounting'] is False for x in result['entries']), 'by default nothing should be ignored'  # noqa: E501
    for entry in expected_result:
        assert entry in result_entries
        entry_idx = result_entries.index(entry)
        if previous_index != 0:
            assert entry_idx == previous_index + 1
        previous_index = entry_idx

    assert result['entries_found'] >= len(expected_result)
    assert result['entries_limit'] == FREE_ETH_TX_LIMIT

    # now let's ignore two transactions
    # an ignore id is tx_hash + from_address + nonce concatenated
    ignored_ids = [
        EXPECTED_AFB7_TXS[2]['tx_hash'] + EXPECTED_AFB7_TXS[2]['from_address'] + str(EXPECTED_AFB7_TXS[2]['nonce']),  # noqa: E501
        EXPECTED_AFB7_TXS[3]['tx_hash'] + EXPECTED_AFB7_TXS[3]['from_address'] + str(EXPECTED_AFB7_TXS[3]['nonce']),  # noqa: E501
    ]
    response = requests.put(
        api_url_for(
            rotkehlchen_api_server,
            "ignoredactionsresource",
        ),
        json={'action_type': 'ethereum transaction', 'action_ids': ignored_ids},
    )
    result = assert_proper_response_with_result(response)
    assert result == {'ethereum transaction': ignored_ids}

    # Check that transactions per address and in a specific time range can be
    # queried and that this is from the DB and not etherscan
    def mock_etherscan_get(url, *args, **kwargs):  # pylint: disable=unused-argument
        return MockResponse(200, "{}")
    etherscan_patch = patch.object(rotki.etherscan.session, 'get', wraps=mock_etherscan_get)
    with etherscan_patch as mock_call:
        response = requests.get(
            api_url_for(
                rotkehlchen_api_server,
                'per_address_ethereum_transactions_resource',
                address='0xaFB7ed3beBE50E0b62Fa862FAba93e7A46e59cA7',
            ),
            json={
                'async_query': async_query,
                "from_timestamp": 1461399856,
                "to_timestamp": 1494458860,
            },
        )
        if async_query:
            task_id = assert_ok_async_response(response)
            outcome = wait_for_async_task(rotkehlchen_api_server, task_id)
            assert outcome['message'] == ''
            result = outcome['result']
        else:
            result = assert_proper_response_with_result(response)
    # etherscan must not have been called — the data came from the DB
    assert mock_call.call_count == 0

    result_entries = [x['entry'] for x in result['entries']]
    assert result_entries == EXPECTED_AFB7_TXS[2:4][::-1]
    msg = 'the transactions we ignored have not been ignored for accounting'
    assert all(x['ignored_in_accounting'] is True for x in result['entries']), msg