def test_query_history_timerange(rotkehlchen_api_server_with_exchanges):
    """Same as test_query_history but restricted to a limited timerange."""
    rotki = rotkehlchen_api_server_with_exchanges.rest_api.rotkehlchen
    start_ts = 1539713237
    end_ts = 1539713238
    setup = prepare_rotki_for_history_processing_test(
        rotki,
        should_mock_history_processing=False,
        history_start_ts=start_ts,
        history_end_ts=end_ts,
    )

    # Kick off the history processing with all the exchange mocks active
    with ExitStack() as stack:
        for patch_manager in setup:
            if patch_manager is not None:
                stack.enter_context(patch_manager)
        response = requests.get(
            api_url_for(rotkehlchen_api_server_with_exchanges, "historyprocessingresource"),
            json={'from_timestamp': start_ts, 'to_timestamp': end_ts},
        )

    # Simply check that the results got returned here. The actual correctness of
    # accounting results is checked in other tests such as test_simple_accounting
    assert_proper_response(response)
    payload = response.json()
    assert payload['message'] == ''
    assert len(payload['result']) == 2

    overview = payload['result']['overview']
    assert len(overview) == 11
    for key in (
            'loan_profit',
            'margin_positions_profit_loss',
            'settlement_losses',
            'ethereum_transaction_gas_costs',
            'asset_movement_fees',
            'general_trade_profit_loss',
            'taxable_trade_profit_loss',
            'total_taxable_profit_loss',
            'total_profit_loss',
            'defi_profit_loss',
            'ledger_actions_profit_loss',
    ):
        assert overview[key] is not None

    all_events = payload['result']['all_events']
    assert isinstance(all_events, list)
    assert len(all_events) == 4

    # Finally check that the periodic data endpoint reflects the processed range
    response = requests.get(
        api_url_for(rotkehlchen_api_server_with_exchanges, "periodicdataresource"),
    )
    assert_proper_response(response)
    payload = response.json()
    assert payload['result']['last_balance_save'] == 0
    assert payload['result']['eth_node_connection'] is False
    assert payload['result']['history_process_start_ts'] == 1428994442
    assert payload['result']['history_process_current_ts'] == end_ts
def test_history_export_csv_errors(
        rotkehlchen_api_server_with_exchanges,
        tmpdir_factory,
):
    """Test that errors on the csv export REST API endpoint are handled correctly"""
    rotki = rotkehlchen_api_server_with_exchanges.rest_api.rotkehlchen
    setup = prepare_rotki_for_history_processing_test(
        rotki,
        should_mock_history_processing=False,
    )
    csv_dir = str(tmpdir_factory.mktemp('test_csv_dir'))

    # Exporting before any history has been processed should be a conflict
    response = requests.get(
        api_url_for(rotkehlchen_api_server_with_exchanges, "historyexportingresource"),
        json={'directory_path': csv_dir},
    )
    assert_error_response(
        response=response,
        contained_in_msg='No history processed in order to perform an export',
        status_code=HTTPStatus.CONFLICT,
    )

    # Now run the history processing so there is data available for exporting
    with ExitStack() as stack:
        for patch_manager in setup:
            if patch_manager is not None:
                stack.enter_context(patch_manager)
        response = requests.get(
            api_url_for(rotkehlchen_api_server_with_exchanges, "historyprocessingresource"),
        )
    assert_proper_response(response)

    # A non-existing directory path must be rejected as a bad request
    response = requests.get(
        api_url_for(rotkehlchen_api_server_with_exchanges, "historyexportingresource"),
        json={'directory_path': '/idont/exist/for/sure/'},
    )
    assert_error_response(
        response=response,
        contained_in_msg='"directory_path": ["Given path /idont/exist/for/sure/ does not exist',
        status_code=HTTPStatus.BAD_REQUEST,
    )

    # An existing path that is a file, not a directory, must also be rejected
    tempfile = Path(Path(csv_dir) / 'f.txt')
    tempfile.touch()
    response = requests.get(
        api_url_for(rotkehlchen_api_server_with_exchanges, "historyexportingresource"),
        json={'directory_path': str(tempfile)},
    )
    assert_error_response(
        response=response,
        contained_in_msg='is not a directory',
        status_code=HTTPStatus.BAD_REQUEST,
    )
def query_api_create_and_get_report(
        server,
        start_ts: Timestamp,
        end_ts: Timestamp,
        prepare_mocks: bool,
        events_offset: Optional[int] = None,
        events_limit: Optional[int] = None,
        events_ascending_timestamp: bool = False,
):
    """Run history processing via the API and fetch the resulting PnL report.

    Randomly exercises both the sync and the async code path of the
    history processing endpoint, then queries the per-report metadata and
    the (optionally paginated/sorted) report events.

    Args:
        server: The API server test fixture to query.
        start_ts / end_ts: Timerange for the history processing.
        prepare_mocks: If True, set up the exchange/history mocks before querying.
        events_offset / events_limit: Optional pagination for the report events.
        events_ascending_timestamp: Sort order for the report events.

    Returns:
        A 3-tuple of (report_id, report metadata result, report events result).
    """
    async_query = random.choice([False, True])
    rotki = server.rest_api.rotkehlchen
    setup = None
    if prepare_mocks:
        setup = prepare_rotki_for_history_processing_test(
            rotki,
            should_mock_history_processing=False,
            history_start_ts=start_ts,
            history_end_ts=end_ts,
        )

    # Query history processing to start the history processing
    with ExitStack() as stack:
        if setup is not None:
            for manager in setup:
                if manager is None:  # skip unused patches, consistent with the other tests here
                    continue
                stack.enter_context(manager)
        response = requests.get(
            api_url_for(server, 'historyprocessingresource'),
            json={
                'from_timestamp': start_ts,
                'to_timestamp': end_ts,
                'async_query': async_query,
            },
        )
        if async_query:
            task_id = assert_ok_async_response(response)
            outcome = wait_for_async_task_with_result(server, task_id)
        else:
            outcome = assert_proper_response_with_result(response)
        report_id = outcome

        # Fetch the metadata of the report that was just created
        response = requests.get(
            api_url_for(server, 'per_report_resource', report_id=report_id),
        )
        report_result = assert_proper_response_with_result(response)

        # And finally fetch the report's events with the requested pagination/sorting
        response = requests.post(
            api_url_for(server, 'per_report_data_resource', report_id=report_id),
            json={
                'offset': events_offset,
                'limit': events_limit,
                'ascending': events_ascending_timestamp,
            },
        )
        events_result = assert_proper_response_with_result(response)
    return report_id, report_result, events_result
def test_query_history_async(rotkehlchen_api_server_with_exchanges):
    """Same as test_query_history but exercising the asynchronous code path."""
    rotki = rotkehlchen_api_server_with_exchanges.rest_api.rotkehlchen
    start_ts = 1539713237
    end_ts = 1539713238
    setup = prepare_rotki_for_history_processing_test(
        rotki,
        should_mock_history_processing=False,
        history_start_ts=start_ts,
        history_end_ts=end_ts,
    )

    # Kick off the history processing asynchronously with all mocks active
    with ExitStack() as stack:
        for patch_manager in setup:
            if patch_manager is not None:
                stack.enter_context(patch_manager)
        response = requests.get(
            api_url_for(rotkehlchen_api_server_with_exchanges, "historyprocessingresource"),
            json={'from_timestamp': start_ts, 'to_timestamp': end_ts, 'async_query': True},
        )
        task_id = assert_ok_async_response(response)
        outcome = wait_for_async_task(rotkehlchen_api_server_with_exchanges, task_id)

    assert len(outcome['result']) == 2
    overview = outcome['result']['overview']
    assert len(overview) == 10
    for key in (
            'loan_profit',
            'defi_profit_loss',
            'margin_positions_profit_loss',
            'settlement_losses',
            'ethereum_transaction_gas_costs',
            'asset_movement_fees',
            'general_trade_profit_loss',
            'taxable_trade_profit_loss',
            'total_taxable_profit_loss',
            'total_profit_loss',
    ):
        assert overview[key] is not None

    all_events = outcome['result']['all_events']
    assert isinstance(all_events, list)
    assert len(all_events) == 4
def test_query_history_remote_errors(rotkehlchen_api_server_with_exchanges):
    """Test that the history processing REST API endpoint works. Similar to test_history.py"""
    rotki = rotkehlchen_api_server_with_exchanges.rest_api.rotkehlchen
    # remote_errors=True makes the mocked exchange/etherscan endpoints return
    # invalid responses so we can check the error propagation below
    setup = prepare_rotki_for_history_processing_test(
        rotki,
        should_mock_history_processing=True,
        remote_errors=True,
    )

    # Query history processing to start the history processing
    with ExitStack() as stack:
        for manager in setup:
            if manager is None:
                continue
            stack.enter_context(manager)
        response = requests.get(
            api_url_for(rotkehlchen_api_server_with_exchanges, "historyprocessingresource"),
        )
    assert_proper_response(response)
    data = response.json()

    # The remote failures should surface as errors in the message aggregator,
    # not as warnings
    warnings = rotki.msg_aggregator.consume_warnings()
    assert len(warnings) == 0
    errors = rotki.msg_aggregator.consume_errors()
    assert len(errors) == 6
    assert 'Etherscan API request http://someurl.com returned invalid JSON response: [{' in errors[0]  # noqa: E501
    # The history processing is completely mocked away and omitted in this test.
    # because it is only for the history creation not its processing.
    # For history processing tests look at test_accounting.py and
    # test_accounting_events.py

    # Every failing exchange should be mentioned in the response message and
    # the result payload should be empty
    assert 'invalid JSON' in data['message']
    assert 'binance' in data['message']
    assert 'Bittrex' in data['message']
    assert 'Bitmex' in data['message']
    assert 'Kraken' in data['message']
    assert 'Poloniex' in data['message']
    assert data['result'] == {}
def test_history_export_csv(
        rotkehlchen_api_server_with_exchanges,
        tmpdir_factory,
):
    """Test that the csv export REST API endpoint works correctly"""
    rotki = rotkehlchen_api_server_with_exchanges.rest_api.rotkehlchen
    profit_currency = rotki.data.db.get_main_currency()
    setup = prepare_rotki_for_history_processing_test(
        rotki,
        should_mock_history_processing=False,
    )
    csv_dir = str(tmpdir_factory.mktemp('test_csv_dir'))
    csv_dir2 = str(tmpdir_factory.mktemp('test_csv_dir2'))

    # First, query history processing to have data for exporting
    with ExitStack() as stack:
        for patch_manager in setup:
            if patch_manager is not None:
                stack.enter_context(patch_manager)
        response = requests.get(
            api_url_for(rotkehlchen_api_server_with_exchanges, "historyprocessingresource"),
        )
    assert_proper_response(response)

    # Exporting via a json body should work
    response = requests.get(
        api_url_for(rotkehlchen_api_server_with_exchanges, "historyexportingresource"),
        json={'directory_path': csv_dir},
    )
    assert_csv_export_response(response, profit_currency, csv_dir)

    # Exporting via query params should work just the same
    response = requests.get(
        api_url_for(rotkehlchen_api_server_with_exchanges, "historyexportingresource") +
        f'?directory_path={csv_dir2}',
    )
    assert_csv_export_response(response, profit_currency, csv_dir2)
def test_query_history_remote_errors(rotkehlchen_api_server_with_exchanges):
    """Test that the history processing REST API endpoint works. Similar to test_history.py"""
    rotki = rotkehlchen_api_server_with_exchanges.rest_api.rotkehlchen
    # remote_errors=True makes the mocked remote endpoints return invalid
    # responses so we can check how the API surfaces them
    setup = prepare_rotki_for_history_processing_test(
        rotki,
        should_mock_history_processing=True,
        remote_errors=True,
    )

    # Query history processing to start the history processing
    with ExitStack() as stack:
        for manager in setup:
            if manager is None:
                continue
            stack.enter_context(manager)
        response = requests.get(
            api_url_for(rotkehlchen_api_server_with_exchanges, 'historyprocessingresource'),
        )
    # The endpoint responds with 200 but the message names every failing source
    assert_error_response(
        response=response,
        status_code=HTTPStatus.OK,
        contained_in_msg=[
            'invalid JSON',
            'binance',
            'Bittrex',
            'Bitmex',
            'Kraken',
            'Poloniex',
        ],
    )
    # The failures land in the error queue of the aggregator, not in warnings
    warnings = rotki.msg_aggregator.consume_warnings()
    assert len(warnings) == 0
    errors = rotki.msg_aggregator.consume_errors()
    assert len(errors) == 11
    # The first two errors come from kraken, then the etherscan failure follows
    assert all('kraken' in e for e in errors[:2])
    assert 'Etherscan API request http://someurl.com returned invalid JSON response: [{' in errors[2]  # noqa: E501
def test_query_history(rotkehlchen_api_server_with_exchanges):
    """Test that the history processing REST API endpoint works. Similar to test_history.py"""
    rotki = rotkehlchen_api_server_with_exchanges.rest_api.rotkehlchen
    setup = prepare_rotki_for_history_processing_test(
        rotki,
        should_mock_history_processing=False,
    )
    # Query history processing to start the history processing
    with ExitStack() as stack:
        for manager in setup:
            if manager is None:
                continue
            stack.enter_context(manager)
        response = requests.get(
            api_url_for(rotkehlchen_api_server_with_exchanges, "historyprocessingresource"),
        )
    # Simply check that the results got returned here. The actual correctness of
    # accounting results is checked in other tests such as test_simple_accounting
    assert_proper_response(response)
    data = response.json()
    assert data['message'] == ''
    assert len(data['result']) == 2
    overview = data['result']['overview']
    assert len(overview) == 9
    assert overview["loan_profit"] is not None
    assert overview["margin_positions_profit_loss"] is not None
    assert overview["settlement_losses"] is not None
    assert overview["ethereum_transaction_gas_costs"] is not None
    assert overview["asset_movement_fees"] is not None
    assert overview["general_trade_profit_loss"] is not None
    assert overview["taxable_trade_profit_loss"] is not None
    assert overview["total_taxable_profit_loss"] is not None
    assert overview["total_profit_loss"] is not None
    all_events = data['result']['all_events']
    assert isinstance(all_events, list)
    # TODO: These events are not actually checked anywhere for correctness
    # A test should probably be made for their correctness, even though
    # they are assumed correct if the overview is correct
    assert len(all_events) == 36

    # And now make sure that warnings have also been generated for the query of
    # the unsupported/unknown assets
    warnings = rotki.msg_aggregator.consume_warnings()
    assert len(warnings) == 13
    assert 'kraken trade with unknown asset IDONTEXISTTOO' in warnings[0]
    assert 'unknown kraken asset IDONTEXIST. Ignoring its deposit/withdrawals query' in warnings[1]
    msg = 'unknown kraken asset IDONTEXISTEITHER. Ignoring its deposit/withdrawals query'
    assert msg in warnings[2]
    assert 'poloniex trade with unknown asset NOEXISTINGASSET' in warnings[3]
    assert 'poloniex trade with unsupported asset BALLS' in warnings[4]
    assert 'withdrawal of unknown poloniex asset IDONTEXIST' in warnings[5]
    assert 'withdrawal of unsupported poloniex asset DIS' in warnings[6]
    assert 'deposit of unknown poloniex asset IDONTEXIST' in warnings[7]
    assert 'deposit of unsupported poloniex asset EBT' in warnings[8]
    assert 'poloniex loan with unsupported asset BDC' in warnings[9]
    assert 'poloniex loan with unknown asset NOTEXISTINGASSET' in warnings[10]
    assert 'bittrex trade with unsupported asset PTON' in warnings[11]
    assert 'bittrex trade with unknown asset IDONTEXIST' in warnings[12]

    # Unprocessable trade pairs on the other hand are reported as errors
    errors = rotki.msg_aggregator.consume_errors()
    assert len(errors) == 3
    assert 'kraken trade with unprocessable pair IDONTEXISTZEUR' in errors[0]
    assert 'kraken trade with unprocessable pair %$#%$#%$#%$#%$#%' in errors[1]
    assert 'bittrex trade with unprocessable pair %$#%$#%#$%' in errors[2]
def test_query_asset_movements_over_limit(
        rotkehlchen_api_server_with_exchanges,
        start_with_valid_premium,
):
    """Test that the free-tier asset movements limit is applied correctly.

    Fills the DB with more kraken movements than FREE_ASSET_MOVEMENTS_LIMIT
    allows, then checks that premium users see everything while free users
    get the capped number of entries (but the true entries_found count).
    """
    start_ts = 0
    end_ts = 1598453214
    server = rotkehlchen_api_server_with_exchanges
    rotki = server.rest_api.rotkehlchen

    # Make sure online kraken is not queried by setting query ranges
    rotki.data.db.update_used_query_range(
        name='kraken_asset_movements',
        start_ts=start_ts,
        end_ts=end_ts,
    )
    polo_entries_num = 4
    # Set a ton of kraken asset movements in the DB, exceeding the free limit
    kraken_entries_num = FREE_ASSET_MOVEMENTS_LIMIT + 50
    movements = [AssetMovement(
        location=Location.KRAKEN,
        category=AssetMovementCategory.DEPOSIT,
        address=None,
        transaction_id=None,
        timestamp=x,
        asset=A_BTC,
        amount=FVal(x * 100),
        fee_asset=A_BTC,
        fee=FVal(x),
        link='',
    ) for x in range(kraken_entries_num)]
    rotki.data.db.add_asset_movements(movements)
    all_movements_num = kraken_entries_num + polo_entries_num
    setup = prepare_rotki_for_history_processing_test(server.rest_api.rotkehlchen)

    # Check that querying movements with/without limits works even if we query two times
    for _ in range(2):
        # query asset movements of polo which has less movements than the limit
        with setup.polo_patch:
            response = requests.get(
                api_url_for(
                    server,
                    "assetmovementsresource",
                ),
                json={'location': 'poloniex'},
            )
        result = assert_proper_response_with_result(response)
        assert result['entries_found'] == all_movements_num
        # BUGFIX: the right-hand side must be parenthesized. Without parentheses
        # `a == b if c else d` parses as `(a == b) if c else d`, which made this
        # assert check the truthy constant FREE_ASSET_MOVEMENTS_LIMIT (i.e. it
        # was vacuous) whenever start_with_valid_premium was False.
        assert result['entries_limit'] == (-1 if start_with_valid_premium else FREE_ASSET_MOVEMENTS_LIMIT)  # noqa: E501
        assert_poloniex_asset_movements([x['entry'] for x in result['entries']], deserialized=True)

        # now query kraken which has a ton of DB entries
        response = requests.get(
            api_url_for(server, "assetmovementsresource"),
            json={'location': 'kraken'},
        )
        result = assert_proper_response_with_result(response)
        if start_with_valid_premium:
            # premium sees every kraken movement, no limit applied
            assert len(result['entries']) == kraken_entries_num
            assert result['entries_limit'] == -1
            assert result['entries_found'] == all_movements_num
        else:
            # free tier: the poloniex entries already consumed part of the limit
            assert len(result['entries']) == FREE_ASSET_MOVEMENTS_LIMIT - polo_entries_num
            assert result['entries_limit'] == FREE_ASSET_MOVEMENTS_LIMIT
            assert result['entries_found'] == all_movements_num
def test_query_asset_movements(rotkehlchen_api_server_with_exchanges):
    """Test that using the asset movements query endpoint works fine"""
    # Randomly exercise both the sync and the async code path
    async_query = random.choice([False, True])
    server = rotkehlchen_api_server_with_exchanges
    setup = prepare_rotki_for_history_processing_test(server.rest_api.rotkehlchen)
    # setup = mock_history_processing_and_exchanges(server.rest_api.rotkehlchen)

    # query asset movements of one specific exchange
    with setup.polo_patch:
        response = requests.get(
            api_url_for(
                server,
                "assetmovementsresource",
            ),
            json={'location': 'poloniex', 'async_query': async_query},
        )
        if async_query:
            task_id = assert_ok_async_response(response)
            outcome = wait_for_async_task(rotkehlchen_api_server_with_exchanges, task_id)
            result = outcome['result']
        else:
            result = assert_proper_response_with_result(response)
    assert result['entries_found'] == 4
    assert result['entries_limit'] == FREE_ASSET_MOVEMENTS_LIMIT
    poloniex_ids = [x['entry']['identifier'] for x in result['entries']]
    assert_poloniex_asset_movements([x['entry'] for x in result['entries']], deserialized=True)
    # Nothing has been ignored yet
    assert all(x['ignored_in_accounting'] is False for x in result['entries']), 'ignored should be false'  # noqa: E501

    # now let's ignore all poloniex action ids
    response = requests.put(
        api_url_for(
            rotkehlchen_api_server_with_exchanges,
            "ignoredactionsresource",
        ),
        json={'action_type': 'asset movement', 'action_ids': poloniex_ids},
    )
    result = assert_proper_response_with_result(response)
    assert set(result['asset movement']) == set(poloniex_ids)

    # query asset movements of all exchanges
    with setup.polo_patch:
        response = requests.get(
            api_url_for(server, "assetmovementsresource"),
            json={'async_query': async_query},
        )
        if async_query:
            task_id = assert_ok_async_response(response)
            outcome = wait_for_async_task(rotkehlchen_api_server_with_exchanges, task_id)
            result = outcome['result']
        else:
            result = assert_proper_response_with_result(response)
    movements = result['entries']
    assert_poloniex_asset_movements([x['entry'] for x in movements if x['entry']['location'] == 'poloniex'], True)  # noqa: E501
    assert_kraken_asset_movements([x['entry'] for x in movements if x['entry']['location'] == 'kraken'], True)  # noqa: E501

    def assert_okay(response):
        """Helper function for DRY checking below assertions"""
        if async_query:
            task_id = assert_ok_async_response(response)
            outcome = wait_for_async_task(rotkehlchen_api_server_with_exchanges, task_id)
            result = outcome['result']
        else:
            result = assert_proper_response_with_result(response)
        movements = result['entries']
        # Only a subset of movements falls inside the queried timerange
        assert_poloniex_asset_movements(
            to_check_list=[x['entry'] for x in movements if x['entry']['location'] == 'poloniex'],
            deserialized=True,
            movements_to_check=(1, 2),
        )
        msg = 'poloniex asset movements should have now been ignored for accounting'
        assert all(x['ignored_in_accounting'] is True for x in movements if x['entry']['location'] == 'poloniex'), msg  # noqa: E501
        assert_kraken_asset_movements(
            to_check_list=[x['entry'] for x in movements if x['entry']['location'] == 'kraken'],
            deserialized=True,
            movements_to_check=(0, 1, 2),
        )

    # and now query them in a specific time range excluding some asset movements
    data = {'from_timestamp': 1439994442, 'to_timestamp': 1458994442, 'async_query': async_query}
    with setup.polo_patch:
        response = requests.get(api_url_for(server, "assetmovementsresource"), json=data)
        assert_okay(response)
    # do the same but with query args. This serves as test of from/to timestamp with query args
    with setup.polo_patch:
        response = requests.get(
            api_url_for(server, "assetmovementsresource") + '?' + urlencode(data))
        assert_okay(response)
def test_query_asset_movements(rotkehlchen_api_server_with_exchanges, async_query):
    """Test that using the asset movements query endpoint works fine"""
    server = rotkehlchen_api_server_with_exchanges
    setup = prepare_rotki_for_history_processing_test(server.rest_api.rotkehlchen)
    # setup = mock_history_processing_and_exchanges(server.rest_api.rotkehlchen)

    def resolve_result(response):
        """Resolve a response to its result, waiting on the task when async"""
        if async_query:
            task_id = assert_ok_async_response(response)
            outcome = wait_for_async_task(rotkehlchen_api_server_with_exchanges, task_id)
            return outcome['result']
        return assert_proper_response_with_result(response)

    # query asset movements of one specific exchange
    with setup.polo_patch:
        response = requests.get(
            api_url_for(
                server,
                "assetmovementsresource",
            ),
            json={'location': 'poloniex', 'async_query': async_query},
        )
        result = resolve_result(response)
    assert result['entries_found'] == 4
    assert result['entries_limit'] == FREE_ASSET_MOVEMENTS_LIMIT
    assert_poloniex_asset_movements(result['entries'], deserialized=True)

    # query asset movements of all exchanges
    with setup.polo_patch:
        response = requests.get(
            api_url_for(server, "assetmovementsresource"),
            json={'async_query': async_query},
        )
        result = resolve_result(response)
    movements = result['entries']
    assert_poloniex_asset_movements(
        [m for m in movements if m['location'] == 'poloniex'], True)
    assert_kraken_asset_movements(
        [m for m in movements if m['location'] == 'kraken'], True)

    def assert_okay(response):
        """Helper function for DRY checking below assertions"""
        movements = resolve_result(response)['entries']
        # Only a subset of movements falls inside the queried timerange
        assert_poloniex_asset_movements(
            to_check_list=[m for m in movements if m['location'] == 'poloniex'],
            deserialized=True,
            movements_to_check=(1, 2),
        )
        assert_kraken_asset_movements(
            to_check_list=[m for m in movements if m['location'] == 'kraken'],
            deserialized=True,
            movements_to_check=(0, 1, 2),
        )

    # and now query them in a specific time range excluding some asset movements
    data = {
        'from_timestamp': 1439994442,
        'to_timestamp': 1458994442,
        'async_query': async_query,
    }
    with setup.polo_patch:
        response = requests.get(api_url_for(server, "assetmovementsresource"), json=data)
        assert_okay(response)
    # do the same but with query args. This serves as test of from/to timestamp with query args
    with setup.polo_patch:
        response = requests.get(
            api_url_for(server, "assetmovementsresource") + '?' + urlencode(data))
        assert_okay(response)
def test_query_history(rotkehlchen_api_server_with_exchanges):
    """Test that the history processing REST API endpoint works. Similar to test_history.py"""
    # Randomly exercise both the sync and the async code path
    async_query = random.choice([False, True])
    start_ts = 0
    end_ts = 1601040361
    rotki = rotkehlchen_api_server_with_exchanges.rest_api.rotkehlchen
    setup = prepare_rotki_for_history_processing_test(
        rotki,
        should_mock_history_processing=False,
    )
    # Query history processing to start the history processing
    with ExitStack() as stack:
        for manager in setup:
            if manager is None:
                continue
            stack.enter_context(manager)
        response = requests.get(
            api_url_for(rotkehlchen_api_server_with_exchanges, "historyprocessingresource"),
            json={'from_timestamp': start_ts, 'to_timestamp': end_ts, 'async_query': async_query},
        )
        if async_query:
            task_id = assert_ok_async_response(response)
            outcome = wait_for_async_task_with_result(
                rotkehlchen_api_server_with_exchanges,
                task_id,
            )
        else:
            outcome = assert_proper_response_with_result(response)

    # Simply check that the results got returned here. The actual correctness of
    # accounting results is checked in other tests such as test_simple_accounting
    assert len(outcome) == 5
    assert outcome['events_limit'] == FREE_PNL_EVENTS_LIMIT
    assert outcome['events_processed'] == 27
    assert outcome['first_processed_timestamp'] == 1428994442
    overview = outcome['overview']
    assert len(overview) == 11
    assert overview["loan_profit"] is not None
    assert overview["margin_positions_profit_loss"] is not None
    assert overview["settlement_losses"] is not None
    assert overview["ethereum_transaction_gas_costs"] is not None
    assert overview["asset_movement_fees"] is not None
    assert overview["general_trade_profit_loss"] is not None
    assert overview["taxable_trade_profit_loss"] is not None
    assert overview["total_taxable_profit_loss"] is not None
    assert overview["total_profit_loss"] is not None
    assert overview["defi_profit_loss"] is not None
    assert overview["ledger_actions_profit_loss"] is not None
    all_events = outcome['all_events']
    assert isinstance(all_events, list)
    # TODO: These events are not actually checked anywhere for correctness
    # A test should probably be made for their correctness, even though
    # they are assumed correct if the overview is correct
    assert len(all_events) == 37

    # And now make sure that warnings have also been generated for the query of
    # the unsupported/unknown assets
    warnings = rotki.msg_aggregator.consume_warnings()
    assert len(warnings) == 13
    assert 'poloniex trade with unknown asset NOEXISTINGASSET' in warnings[0]
    assert 'poloniex trade with unsupported asset BALLS' in warnings[1]
    assert 'withdrawal of unknown poloniex asset IDONTEXIST' in warnings[2]
    assert 'withdrawal of unsupported poloniex asset DIS' in warnings[3]
    assert 'deposit of unknown poloniex asset IDONTEXIST' in warnings[4]
    assert 'deposit of unsupported poloniex asset EBT' in warnings[5]
    assert 'poloniex loan with unsupported asset BDC' in warnings[6]
    assert 'poloniex loan with unknown asset NOTEXISTINGASSET' in warnings[7]
    assert 'bittrex trade with unsupported asset PTON' in warnings[8]
    assert 'bittrex trade with unknown asset IDONTEXIST' in warnings[9]
    assert 'kraken trade with unknown asset IDONTEXISTTOO' in warnings[10]
    assert 'unknown kraken asset IDONTEXIST. Ignoring its deposit/withdrawals' in warnings[11]
    msg = 'unknown kraken asset IDONTEXISTEITHER. Ignoring its deposit/withdrawals query'
    assert msg in warnings[12]

    # Unprocessable trade pairs on the other hand are reported as errors
    errors = rotki.msg_aggregator.consume_errors()
    assert len(errors) == 3
    assert 'bittrex trade with unprocessable pair %$#%$#%#$%' in errors[0]
    assert 'kraken trade with unprocessable pair IDONTEXISTZEUR' in errors[1]
    assert 'kraken trade with unprocessable pair %$#%$#%$#%$#%$#%' in errors[2]
def test_query_history_timerange(rotkehlchen_api_server_with_exchanges):
    """Same as test_query_history but restricted to a limited timerange."""
    rotki = rotkehlchen_api_server_with_exchanges.rest_api.rotkehlchen
    start_ts = 1539713237
    end_ts = 1539713238
    setup = prepare_rotki_for_history_processing_test(
        rotki,
        should_mock_history_processing=False,
        history_start_ts=start_ts,
        history_end_ts=end_ts,
    )

    # Kick off the history processing with all the exchange mocks active
    with ExitStack() as stack:
        for patch_manager in setup:
            if patch_manager is not None:
                stack.enter_context(patch_manager)
        response = requests.get(
            api_url_for(rotkehlchen_api_server_with_exchanges, 'historyprocessingresource'),
            json={'from_timestamp': start_ts, 'to_timestamp': end_ts},
        )

    # Simply check that the results got returned here. The actual correctness of
    # accounting results is checked in other tests such as test_simple_accounting
    assert_proper_response(response)
    payload = response.json()
    assert payload['message'] == ''
    report = payload['result']
    assert len(report) == 5
    assert report['events_limit'] == FREE_PNL_EVENTS_LIMIT
    assert report['events_processed'] == 25
    assert report['first_processed_timestamp'] == 1428994442

    overview = report['overview']
    assert len(overview) == 11
    for key in (
            'loan_profit',
            'margin_positions_profit_loss',
            'settlement_losses',
            'ethereum_transaction_gas_costs',
            'asset_movement_fees',
            'general_trade_profit_loss',
            'taxable_trade_profit_loss',
            'total_taxable_profit_loss',
            'total_profit_loss',
            'defi_profit_loss',
            'ledger_actions_profit_loss',
    ):
        assert overview[key] is not None

    all_events = report['all_events']
    assert isinstance(all_events, list)
    assert len(all_events) == 4

    # Finally check that the status endpoint reports the processing as complete
    response = requests.get(
        api_url_for(rotkehlchen_api_server_with_exchanges, 'historystatusresource'),
    )
    assert_proper_response(response)
    payload = response.json()
    assert FVal(payload['result']['total_progress']) == 100
    assert payload['result']['processing_state'] == 'Processing all retrieved historical events'