예제 #1
0
def test_export_snapshot(rotkehlchen_api_server, tmpdir_factory):
    """Exporting a DB snapshot to CSV must honour the configured main currency.

    The same snapshot is exported three times, switching the main currency
    setting before each export, and the produced CSV files are verified.
    """
    db = rotkehlchen_api_server.rest_api.rotkehlchen.data.db
    ts = Timestamp(ts_now())
    csv_dir = str(tmpdir_factory.mktemp('test_csv_dir'))
    csv_dir2 = str(tmpdir_factory.mktemp('test_csv_dir2'))
    _populate_db_with_balances(db.conn, ts)
    _populate_db_with_location_data(db.conn, ts)

    # The last two exports deliberately reuse the same directory so the
    # second export overwrites the first one's files, as the original
    # (triplicated) flow did.
    for main_currency, export_dir in (
            (A_EUR, csv_dir),
            (A_ETH, csv_dir2),
            (A_USD, csv_dir2),
    ):
        db.set_settings(ModifiableDBSettings(main_currency=main_currency))
        response = requests.post(
            api_url_for(
                rotkehlchen_api_server,
                'dbsnapshotexportingresource',
            ),
            json={
                'timestamp': ts,
                'path': export_dir,
            },
        )
        assert_csv_export_response(
            response,
            export_dir,
            main_currency=main_currency,
            is_download=False,
        )
예제 #2
0
def test_exchanges_filtering(database, exchange_manager,
                             function_scope_messages_aggregator):
    """Check that iterate_exchanges respects the non_syncing_exchanges setting."""
    def _new_kraken(name):
        # Build a mock kraken exchange with fresh credentials.
        return MockKraken(
            name=name,
            api_key=make_api_key(),
            secret=make_api_secret(),
            database=database,
            msg_aggregator=function_scope_messages_aggregator,
        )

    def _new_ftx(name):
        # Build an FTX exchange (no subaccount) with fresh credentials.
        return Ftx(
            name=name,
            api_key=make_api_key(),
            secret=make_api_secret(),
            database=database,
            msg_aggregator=function_scope_messages_aggregator,
            ftx_subaccount=None,
        )

    kraken1 = _new_kraken('mockkraken_1')
    kraken2 = _new_kraken('mockkraken_2')
    ftx1 = _new_ftx('mockftx_1')
    ftx2 = _new_ftx('mockftx_2')

    exchange_manager.initialize_exchanges({}, database)
    exchange_manager.connected_exchanges[Location.KRAKEN].extend([kraken1, kraken2])
    exchange_manager.connected_exchanges[Location.FTX].extend([ftx1, ftx2])
    assert set(exchange_manager.iterate_exchanges()) == {
        kraken1, kraken2, ftx1, ftx2,
    }

    # Exclude both kraken instances from syncing: only the FTX ones remain.
    database.set_settings(ModifiableDBSettings(
        non_syncing_exchanges=[kraken1.location_id(), kraken2.location_id()],
    ))
    assert set(exchange_manager.iterate_exchanges()) == {ftx1, ftx2}

    # Exclude a single FTX instance: everything else is iterated again.
    database.set_settings(ModifiableDBSettings(
        non_syncing_exchanges=[ftx1.location_id()],
    ))
    assert set(exchange_manager.iterate_exchanges()) == {ftx2, kraken1, kraken2}
예제 #3
0
def add_settings_to_test_db(
        db: DBHandler,
        db_settings: Optional[Dict[str, Any]],
        ignored_assets: Optional[List[Asset]],
        data_migration_version: Optional[int],
) -> None:
    """Apply test defaults plus any caller-given settings to the test DB.

    Also registers the given ignored assets and, if requested, records the
    last data migration version directly in the settings table.
    """
    settings: Dict[str, Any] = {
        # Do not submit usage analytics during tests
        'submit_usage_analytics': False,
        'main_currency': DEFAULT_TESTS_MAIN_CURRENCY,
    }
    if db_settings is not None:
        settings.update(db_settings)  # caller-given settings override the defaults
    db.set_settings(ModifiableDBSettings(**settings))  # type: ignore

    if ignored_assets:
        for asset in ignored_assets:
            db.add_to_ignored_assets(asset)

    if data_migration_version is not None:
        db.conn.cursor().execute(
            'INSERT OR REPLACE INTO settings(name, value) VALUES(?, ?)',
            ('last_data_migration', data_migration_version),
        )
        db.conn.commit()
예제 #4
0
 def put(
         self,
         premium_should_sync: Optional[bool],
         include_crypto2crypto: Optional[bool],
         anonymized_logs: Optional[bool],
         submit_usage_analytics: Optional[bool],
         ui_floating_precision: Optional[int],
         taxfree_after_period: Optional[int],
         balance_save_frequency: Optional[int],
         include_gas_costs: Optional[bool],
         historical_data_start: Optional[str],
         eth_rpc_endpoint: Optional[str],
         main_currency: Optional[Asset],
         date_display_format: Optional[str],
 ) -> Response:
     """Handle a settings PUT request: pack the given values and apply them."""
     # Collect the incoming values first, then hand them to the settings
     # object in one go; keys follow the signature order for readability.
     new_values = dict(
         premium_should_sync=premium_should_sync,
         include_crypto2crypto=include_crypto2crypto,
         anonymized_logs=anonymized_logs,
         submit_usage_analytics=submit_usage_analytics,
         ui_floating_precision=ui_floating_precision,
         taxfree_after_period=taxfree_after_period,
         balance_save_frequency=balance_save_frequency,
         include_gas_costs=include_gas_costs,
         historical_data_start=historical_data_start,
         eth_rpc_endpoint=eth_rpc_endpoint,
         main_currency=main_currency,
         date_display_format=date_display_format,
     )
     return self.rest_api.set_settings(ModifiableDBSettings(**new_values))
예제 #5
0
 def transform_data(  # pylint: disable=no-self-use
     self,
     data: Dict[str, Any],
     **_kwargs: Any,
 ) -> Any:
     """Turn the deserialized payload into a ModifiableDBSettings object."""
     # Lookup order matches the original keyword ordering, so a missing key
     # raises the same KeyError first.
     field_names = (
         'premium_should_sync',
         'include_crypto2crypto',
         'anonymized_logs',
         'ui_floating_precision',
         'taxfree_after_period',
         'balance_save_frequency',
         'include_gas_costs',
         'historical_data_start',
         'eth_rpc_endpoint',
         'main_currency',
         'date_display_format',
         'thousand_separator',
         'decimal_separator',
         'currency_location',
         'submit_usage_analytics',
         'kraken_account_type',
         'active_modules',
         'frontend_settings',
     )
     return ModifiableDBSettings(**{name: data[name] for name in field_names})
예제 #6
0
def test_download_snapshot(rotkehlchen_api_server):
    """Download a snapshot as a zip archive and verify the extracted CSVs."""
    db = rotkehlchen_api_server.rest_api.rotkehlchen.data.db
    ts = Timestamp(ts_now())
    _populate_db_with_balances(db.conn, ts)
    _populate_db_with_location_data(db.conn, ts)

    db.set_settings(ModifiableDBSettings(main_currency=A_EUR))
    response = requests.post(
        api_url_for(rotkehlchen_api_server, 'dbsnapshotdownloadingresource'),
        json={'timestamp': ts},
    )
    with tempfile.TemporaryDirectory() as tmpdirname:
        zip_path = Path(tmpdirname, 'temp.zip')
        extract_dir = Path(tmpdirname, 'extractdir')
        zip_path.write_bytes(response.content)
        with zipfile.ZipFile(zip_path, 'r') as zip_ref:
            zip_ref.extractall(extract_dir)
        assert_csv_export_response(
            response,
            extract_dir,
            main_currency=A_EUR,
            is_download=True,
        )
예제 #7
0
 def transform_data(  # pylint: disable=no-self-use
     self,
     data: Dict[str, Any],
     **_kwargs: Any,
 ) -> Any:
     """Turn the deserialized payload into a ModifiableDBSettings object."""
     # Lookup order matches the original keyword ordering, so a missing key
     # raises the same KeyError first.
     field_names = (
         'premium_should_sync',
         'include_crypto2crypto',
         'anonymized_logs',
         'ui_floating_precision',
         'taxfree_after_period',
         'balance_save_frequency',
         'include_gas_costs',
         'eth_rpc_endpoint',
         'ksm_rpc_endpoint',
         'main_currency',
         'date_display_format',
         'submit_usage_analytics',
         'kraken_account_type',
         'active_modules',
         'frontend_settings',
         'account_for_assets_movements',
         'btc_derivation_gap_limit',
         'calculate_past_cost_basis',
         'display_date_in_localtime',
         'historical_price_oracles',
         'current_price_oracles',
     )
     return ModifiableDBSettings(**{name: data[name] for name in field_names})
예제 #8
0
def test_upload_data_to_server_same_hash(rotkehlchen_instance, db_password):
    """Test that if the server has same data hash as we no upload happens"""
    db = rotkehlchen_instance.data.db
    assert db.get_last_data_upload_ts() == 0

    # Touch the DB so that last_write_ts becomes non-zero
    db.set_settings(ModifiableDBSettings(main_currency=A_EUR))
    _, our_hash = rotkehlchen_instance.data.compress_and_encrypt_db(db_password)

    patched_put = patch.object(
        rotkehlchen_instance.premium.session,
        'put',
        return_value=None,
    )
    patched_get = create_patched_requests_get_for_premium(
        session=rotkehlchen_instance.premium.session,
        metadata_last_modify_ts=0,
        metadata_data_hash=our_hash,  # remote hash identical to ours
        # Smaller Remote DB size
        metadata_data_size=2,
        saved_data='foo',
    )

    with patched_get, patched_put as put_mock:
        rotkehlchen_instance.premium_sync_manager.maybe_upload_data_to_server()
        # No upload may be attempted since the hashes are identical
        assert not put_mock.called
예제 #9
0
def _init_database(
    data_dir: FilePath,
    password: str,
    msg_aggregator: MessagesAggregator,
    db_settings: Optional[Dict[str, Any]],
    ignored_assets: Optional[List[Asset]],
    blockchain_accounts: BlockchainAccounts,
) -> DBHandler:
    """Create a DBHandler for tests, primed with settings, ignored assets
    and the fixture-provided blockchain accounts.
    """
    db = DBHandler(data_dir, password, msg_aggregator)
    settings: Dict[str, Any] = {
        # Do not submit usage analytics during tests
        'submit_usage_analytics': False,
        'main_currency': DEFAULT_TESTS_MAIN_CURRENCY,
    }
    if db_settings is not None:
        settings.update(db_settings)  # caller-given settings override the defaults
    db.set_settings(ModifiableDBSettings(**settings))

    if ignored_assets:
        for asset in ignored_assets:
            db.add_to_ignored_assets(asset)

    # Make sure that the fixture provided accounts are in the blockchain
    db.add_blockchain_accounts(SupportedBlockchain.ETHEREUM,
                               blockchain_accounts.eth)
    db.add_blockchain_accounts(SupportedBlockchain.BITCOIN,
                               blockchain_accounts.btc)

    return db
예제 #10
0
def test_upload_data_to_server(rotkehlchen_instance, username, db_password):
    """Test our side of uploading data to the server"""
    db = rotkehlchen_instance.data.db
    assert db.get_last_data_upload_ts() == 0

    # Touch the DB so that last_write_ts becomes non-zero
    db.set_settings(ModifiableDBSettings(main_currency=A_GBP))
    last_write_ts = db.get_last_write_ts()
    _, our_hash = rotkehlchen_instance.data.compress_and_encrypt_db(db_password)
    remote_hash = get_different_hash(our_hash)

    def mock_successful_upload(
            url,  # pylint: disable=unused-argument
            data,
            timeout,  # pylint: disable=unused-argument
    ):
        # The data blob is encrypted and so differs between runs; only the
        # deterministic metadata fields can be checked exactly.
        assert 'data_blob' in data
        assert data['original_hash'] == our_hash
        assert data['last_modify_ts'] == last_write_ts
        assert 'index' in data
        assert len(data['data_blob']) == data['length']
        assert 'nonce' in data
        assert data['compression'] == 'zlib'

        return MockResponse(200, '{"success": true}')

    patched_put = patch.object(
        rotkehlchen_instance.premium.session,
        'put',
        side_effect=mock_successful_upload,
    )
    patched_get = create_patched_requests_get_for_premium(
        session=rotkehlchen_instance.premium.session,
        metadata_last_modify_ts=0,
        metadata_data_hash=remote_hash,
        # Smaller Remote DB size
        metadata_data_size=2,
        saved_data='foo',
    )

    now = ts_now()
    with patched_get, patched_put:
        rotkehlchen_instance.premium_sync_manager.maybe_upload_data_to_server()

    last_ts = db.get_last_data_upload_ts()
    msg = 'The last data upload timestamp should have been saved in the db as now'
    assert now <= last_ts < now + 50, msg
    last_ts = rotkehlchen_instance.premium_sync_manager.last_data_upload_ts
    msg = 'The last data upload timestamp should also be in memory'
    assert now <= last_ts < now + 50, msg

    # and now logout and login again and make sure that the last_data_upload_ts is correct
    rotkehlchen_instance.logout()
    rotkehlchen_instance.data.unlock(username, db_password, create_new=False)
    assert last_ts == rotkehlchen_instance.premium_sync_manager.last_data_upload_ts
    # NOTE: read data.db again here -- unlock recreates the handler
    assert last_ts == rotkehlchen_instance.data.db.get_last_data_upload_ts()
예제 #11
0
File: test_db.py  Project: djibix/rotki
def test_settings_entry_types(database):
    """Write a full settings object and check every field's type and value."""
    database.set_settings(
        ModifiableDBSettings(
            premium_should_sync=True,
            include_crypto2crypto=True,
            anonymized_logs=True,
            ui_floating_precision=1,
            taxfree_after_period=1,
            include_gas_costs=True,
            historical_data_start='01/08/2015',
            eth_rpc_endpoint='http://localhost:8545',
            balance_save_frequency=24,
            date_display_format='%d/%m/%Y %H:%M:%S %z',
            thousand_separator=',',
            decimal_separator='.',
            currency_location='after',
            submit_usage_analytics=False,
        ))

    res = database.get_settings()
    _NO_VALUE = object()  # sentinel: only the type is asserted for this field
    # (attribute, expected type, expected value) in the original assert order
    checks = [
        ('version', int, ROTKEHLCHEN_DB_VERSION),
        ('last_write_ts', int, _NO_VALUE),
        ('premium_should_sync', bool, True),
        ('include_crypto2crypto', bool, True),
        ('ui_floating_precision', int, 1),
        ('taxfree_after_period', int, 1),
        ('historical_data_start', str, '01/08/2015'),
        ('eth_rpc_endpoint', str, 'http://localhost:8545'),
        ('balance_save_frequency', int, 24),
        ('last_balance_save', int, 0),
        ('main_currency', Asset, DEFAULT_TESTS_MAIN_CURRENCY),
        ('anonymized_logs', bool, True),
        ('date_display_format', str, '%d/%m/%Y %H:%M:%S %z'),
        ('thousand_separator', str, ','),
        ('decimal_separator', str, '.'),
        ('currency_location', str, 'after'),
        ('submit_usage_analytics', bool, False),
        ('active_modules', list, DEFAULT_ACTIVE_MODULES),
        ('frontend_settings', str, ''),
    ]
    for attribute, expected_type, expected_value in checks:
        value = getattr(res, attribute)
        assert isinstance(value, expected_type), attribute
        if expected_value is _NO_VALUE:
            continue
        if expected_type is bool:
            # identity check for booleans, as in the original asserts
            assert value is expected_value, attribute
        else:
            assert value == expected_value, attribute
예제 #12
0
def test_balance_save_frequency_check(data_dir, username):
    """A balance save becomes due once the frequency setting shrinks below the gap."""
    data = DataHandler(data_dir, MessagesAggregator())
    data.unlock(username, '123', create_new=True)

    # Record location data almost 24 hours old (20 seconds short of it)
    data_save_ts = int(time.time()) - 24 * 60 * 60 + 20
    data.db.add_multiple_location_data([LocationData(
        time=data_save_ts,
        location=Location.KRAKEN.serialize_for_db(),
        usd_value='1500',
    )])

    # With the default frequency the gap is still too small to save ...
    assert not data.should_save_balances()
    # ... but lowering the frequency to 5 hours makes a save due
    data.db.set_settings(ModifiableDBSettings(balance_save_frequency=5))
    assert data.should_save_balances()

    assert data.db.get_last_balance_save_time() == data_save_ts
예제 #13
0
def test_can_unlock_db_with_disabled_taxfree_after_period(data_dir, username):
    """Test that with taxfree_after_period being empty the DB can be opened

    Regression test for https://github.com/rotki/rotki/issues/587
    """
    msg_aggregator = MessagesAggregator()
    # Store the "disabled" (-1) value for the setting
    handler = DataHandler(data_dir, msg_aggregator)
    handler.unlock(username, '123', create_new=True)
    handler.db.set_settings(ModifiableDBSettings(taxfree_after_period=-1))
    del handler

    # Relogin: unlocking must not raise and the setting must read back as None
    handler = DataHandler(data_dir, msg_aggregator)
    handler.unlock(username, '123', create_new=False)
    assert handler.db.get_settings().taxfree_after_period is None
예제 #14
0
def _init_database(
    data_dir: FilePath,
    password: str,
    msg_aggregator: MessagesAggregator,
    db_settings: Optional[Dict[str, Any]],
    ignored_assets: Optional[List[Asset]],
    blockchain_accounts: BlockchainAccounts,
    include_etherscan_key: bool,
) -> DBHandler:
    """Create a DBHandler for tests, primed with settings, ignored assets,
    the fixture-provided blockchain accounts and (optionally) the tests-only
    etherscan API key.
    """
    db = DBHandler(data_dir, password, msg_aggregator)
    settings: Dict[str, Any] = {
        # Do not submit usage analytics during tests
        'submit_usage_analytics': False,
        'main_currency': DEFAULT_TESTS_MAIN_CURRENCY,
    }
    if db_settings is not None:
        settings.update(db_settings)  # caller-given settings override the defaults
    db.set_settings(ModifiableDBSettings(**settings))

    if ignored_assets:
        for asset in ignored_assets:
            db.add_to_ignored_assets(asset)

    # Make sure that the fixture provided accounts are in the blockchain
    db.add_blockchain_accounts(
        SupportedBlockchain.ETHEREUM,
        [BlockchainAccountData(address=x) for x in blockchain_accounts.eth],
    )
    db.add_blockchain_accounts(
        SupportedBlockchain.BITCOIN,
        [BlockchainAccountData(address=x) for x in blockchain_accounts.btc],
    )
    if include_etherscan_key:
        # Add the tests only etherscan API key
        db.add_external_service_credentials([
            ExternalServiceApiCredentials(
                service=ExternalService.ETHERSCAN,
                api_key=ApiKey('8JT7WQBB2VQP5C3416Y8X3S8GBA3CVZKP4'),
            )
        ])

    return db
예제 #15
0
File: database.py  Project: step21/rotki
def add_settings_to_test_db(
    db: DBHandler,
    db_settings: Optional[Dict[str, Any]],
    ignored_assets: Optional[List[Asset]],
) -> None:
    """Apply test defaults plus any caller-given settings to the test DB and
    register the given ignored assets.
    """
    settings: Dict[str, Any] = {
        # Do not submit usage analytics during tests
        'submit_usage_analytics': False,
        'main_currency': DEFAULT_TESTS_MAIN_CURRENCY,
    }
    if db_settings is not None:
        settings.update(db_settings)  # caller-given settings override the defaults
    db.set_settings(ModifiableDBSettings(**settings))  # type: ignore

    if ignored_assets:
        for asset in ignored_assets:
            db.add_to_ignored_assets(asset)
예제 #16
0
def test_delete_snapshot(rotkehlchen_api_server):
    """Delete a snapshot, verify both tables are purged, and that deleting a
    non-existent timestamp errors.
    """
    db = rotkehlchen_api_server.rest_api.rotkehlchen.data.db
    conn = db.conn
    ts = Timestamp(ts_now())
    _populate_db_with_balances(conn, ts)
    _populate_db_with_location_data(conn, ts)
    db.set_settings(ModifiableDBSettings(main_currency=A_EUR))

    response = requests.delete(
        api_url_for(rotkehlchen_api_server, 'dbsnapshotdeletingresource'),
        json={'timestamp': ts},
    )
    assert_simple_ok_response(response)
    cursor = conn.cursor()
    # Both snapshot tables must contain no rows for the deleted timestamp
    for table in ('timed_balances', 'timed_location_data'):
        rows = cursor.execute(
            f'SELECT time FROM {table} WHERE time=?', (ts,),
        ).fetchall()
        assert len(rows) == 0

    # check that an error is thrown for invalid timestamp
    response = requests.delete(
        api_url_for(rotkehlchen_api_server, 'dbsnapshotdeletingresource'),
        json={'timestamp': 1000000},
    )
    assert_error_response(
        response,
        contained_in_msg='No snapshot found for the specified timestamp',
        status_code=HTTPStatus.CONFLICT,
    )
예제 #17
0
def test_import_snapshot(rotkehlchen_api_server, tmpdir_factory):
    """Test snapshot importing via PUT (file paths) and POST (file upload),
    plus the failure modes for duplicate, inconsistent and malformed files.
    """
    conn = rotkehlchen_api_server.rest_api.rotkehlchen.data.db.conn
    ts = Timestamp(ts_now())
    _populate_db_with_balances(conn, ts)
    _populate_db_with_location_data(conn, ts)
    rotkehlchen_api_server.rest_api.rotkehlchen.data.db.set_settings(
        ModifiableDBSettings(main_currency=A_EUR))

    def _put_import(directory):
        # PUT-import the two snapshot CSV files found in `directory`
        return requests.put(
            api_url_for(rotkehlchen_api_server, 'dbsnapshotimportingresource'),
            json={
                'balances_snapshot_file':
                f'{directory}/{BALANCES_FOR_IMPORT_FILENAME}',
                'location_data_snapshot_file':
                f'{directory}/{LOCATION_DATA_IMPORT_FILENAME}',
            },
        )

    # check that importing a valid snapshot passes using PUT
    csv_dir = str(tmpdir_factory.mktemp('test_csv_dir'))
    _create_snapshot_with_valid_data(csv_dir, Timestamp(1651071105))
    assert_simple_ok_response(_put_import(csv_dir))

    # check that POST with the file works. Use context managers so the
    # uploaded file handles do not leak (the original left them open).
    csv_dir2 = str(tmpdir_factory.mktemp('test_csv_dir_2'))
    _create_snapshot_with_valid_data_for_post(csv_dir2, Timestamp(1651075))
    with open(f'{csv_dir2}/{BALANCES_FOR_IMPORT_FILENAME}') as balances_file, \
            open(f'{csv_dir2}/{LOCATION_DATA_IMPORT_FILENAME}') as location_file:
        response = requests.post(
            api_url_for(rotkehlchen_api_server, 'dbsnapshotimportingresource'),
            files={
                'balances_snapshot_file': balances_file,
                'location_data_snapshot_file': location_file,
            },
        )
    assert_simple_ok_response(response)

    # check that importing a snapshot that is present in the db fails.
    csv_dir3 = str(tmpdir_factory.mktemp('test_csv_dir3'))
    response = requests.post(
        api_url_for(rotkehlchen_api_server, 'dbsnapshotexportingresource'),
        json={'timestamp': ts, 'path': csv_dir3},
    )
    assert_csv_export_response(
        response,
        csv_dir3,
        main_currency=A_EUR,
        is_download=False,
    )
    assert_error_response(
        _put_import(csv_dir3),
        contained_in_msg='Adding timed_balance failed',
        status_code=HTTPStatus.CONFLICT,
    )

    # check that importing snapshot with different timestamps fails.
    csv_dir4 = str(tmpdir_factory.mktemp('test_csv_dir4'))
    _create_snapshot_different_timestamps(csv_dir4, ts)
    assert_error_response(
        _put_import(csv_dir4),
        contained_in_msg='csv file has different timestamps',
        status_code=HTTPStatus.CONFLICT,
    )

    # check that importing snapshot with invalid header fails.
    csv_dir5 = str(tmpdir_factory.mktemp('test_csv_dir5'))
    _create_snapshot_with_invalid_headers(csv_dir5, ts)
    assert_error_response(
        _put_import(csv_dir5),
        contained_in_msg='csv file has invalid headers',
        status_code=HTTPStatus.CONFLICT,
    )

    # check that importing snapshot with unknown asset_identifier fails.
    csv_dir6 = str(tmpdir_factory.mktemp('test_csv_dir6'))
    _create_snapshot_with_unknown_asset(csv_dir6, ts)
    assert_error_response(
        _put_import(csv_dir6),
        contained_in_msg='snapshot contains an unknown asset',
        status_code=HTTPStatus.CONFLICT,
    )