def test_settings_entry_types(data_dir, username):
    data = DataHandler(data_dir)
    data.unlock(username, '123', create_new=True)

    data.db.set_settings({
        'version': 1,
        'last_write_ts': 1,
        'premium_should_sync': True,
        'include_crypto2crypto': True,
        'last_data_upload_ts': 1,
        'ui_floating_precision': 1,
        'taxfree_after_period': 1,
        'historical_data_start': '01/08/2015',
        'eth_rpc_port': '8545',
        'balance_save_frequency': 24,
    })

    res = data.db.get_settings()
    assert isinstance(res['db_version'], int)
    assert isinstance(res['last_write_ts'], int)
    assert isinstance(res['premium_should_sync'], bool)
    assert isinstance(res['include_crypto2crypto'], bool)
    assert isinstance(res['ui_floating_precision'], int)
    assert isinstance(res['taxfree_after_period'], int)
    assert isinstance(res['historical_data_start'], str)
    assert isinstance(res['eth_rpc_port'], str)
    assert isinstance(res['balance_save_frequency'], int)
    assert isinstance(res['last_balance_save'], int)
def test_query_timed_balances(data_dir, username):
    msg_aggregator = MessagesAggregator()
    data = DataHandler(data_dir, msg_aggregator)
    data.unlock(username, '123', create_new=True)
    data.db.add_multiple_balances(asset_balances)

    result = data.db.query_timed_balances(
        from_ts=1451606401,
        to_ts=1485907100,
        asset=A_USD,
    )
    assert len(result) == 1
    assert result[0].time == 1465171200
    assert result[0].amount == '500'
    assert result[0].usd_value == '500'

    result = data.db.query_timed_balances(
        from_ts=1451606300,
        to_ts=1485907000,
        asset=A_ETH,
    )
    assert len(result) == 2
    assert result[0].time == 1451606401
    assert result[0].amount == '2'
    assert result[0].usd_value == '1.7068'
    assert result[1].time == 1465171201
    assert result[1].amount == '10'
    assert result[1].usd_value == '123'
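# `asset_balances` above is module-level test data not shown in this excerpt.
# A minimal sketch consistent with the assertions in the test, assuming an
# AssetBalance named tuple with the fields the test reads (the original
# fixture may well contain additional entries outside the queried ranges):
asset_balances = [
    AssetBalance(time=1451606401, asset=A_ETH, amount='2', usd_value='1.7068'),
    AssetBalance(time=1465171200, asset=A_USD, amount='500', usd_value='500'),
    AssetBalance(time=1465171201, asset=A_ETH, amount='10', usd_value='123'),
]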
def test_upgrade_db_4_to_5(data_dir, username):
    """Test upgrading the DB from version 4 to version 5, rename BCC to BCH"""
    msg_aggregator = MessagesAggregator()
    data = DataHandler(data_dir, msg_aggregator)
    with creation_patch:
        data.unlock(username, '123', create_new=True)
    # Manually set version (both here and in 2 -> 3 it needs to be done like
    # this; the target patch can't be used for some reason. Still have not
    # debugged what fails.)
    cursor = data.db.conn.cursor()
    cursor.execute(
        'INSERT OR REPLACE INTO settings(name, value) VALUES(?, ?)',
        ('version', str(4)),
    )
    data.db.conn.commit()

    populate_db_and_check_for_asset_renaming(
        cursor=cursor,
        data=data,
        data_dir=data_dir,
        msg_aggregator=msg_aggregator,
        username=username,
        to_rename_asset='BCC',
        renamed_asset=A_BCH,
        target_version=5,
    )
    # Also make sure that we have updated to the target version
    assert data.db.get_version() == 5
def __init__(self, args):
    # --cache related variables start
    self.lock = Semaphore()
    self.lock.acquire()
    self.results_cache: ResultCache = dict()
    self.cache_ttl_secs = CACHE_RESPONSE_FOR_SECS
    # --cache related variables end

    self.premium = None
    self.connected_exchanges = []
    self.user_is_logged_in = False

    logfilename = None
    if args.logtarget == 'file':
        logfilename = args.logfile

    if args.loglevel == 'debug':
        loglevel = logging.DEBUG
    elif args.loglevel == 'info':
        loglevel = logging.INFO
    elif args.loglevel == 'warn':
        loglevel = logging.WARN
    elif args.loglevel == 'error':
        loglevel = logging.ERROR
    elif args.loglevel == 'critical':
        loglevel = logging.CRITICAL
    else:
        raise ValueError('Should never get here. Illegal log value')

    logging.basicConfig(
        filename=logfilename,
        filemode='w',
        level=loglevel,
        format='%(asctime)s -- %(levelname)s:%(name)s:%(message)s',
        datefmt='%d/%m/%Y %H:%M:%S %Z',
    )

    if not args.logfromothermodules:
        logging.getLogger('zerorpc').setLevel(logging.CRITICAL)
        logging.getLogger('zerorpc.channel').setLevel(logging.CRITICAL)
        logging.getLogger('urllib3').setLevel(logging.CRITICAL)
        logging.getLogger('urllib3.connectionpool').setLevel(logging.CRITICAL)

    self.sleep_secs = args.sleep_secs
    self.data_dir = args.data_dir
    self.args = args
    self.poloniex = None
    self.kraken = None
    self.bittrex = None
    self.bitmex = None
    self.binance = None

    self.msg_aggregator = MessagesAggregator()
    self.data = DataHandler(self.data_dir, self.msg_aggregator)
    # Initialize the Inquirer singleton
    Inquirer(data_dir=self.data_dir)

    self.lock.release()
    self.shutdown_event = gevent.event.Event()
def test_add_margin_positions(data_dir, username, caplog):
    """Test that adding and retrieving margin positions from the DB works fine.

    Also duplicates should be ignored and an error returned
    """
    msg_aggregator = MessagesAggregator()
    data = DataHandler(data_dir, msg_aggregator)
    data.unlock(username, '123', create_new=True)

    margin1 = MarginPosition(
        location=Location.BITMEX,
        open_time=1451606400,
        close_time=1451616500,
        profit_loss=FVal('1.0'),
        pl_currency=A_BTC,
        fee=Fee(FVal('0.01')),
        fee_currency=A_EUR,
        link='',
        notes='',
    )
    margin2 = MarginPosition(
        location=Location.BITMEX,
        open_time=1451626500,
        close_time=1451636500,
        profit_loss=FVal('0.5'),
        pl_currency=A_BTC,
        fee=Fee(FVal('0.01')),
        fee_currency=A_EUR,
        link='',
        notes='',
    )
    margin3 = MarginPosition(
        location=Location.POLONIEX,
        open_time=1452636501,
        close_time=1459836501,
        profit_loss=FVal('2.5'),
        pl_currency=A_BTC,
        fee=Fee(FVal('0.01')),
        fee_currency=A_EUR,
        link='',
        notes='',
    )

    # Add and retrieve the first 2 margins. All should be fine.
    data.db.add_margin_positions([margin1, margin2])
    errors = msg_aggregator.consume_errors()
    warnings = msg_aggregator.consume_warnings()
    assert len(errors) == 0
    assert len(warnings) == 0
    returned_margins = data.db.get_margin_positions()
    assert returned_margins == [margin1, margin2]

    # Add the last 2 margins. Since margin2 already exists in the DB it should be
    # ignored and a warning should be logged
    data.db.add_margin_positions([margin2, margin3])
    assert (
        'Did not add "Margin position with id 0a57acc1f4c09da0f194c59c4cd240e6'
        '8e2d36e56c05b3f7115def9b8ee3943f'
    ) in caplog.text
    returned_margins = data.db.get_margin_positions()
    assert returned_margins == [margin1, margin2, margin3]
def test_remove_queried_address_on_account_remove(data_dir, username):
    msg_aggregator = MessagesAggregator()
    data = DataHandler(data_dir, msg_aggregator)
    data.unlock(username, '123', create_new=True)

    data.db.add_blockchain_accounts(
        SupportedBlockchain.ETHEREUM,
        [
            BlockchainAccountData(address='0xd36029d76af6fE4A356528e4Dc66B2C18123597D'),
        ],
    )
    queried_addresses = QueriedAddresses(data.db)
    queried_addresses.add_queried_address_for_module(
        'makerdao_vaults',
        '0xd36029d76af6fE4A356528e4Dc66B2C18123597D',
    )
    addresses = queried_addresses.get_queried_addresses_for_module('makerdao_vaults')
    assert '0xd36029d76af6fE4A356528e4Dc66B2C18123597D' in addresses

    data.db.remove_blockchain_accounts(
        SupportedBlockchain.ETHEREUM,
        ['0xd36029d76af6fE4A356528e4Dc66B2C18123597D'],
    )
    addresses = queried_addresses.get_queried_addresses_for_module('makerdao_vaults')
    assert not addresses
def test_data_set_fiat_balance(data_dir, username):
    msg_aggregator = MessagesAggregator()
    data = DataHandler(data_dir, msg_aggregator)
    data.unlock(username, '123', create_new=True)

    amount_eur = '100'
    amount_cny = '500'

    success, _ = data.set_fiat_balance(S_EUR, amount_eur)
    assert success
    success, _ = data.set_fiat_balance(S_CNY, amount_cny)
    assert success
    balances = data.get_fiat_balances()
    assert len(balances) == 2
    assert balances[S_EUR] == amount_eur
    assert balances[S_CNY] == amount_cny

    success, _ = data.set_fiat_balance(S_EUR, '')
    balances = data.get_fiat_balances()
    assert len(balances) == 1
    assert balances[S_CNY] == amount_cny

    # also check that all the fiat assets in the fiat table are in
    # all_assets.json
    for fiat in FIAT_CURRENCIES:
        success, _ = data.set_fiat_balance(fiat, '1')
        assert success
def test_can_unlock_db_with_disabled_taxfree_after_period(data_dir, username):
    """Test that with taxfree_after_period being empty the DB can be opened

    Regression test for https://github.com/rotki/rotki/issues/587
    """
    # Set the setting
    msg_aggregator = MessagesAggregator()
    data = DataHandler(data_dir, msg_aggregator)
    data.unlock(username, '123', create_new=True)
    data.db.set_settings(ModifiableDBSettings(taxfree_after_period=-1))

    # now relogin and check that no exception is thrown
    del data
    data = DataHandler(data_dir, msg_aggregator)
    data.unlock(username, '123', create_new=False)
    settings = data.db.get_settings()
    assert settings.taxfree_after_period is None
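# The test above relies on -1 acting as the "disabled" sentinel for
# taxfree_after_period: it is written as -1 but read back as None. A minimal
# sketch of that convention (an illustrative assumption, not rotki's exact
# settings code):
def deserialize_taxfree_after_period(value: int):
    # -1 is stored to mean "disabled" and is surfaced to callers as None
    return None if value == -1 else value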
def test_add_asset_movements(data_dir, username):
    """Test that adding and retrieving asset movements from the DB works fine.

    Also duplicates should be ignored and an error returned
    """
    msg_aggregator = MessagesAggregator()
    data = DataHandler(data_dir, msg_aggregator)
    data.unlock(username, '123', create_new=True)

    movement1 = AssetMovement(
        location=Location.BITMEX,
        category=AssetMovementCategory.DEPOSIT,
        timestamp=1451606400,
        asset=A_BTC,
        amount=FVal('1.0'),
        fee_asset=A_EUR,
        fee=Fee(FVal('0')),
        link='',
    )
    movement2 = AssetMovement(
        location=Location.POLONIEX,
        category=AssetMovementCategory.WITHDRAWAL,
        timestamp=1451608501,
        asset=A_ETH,
        amount=FVal('1.0'),
        fee_asset=A_EUR,
        fee=Fee(FVal('0.01')),
        link='',
    )
    movement3 = AssetMovement(
        location=Location.BITTREX,
        category=AssetMovementCategory.WITHDRAWAL,
        timestamp=1461708501,
        asset=A_ETH,
        amount=FVal('1.0'),
        fee_asset=A_EUR,
        fee=Fee(FVal('0.01')),
        link='',
    )

    # Add and retrieve the first 2 movements. All should be fine.
    data.db.add_asset_movements([movement1, movement2])
    errors = msg_aggregator.consume_errors()
    warnings = msg_aggregator.consume_warnings()
    assert len(errors) == 0
    assert len(warnings) == 0
    returned_movements = data.db.get_asset_movements()
    assert returned_movements == [movement1, movement2]

    # Add the last 2 movements. Since movement2 already exists in the DB it
    # should be ignored and a warning should be shown
    data.db.add_asset_movements([movement2, movement3])
    errors = msg_aggregator.consume_errors()
    warnings = msg_aggregator.consume_warnings()
    assert len(errors) == 0
    assert len(warnings) == 1
    returned_movements = data.db.get_asset_movements()
    assert returned_movements == [movement1, movement2, movement3]
def test_settings_entry_types(data_dir, username):
    msg_aggregator = MessagesAggregator()
    data = DataHandler(data_dir, msg_aggregator)
    data.unlock(username, '123', create_new=True)

    success, msg = data.set_settings({
        'last_write_ts': 1,
        'premium_should_sync': True,
        'include_crypto2crypto': True,
        'last_data_upload_ts': 1,
        'ui_floating_precision': 1,
        'taxfree_after_period': 1,
        'historical_data_start': '01/08/2015',
        'eth_rpc_endpoint': 'http://localhost:8545',
        'balance_save_frequency': 24,
        'anonymized_logs': True,
        'date_display_format': '%d/%m/%Y %H:%M:%S %z',
    })
    assert success
    assert msg == '', f'set settings returned error: "{msg}"'

    res = data.db.get_settings()
    assert isinstance(res['db_version'], int)
    assert res['db_version'] == ROTKEHLCHEN_DB_VERSION
    assert isinstance(res['last_write_ts'], int)
    assert isinstance(res['premium_should_sync'], bool)
    assert res['premium_should_sync'] is True
    assert isinstance(res['include_crypto2crypto'], bool)
    assert res['include_crypto2crypto'] is True
    assert isinstance(res['ui_floating_precision'], int)
    assert res['ui_floating_precision'] == 1
    assert isinstance(res['taxfree_after_period'], int)
    assert res['taxfree_after_period'] == 1
    assert isinstance(res['historical_data_start'], str)
    assert res['historical_data_start'] == '01/08/2015'
    assert isinstance(res['eth_rpc_endpoint'], str)
    assert res['eth_rpc_endpoint'] == 'http://localhost:8545'
    assert isinstance(res['balance_save_frequency'], int)
    assert res['balance_save_frequency'] == 24
    assert isinstance(res['last_balance_save'], int)
    assert res['last_balance_save'] == 0
    assert isinstance(res['main_currency'], str)
    assert res['main_currency'] == 'USD'
    assert isinstance(res['anonymized_logs'], bool)
    assert res['anonymized_logs'] is True
    assert isinstance(res['date_display_format'], str)
    assert res['date_display_format'] == '%d/%m/%Y %H:%M:%S %z'
def __init__(self, args: argparse.Namespace) -> None: """Initialize the Rotkehlchen object May Raise: - SystemPermissionError if the given data directory's permissions are not correct. """ self.lock = Semaphore() self.lock.acquire() # Can also be None after unlock if premium credentials did not # authenticate or premium server temporarily offline self.premium: Optional[Premium] = None self.user_is_logged_in: bool = False configure_logging(args) self.sleep_secs = args.sleep_secs if args.data_dir is None: self.data_dir = default_data_directory() else: self.data_dir = Path(args.data_dir) if not os.access(self.data_dir, os.W_OK | os.R_OK): raise SystemPermissionError( f'The given data directory {self.data_dir} is not readable or writable', ) self.args = args self.msg_aggregator = MessagesAggregator() self.greenlet_manager = GreenletManager(msg_aggregator=self.msg_aggregator) self.exchange_manager = ExchangeManager(msg_aggregator=self.msg_aggregator) # Initialize the AssetResolver singleton AssetResolver(data_directory=self.data_dir) self.data = DataHandler(self.data_dir, self.msg_aggregator) self.cryptocompare = Cryptocompare(data_directory=self.data_dir, database=None) self.coingecko = Coingecko() self.icon_manager = IconManager(data_dir=self.data_dir, coingecko=self.coingecko) self.greenlet_manager.spawn_and_track( after_seconds=None, task_name='periodically_query_icons_until_all_cached', method=self.icon_manager.periodically_query_icons_until_all_cached, batch_size=ICONS_BATCH_SIZE, sleep_time_secs=ICONS_QUERY_SLEEP, ) # Initialize the Inquirer singleton Inquirer( data_dir=self.data_dir, cryptocompare=self.cryptocompare, coingecko=self.coingecko, ) # Keeps how many trades we have found per location. Used for free user limiting self.actions_per_location: Dict[str, Dict[Location, int]] = { 'trade': defaultdict(int), 'asset_movement': defaultdict(int), } self.lock.release() self.shutdown_event = gevent.event.Event()
def __init__(self, args: argparse.Namespace) -> None:
    self.lock = Semaphore()
    self.lock.acquire()

    # Can also be None after unlock if premium credentials did not
    # authenticate or premium server temporarily offline
    self.premium: Optional[Premium] = None
    self.user_is_logged_in = False

    logfilename = None
    if args.logtarget == 'file':
        logfilename = args.logfile

    if args.loglevel == 'debug':
        loglevel = logging.DEBUG
    elif args.loglevel == 'info':
        loglevel = logging.INFO
    elif args.loglevel == 'warn':
        loglevel = logging.WARN
    elif args.loglevel == 'error':
        loglevel = logging.ERROR
    elif args.loglevel == 'critical':
        loglevel = logging.CRITICAL
    else:
        raise AssertionError('Should never get here. Illegal log value')

    logging.basicConfig(
        filename=logfilename,
        filemode='w',
        level=loglevel,
        format='%(asctime)s -- %(levelname)s:%(name)s:%(message)s',
        datefmt='%d/%m/%Y %H:%M:%S %Z',
    )

    if not args.logfromothermodules:
        logging.getLogger('urllib3').setLevel(logging.CRITICAL)
        logging.getLogger('urllib3.connectionpool').setLevel(logging.CRITICAL)

    self.sleep_secs = args.sleep_secs
    self.data_dir = args.data_dir
    self.args = args
    self.msg_aggregator = MessagesAggregator()
    self.greenlet_manager = GreenletManager(msg_aggregator=self.msg_aggregator)
    self.exchange_manager = ExchangeManager(msg_aggregator=self.msg_aggregator)
    self.all_eth_tokens = AssetResolver().get_all_eth_tokens()
    self.data = DataHandler(self.data_dir, self.msg_aggregator)
    self.cryptocompare = Cryptocompare(data_directory=self.data_dir, database=None)
    # Initialize the Inquirer singleton
    Inquirer(data_dir=self.data_dir, cryptocompare=self.cryptocompare)

    self.lock.release()
    self.shutdown_event = gevent.event.Event()
def __init__(self, args: argparse.Namespace) -> None: """Initialize the Rotkehlchen object This runs during backend initialization so it should be as light as possible. May Raise: - SystemPermissionError if the given data directory's permissions are not correct. """ # Can also be None after unlock if premium credentials did not # authenticate or premium server temporarily offline self.premium: Optional[Premium] = None self.user_is_logged_in: bool = False configure_logging(args) self.sleep_secs = args.sleep_secs if args.data_dir is None: self.data_dir = default_data_directory() else: self.data_dir = Path(args.data_dir) self.data_dir.mkdir(parents=True, exist_ok=True) if not os.access(self.data_dir, os.W_OK | os.R_OK): raise SystemPermissionError( f'The given data directory {self.data_dir} is not readable or writable', ) self.main_loop_spawned = False self.args = args self.api_task_greenlets: List[gevent.Greenlet] = [] self.msg_aggregator = MessagesAggregator() self.greenlet_manager = GreenletManager( msg_aggregator=self.msg_aggregator) self.exchange_manager = ExchangeManager( msg_aggregator=self.msg_aggregator) # Initialize the GlobalDBHandler singleton. Has to be initialized BEFORE asset resolver GlobalDBHandler(data_dir=self.data_dir) self.data = DataHandler(self.data_dir, self.msg_aggregator) self.cryptocompare = Cryptocompare(data_directory=self.data_dir, database=None) self.coingecko = Coingecko() self.icon_manager = IconManager(data_dir=self.data_dir, coingecko=self.coingecko) self.assets_updater = AssetsUpdater(self.msg_aggregator) # Initialize the Inquirer singleton Inquirer( data_dir=self.data_dir, cryptocompare=self.cryptocompare, coingecko=self.coingecko, ) # Keeps how many trades we have found per location. Used for free user limiting self.actions_per_location: Dict[str, Dict[Location, int]] = { 'trade': defaultdict(int), 'asset_movement': defaultdict(int), } self.task_manager: Optional[TaskManager] = None self.shutdown_event = gevent.event.Event()
def _user_creation_and_login(username, password, data_dir, msg_aggregator):
    handler = DataHandler(
        data_directory=data_dir,
        msg_aggregator=msg_aggregator,
    )
    filepath = handler.unlock(username=username, password=password, create_new=True)
    assert filepath is not None

    # Also login as non-new user with same password
    handler.logout()
    filepath = handler.unlock(username=username, password=password, create_new=False)
    assert filepath is not None
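# A hypothetical caller of the helper above, sketched for illustration. The
# fixture names mirror the ones used throughout this file and the password is
# arbitrary; the actual tests invoking the helper are not shown in this excerpt.
def test_user_creation_and_login(data_dir, username, function_scope_messages_aggregator):
    _user_creation_and_login(
        username=username,
        password='123',
        data_dir=data_dir,
        msg_aggregator=function_scope_messages_aggregator,
    )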
def test_upgrade_db_1_to_2(data_dir, username): """Test upgrading the DB from version 1 to version 2""" # Creating a new data dir should work data = DataHandler(data_dir) data.unlock(username, '123', create_new=True) # Manually set to version 1 and input a non checksummed account cursor = data.db.conn.cursor() cursor.execute('INSERT OR REPLACE INTO settings(name, value) VALUES(?, ?)', ('version', str(1))) data.db.conn.commit() data.db.add_blockchain_account( 'ETH', '0xe3580c38b0106899f45845e361ea7f8a0062ef12') # now relogin and check that the account has been re-saved as checksummed del data data = DataHandler(data_dir) data.unlock(username, '123', create_new=False) accounts = data.db.get_blockchain_accounts() assert accounts.eth[0] == '0xe3580C38B0106899F45845E361EA7F8a0062Ef12' assert data.db.get_version() == ROTKEHLCHEN_DB_VERSION
def test_get_netvalue_data_from_date(data_dir, username):
    msg_aggregator = MessagesAggregator()
    data = DataHandler(data_dir, msg_aggregator)
    data.unlock(username, '123', create_new=True)
    add_starting_balances(data)

    times, values = data.db.get_netvalue_data(Timestamp(1491607800))

    assert len(times) == 1
    assert times[0] == 1491607800
    assert len(values) == 1
    assert values[0] == '10700.5'
def __init__(self, args):
    self.lock = Semaphore()
    self.lock.acquire()
    self.results_cache: typing.ResultCache = dict()
    self.premium = None
    self.connected_exchanges = []

    logfilename = None
    if args.logtarget == 'file':
        logfilename = args.logfile

    if args.loglevel == 'debug':
        loglevel = logging.DEBUG
    elif args.loglevel == 'info':
        loglevel = logging.INFO
    elif args.loglevel == 'warn':
        loglevel = logging.WARN
    elif args.loglevel == 'error':
        loglevel = logging.ERROR
    elif args.loglevel == 'critical':
        loglevel = logging.CRITICAL
    else:
        raise ValueError('Should never get here. Illegal log value')

    logging.basicConfig(
        filename=logfilename,
        filemode='w',
        level=loglevel,
        format='%(asctime)s -- %(levelname)s:%(name)s:%(message)s',
        datefmt='%d/%m/%Y %H:%M:%S %Z',
    )

    if not args.logfromothermodules:
        logging.getLogger('zerorpc').setLevel(logging.CRITICAL)
        logging.getLogger('zerorpc.channel').setLevel(logging.CRITICAL)
        logging.getLogger('urllib3').setLevel(logging.CRITICAL)
        logging.getLogger('urllib3.connectionpool').setLevel(logging.CRITICAL)

    self.sleep_secs = args.sleep_secs
    self.data_dir = args.data_dir
    self.args = args
    self.last_data_upload_ts = 0

    self.poloniex = None
    self.kraken = None
    self.bittrex = None
    self.bitmex = None
    self.binance = None

    self.data = DataHandler(self.data_dir)

    self.lock.release()
    self.shutdown_event = gevent.event.Event()
def test_new_user_permission_error(data_dir, function_scope_messages_aggregator):
    not_allowed_dir = data_dir / 'notallowed'
    os.mkdir(not_allowed_dir)
    os.chmod(not_allowed_dir, 0o200)
    handler = DataHandler(
        data_directory=not_allowed_dir,
        msg_aggregator=function_scope_messages_aggregator,
    )
    with pytest.raises(SystemPermissionError):
        handler.unlock(username='******', password='******', create_new=True)

    # Change permissions back so that pytest cleanup can clean it
    os.chmod(not_allowed_dir, 0o777)
def test_db_newer_than_software_raises_error(data_dir, username):
    """
    If the DB version is greater than the current known version in the
    software warn the user to use the latest version of the software
    """
    msg_aggregator = MessagesAggregator()
    data = DataHandler(data_dir, msg_aggregator)
    data.unlock(username, '123', create_new=True)
    # Manually set a bigger version than the current known one
    cursor = data.db.conn.cursor()
    cursor.execute(
        'INSERT OR REPLACE INTO settings(name, value) VALUES(?, ?)',
        ('version', str(ROTKEHLCHEN_DB_VERSION + 1)),
    )
    data.db.conn.commit()

    # now relogin and check that an error is thrown
    del data
    data = DataHandler(data_dir, msg_aggregator)
    with pytest.raises(DBUpgradeError):
        data.unlock(username, '123', create_new=False)
def test_get_latest_asset_value_distribution(data_dir, username):
    msg_aggregator = MessagesAggregator()
    data = DataHandler(data_dir, msg_aggregator)
    data.unlock(username, '123', create_new=True)

    balances = add_starting_balances(data)

    assets = data.db.get_latest_asset_value_distribution()
    assert len(assets) == 4
    assert assets[0] == balances[0]
    assert assets[1] == balances[1]
    assert assets[2] == balances[2]
    assert assets[3] == balances[3]
def test_export_import_db(data_dir, username): """Create a DB, write some data and then after export/import confirm it's there""" data = DataHandler(data_dir) data.unlock(username, '123', create_new=True) data.set_fiat_balance('EUR', 10) encoded_data, data_hash = data.compress_and_encrypt_db('123') # The server would return them decoded encoded_data = encoded_data.decode() data.decompress_and_decrypt_db('123', encoded_data) fiat_balances = data.get_fiat_balances() assert len(fiat_balances) == 1 assert int(fiat_balances['EUR']) == 10
def __init__(self, args: argparse.Namespace) -> None:
    self.lock = Semaphore()
    self.lock.acquire()
    self.premium = None
    self.user_is_logged_in = False

    logfilename = None
    if args.logtarget == 'file':
        logfilename = args.logfile

    if args.loglevel == 'debug':
        loglevel = logging.DEBUG
    elif args.loglevel == 'info':
        loglevel = logging.INFO
    elif args.loglevel == 'warn':
        loglevel = logging.WARN
    elif args.loglevel == 'error':
        loglevel = logging.ERROR
    elif args.loglevel == 'critical':
        loglevel = logging.CRITICAL
    else:
        raise ValueError('Should never get here. Illegal log value')

    logging.basicConfig(
        filename=logfilename,
        filemode='w',
        level=loglevel,
        format='%(asctime)s -- %(levelname)s:%(name)s:%(message)s',
        datefmt='%d/%m/%Y %H:%M:%S %Z',
    )

    if not args.logfromothermodules:
        logging.getLogger('zerorpc').setLevel(logging.CRITICAL)
        logging.getLogger('zerorpc.channel').setLevel(logging.CRITICAL)
        logging.getLogger('urllib3').setLevel(logging.CRITICAL)
        logging.getLogger('urllib3.connectionpool').setLevel(logging.CRITICAL)

    self.sleep_secs = args.sleep_secs
    self.data_dir = args.data_dir
    self.args = args
    self.msg_aggregator = MessagesAggregator()
    self.exchange_manager = ExchangeManager(msg_aggregator=self.msg_aggregator)
    self.data = DataHandler(self.data_dir, self.msg_aggregator)
    # Initialize the Inquirer singleton
    Inquirer(data_dir=self.data_dir)

    self.lock.release()
    self.shutdown_event = gevent.event.Event()
def test_balance_save_frequency_check(data_dir, username):
    data = DataHandler(data_dir)
    data.unlock(username, '123', create_new=True)

    now = int(time.time())
    data.db.add_multiple_location_data([(
        now - 24 * 60 * 60 + 20,
        'kraken',
        '1500',
    )])
    assert not data.should_save_balances()
    data.db.set_settings({'balance_save_frequency': 5})
    assert data.should_save_balances()
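# Why the assertions above hold: the kraken location entry was written just
# under 24 hours ago, so with the default 24h balance_save_frequency no new
# snapshot is due, while lowering the frequency to 5 hours makes one due.
# A minimal sketch of the check being exercised (an illustrative assumption,
# not rotki's exact implementation):
import time

def should_save_balances(last_save_ts: int, balance_save_frequency_hours: int) -> bool:
    # A new snapshot is due once more than `frequency` hours have passed
    return int(time.time()) - last_save_ts > balance_save_frequency_hours * 3600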
def test_users_query_permission_error(data_dir, function_scope_messages_aggregator):
    not_allowed_dir = os.path.join(data_dir, 'notallowed')
    allowed_user_dir = os.path.join(data_dir, 'allowed_user')
    os.mkdir(not_allowed_dir)
    os.chmod(not_allowed_dir, 0o200)
    os.mkdir(allowed_user_dir)
    Path(Path(allowed_user_dir) / 'rotkehlchen.db').touch()
    handler = DataHandler(
        data_directory=data_dir,
        msg_aggregator=function_scope_messages_aggregator,
    )
    assert handler.get_users() == {'allowed_user': '******'}

    # Change permissions back so that pytest cleanup can clean it
    os.chmod(not_allowed_dir, 0o777)
def test_add_and_get_aave_events(data_dir, username):
    """Test that get aave events works fine and returns only events for what we need"""
    msg_aggregator = MessagesAggregator()
    data = DataHandler(data_dir, msg_aggregator)
    data.unlock(username, '123', create_new=True)

    addr1 = make_ethereum_address()
    addr1_events = [AaveEvent(
        event_type='deposit',
        asset=A_DAI,
        value=Balance(amount=FVal(1), usd_value=FVal(1)),
        block_number=1,
        timestamp=Timestamp(1),
        tx_hash='0x01653e88600a6492ad6e9ae2af415c990e623479057e4e93b163e65cfb2d4436',
        log_index=1,
    ), AaveEvent(
        event_type='withdrawal',
        asset=A_DAI,
        value=Balance(amount=FVal(1), usd_value=FVal(1)),
        block_number=2,
        timestamp=Timestamp(2),
        tx_hash='0x4147da3e5d3c0565a99192ce0b32182ab30b8e1067921d9b2a8ef3bd60b7e2ce',
        log_index=2,
    )]
    data.db.add_aave_events(address=addr1, events=addr1_events)

    addr2 = make_ethereum_address()
    addr2_events = [AaveEvent(
        event_type='deposit',
        asset=A_DAI,
        value=Balance(amount=FVal(1), usd_value=FVal(1)),
        block_number=1,
        timestamp=Timestamp(1),
        tx_hash='0x8c094d58f33e8dedcd348cb33b58f3bd447602f1fecb99e51b1c2868029eab55',
        log_index=1,
    ), AaveEvent(
        event_type='withdrawal',
        asset=A_DAI,
        value=Balance(amount=FVal(1), usd_value=FVal(1)),
        block_number=2,
        timestamp=Timestamp(2),
        tx_hash='0x58c67445d26679623f9b7d56a8be260a275cb6744a1c1ae5a8d6883a5a5c03de',
        log_index=2,
    )]
    data.db.add_aave_events(address=addr2, events=addr2_events)

    events = data.db.get_aave_events(address=addr1, atoken=EthereumToken('aDAI'))
    assert events == addr1_events
    events = data.db.get_aave_events(address=addr2, atoken=EthereumToken('aDAI'))
    assert events == addr2_events
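# `make_ethereum_address` is a test factory not shown in this excerpt. A
# minimal sketch of what such a helper might do (an assumption for
# illustration; the real helper lives in the shared test utilities):
import os
from eth_utils import to_checksum_address

def make_ethereum_address() -> str:
    # 20 random bytes -> a checksummed hex address
    return to_checksum_address('0x' + os.urandom(20).hex())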
def test_upgrade_db_1_to_2(data_dir, username): """Test upgrading the DB from version 1 to version 2, which means that ethereum accounts are now checksummed""" msg_aggregator = MessagesAggregator() data = DataHandler(data_dir, msg_aggregator) with creation_patch, target_patch(1): data.unlock(username, '123', create_new=True) # Manually input a non checksummed account data.db.conn.commit() data.db.add_blockchain_account( SupportedBlockchain.ETHEREUM, '0xe3580c38b0106899f45845e361ea7f8a0062ef12', ) # now relogin and check that the account has been re-saved as checksummed del data data = DataHandler(data_dir, msg_aggregator) with target_patch(target_version=2): data.unlock(username, '123', create_new=False) accounts = data.db.get_blockchain_accounts() assert accounts.eth[0] == '0xe3580C38B0106899F45845E361EA7F8a0062Ef12' version = data.db.get_version() # Also make sure that we have updated to the target_version assert version == 2
def test_upgrade_db_3_to_4(data_dir, username):
    """Test upgrading the DB from version 3 to version 4, which means that
    the eth_rpc_port setting is changed to eth_rpc_endpoint"""
    msg_aggregator = MessagesAggregator()
    data = DataHandler(data_dir, msg_aggregator)
    with creation_patch, target_patch(3):
        data.unlock(username, '123', create_new=True)
    # Manually set version and input the old rpcport setting
    cursor = data.db.conn.cursor()
    cursor.execute(
        'INSERT OR REPLACE INTO settings(name, value) VALUES(?, ?)',
        ('version', str(3)),
    )
    cursor.execute(
        'INSERT OR REPLACE INTO settings(name, value) VALUES(?, ?)',
        ('eth_rpc_port', '8585'),
    )
    data.db.conn.commit()

    # now relogin and check that the setting has been changed and the version bumped
    del data
    data = DataHandler(data_dir, msg_aggregator)
    with target_patch(target_version=4):
        data.unlock(username, '123', create_new=False)
    cursor = data.db.conn.cursor()
    query = cursor.execute('SELECT value FROM settings where name="eth_rpc_endpoint";')
    query = query.fetchall()
    assert query[0][0] == 'http://localhost:8585'
    query = cursor.execute('SELECT value FROM settings where name="eth_rpc_port";')
    query = query.fetchall()
    assert len(query) == 0
    version = data.db.get_version()
    # Also make sure that we have updated to the target_version
    assert version == 4
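# `creation_patch` and `target_patch`, used by the upgrade tests above, are
# module-level helpers not shown in this excerpt. A plausible sketch of
# target_patch, assuming it pins the DB version constant via
# unittest.mock.patch (the patched import path here is an assumption):
from unittest.mock import patch

def target_patch(target_version: int):
    # Make the upgrade logic believe target_version is the latest version,
    # so unlock() upgrades the DB only up to it.
    return patch(
        'rotkehlchen.db.upgrade_manager.ROTKEHLCHEN_DB_VERSION',
        new=target_version,
    )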
def test_get_netvalue_data(data_dir, username):
    msg_aggregator = MessagesAggregator()
    data = DataHandler(data_dir, msg_aggregator)
    data.unlock(username, '123', create_new=True)
    add_starting_balances(data)

    times, values = data.db.get_netvalue_data()

    assert len(times) == 3
    assert times[0] == 1451606400
    assert times[1] == 1461606500
    assert times[2] == 1491607800
    assert len(values) == 3
    assert values[0] == '1500'
    assert values[1] == '4500'
    assert values[2] == '10700.5'
def test_export_import_db(data_dir, username): """Create a DB, write some data and then after export/import confirm it's there""" msg_aggregator = MessagesAggregator() data = DataHandler(data_dir, msg_aggregator) data.unlock(username, '123', create_new=True) data.set_fiat_balances({A_EUR: AssetAmount(FVal('10'))}) encoded_data, _ = data.compress_and_encrypt_db('123') # The server would return them decoded encoded_data = encoded_data.decode() data.decompress_and_decrypt_db('123', encoded_data) fiat_balances = data.get_fiat_balances() assert len(fiat_balances) == 1 assert int(fiat_balances[A_EUR]) == 10
def test_settings_entry_types(data_dir, username, accountant):
    msg_aggregator = MessagesAggregator()
    data = DataHandler(data_dir, msg_aggregator)
    data.unlock(username, '123', create_new=True)

    success = data.set_settings(
        {
            'last_write_ts': 1,
            'premium_should_sync': True,
            'include_crypto2crypto': True,
            'last_data_upload_ts': 1,
            'ui_floating_precision': 1,
            'taxfree_after_period': 1,
            'historical_data_start': '01/08/2015',
            'eth_rpc_endpoint': 'http://localhost:8545',
            'balance_save_frequency': 24,
            'anonymized_logs': True,
            'date_display_format': '%d/%m/%Y %H:%M:%S %z',
        },
        accountant=accountant,
    )
    assert success

    res = data.db.get_settings()
    assert isinstance(res.version, int)
    assert res.version == ROTKEHLCHEN_DB_VERSION
    assert isinstance(res.last_write_ts, int)
    assert isinstance(res.premium_should_sync, bool)
    assert res.premium_should_sync is True
    assert isinstance(res.include_crypto2crypto, bool)
    assert res.include_crypto2crypto is True
    assert isinstance(res.ui_floating_precision, int)
    assert res.ui_floating_precision == 1
    assert isinstance(res.taxfree_after_period, int)
    assert res.taxfree_after_period == 1
    assert isinstance(res.historical_data_start, str)
    assert res.historical_data_start == '01/08/2015'
    assert isinstance(res.eth_rpc_endpoint, str)
    assert res.eth_rpc_endpoint == 'http://localhost:8545'
    assert isinstance(res.balance_save_frequency, int)
    assert res.balance_save_frequency == 24
    assert isinstance(res.last_balance_save, int)
    assert res.last_balance_save == 0
    assert isinstance(res.main_currency, str)
    assert res.main_currency == 'USD'
    assert isinstance(res.anonymized_logs, bool)
    assert res.anonymized_logs is True
    assert isinstance(res.date_display_format, str)
    assert res.date_display_format == '%d/%m/%Y %H:%M:%S %z'