def add_starting_nfts(datahandler):
    """Adds a time series for an account owning a NFT"""
    datahandler.db.add_asset_identifiers(['_nft_pickle'])
    # (timestamp, amount, usd_value) for each balance snapshot of the NFT
    balance_rows = [
        (1488326400, '1', '1000'),
        (1488426400, '1', '1000'),
        (1488526400, '2', '2000'),
        (1488626400, '1', '1000'),
    ]
    balances = [
        DBAssetBalance(
            category=BalanceType.ASSET,
            time=Timestamp(ts),
            asset=Asset('_nft_pickle'),
            amount=amount,
            usd_value=usd_value,
        )
        for ts, amount, usd_value in balance_rows
    ]
    datahandler.db.add_multiple_balances(balances)
    datahandler.db.conn.commit()

    # (timestamp, usd_value) for the matching TOTAL location entries
    total_rows = [
        (1488326400, '3000'),
        (1488426400, '4000'),
        (1488526400, '5000'),
        (1488626400, '5500'),
    ]
    location_data = [
        LocationData(
            time=Timestamp(ts),
            location=Location.TOTAL.serialize_for_db(),  # pylint: disable=no-member
            usd_value=usd_value,
        )
        for ts, usd_value in total_rows
    ]
    datahandler.db.add_multiple_location_data(location_data)
def test_multiple_location_data_and_balances_same_timestamp(user_data_dir):
    """
    Test that adding location and balance data with same timestamp raises an error
    and no balance/location is added.
    Regression test for https://github.com/rotki/rotki/issues/1043
    """
    msg_aggregator = MessagesAggregator()
    db = DBHandler(user_data_dir, '123', msg_aggregator, None)

    # Two balance entries for the same asset at the same second must be rejected
    duplicated_balances = [
        DBAssetBalance(
            category=BalanceType.ASSET,
            time=1590676728,
            asset=A_BTC,
            amount='1.0',
            usd_value='8500',
        ),
        DBAssetBalance(
            category=BalanceType.ASSET,
            time=1590676728,
            asset=A_BTC,
            amount='1.1',
            usd_value='9100',
        ),
    ]
    with pytest.raises(InputError) as exc_info:
        db.add_multiple_balances(duplicated_balances)
    assert 'Adding timed_balance failed.' in str(exc_info.value)
    assert exc_info.errisinstance(InputError)
    # Nothing should have been written
    saved_balances = db.query_timed_balances(from_ts=0, to_ts=1590676728, asset=A_BTC)
    assert len(saved_balances) == 0

    # Same for two location entries sharing timestamp and location
    duplicated_locations = [
        LocationData(time=1590676728, location='H', usd_value='55'),
        LocationData(time=1590676728, location='H', usd_value='56'),
    ]
    with pytest.raises(InputError) as exc_info:
        db.add_multiple_location_data(duplicated_locations)
    assert 'Tried to add a timed_location_data for' in str(exc_info.value)
    assert exc_info.errisinstance(InputError)
    saved_locations = db.get_latest_location_value_distribution()
    assert len(saved_locations) == 0
def test_multiple_location_data_and_balances_same_timestamp(
        data_dir, username):
    """Test that adding location and balance data with same timestamp
    does not crash.
    Regression test for https://github.com/rotki/rotki/issues/1043
    """
    msg_aggregator = MessagesAggregator()
    # NOTE(review): the username fixture is deliberately shadowed here — confirm
    username = '******'
    userdata_dir = os.path.join(data_dir, username)
    os.mkdir(userdata_dir)
    db = DBHandler(userdata_dir, '123', msg_aggregator)

    same_ts_balances = [
        AssetBalance(time=1590676728, asset=A_BTC, amount='1.0', usd_value='8500'),
        AssetBalance(time=1590676728, asset=A_BTC, amount='1.1', usd_value='9100'),
    ]
    db.add_multiple_balances(same_ts_balances)
    # Only the first entry per timestamp survives
    stored = db.query_timed_balances(from_ts=0, to_ts=1590676728, asset=A_BTC)
    assert len(stored) == 1

    same_ts_locations = [
        LocationData(time=1590676728, location='H', usd_value='55'),
        LocationData(time=1590676728, location='H', usd_value='56'),
    ]
    db.add_multiple_location_data(same_ts_locations)
    distribution = db.get_latest_location_value_distribution()
    assert len(distribution) == 1
    assert distribution[0].usd_value == '55'
def write_balances_data(self, data: BalancesData, timestamp: Timestamp) -> None:
    """Persist a full balance snapshot in the DB at the given timestamp.

    The keys of the data dictionary can be any kind of asset plus 'location'
    and 'net_usd': per-asset balances, per-location balances, and the total.
    """
    balances = []
    for asset_key, balance_entry in data.items():
        # 'location' and 'net_usd' are handled separately below
        if asset_key in ('location', 'net_usd'):
            continue
        assert isinstance(asset_key, Asset), 'at this point the key should only be Asset type'
        balances.append(AssetBalance(
            time=timestamp,
            asset=asset_key,
            amount=str(balance_entry['amount']),
            usd_value=str(balance_entry['usd_value']),
        ))

    locations = []
    for location_name, location_entry in data['location'].items():
        # Here we know the value is just a Dict since the key to data is 'location'
        location_entry = cast(Dict, location_entry)
        locations.append(LocationData(
            time=timestamp,
            location=location_name,
            usd_value=str(location_entry['usd_value']),
        ))
    # The grand total goes in as a synthetic 'total' location
    locations.append(LocationData(
        time=timestamp,
        location='total',
        usd_value=str(data['net_usd']),
    ))

    self.add_multiple_balances(balances)
    self.add_multiple_location_data(locations)
def test_balance_save_frequency_check(data_dir, username):
    """should_save_balances honors the balance_save_frequency setting and
    get_last_balance_save_time returns the latest location-data timestamp."""
    msg_aggregator = MessagesAggregator()
    data = DataHandler(data_dir, msg_aggregator)
    data.unlock(username, '123', create_new=True)

    now = int(time.time())
    # A save that happened just inside the default 24h window
    data_save_ts = now - 24 * 60 * 60 + 20
    data.db.add_multiple_location_data([LocationData(
        time=data_save_ts,
        location=Location.KRAKEN.serialize_for_db(),
        usd_value='1500',
    )])

    # Still inside the window: no save needed yet
    assert not data.should_save_balances()
    # Shrink the window to 5 hours: the stale save now triggers one
    data.db.set_settings(ModifiableDBSettings(balance_save_frequency=5))
    assert data.should_save_balances()

    assert data.db.get_last_balance_save_time() == data_save_ts
def test_balance_save_frequency_check(data_dir, username):
    """should_save_balances honors the balance_save_frequency setting and
    get_last_balance_save_time returns the latest location-data timestamp."""
    msg_aggregator = MessagesAggregator()
    data = DataHandler(data_dir, msg_aggregator)
    data.unlock(username, '123', create_new=True)

    now = int(time.time())
    # A save that happened just inside the default 24h window
    data_save_ts = now - 24 * 60 * 60 + 20
    data.db.add_multiple_location_data([LocationData(
        time=data_save_ts,
        location='kraken',
        usd_value='1500',
    )])

    # Still inside the window: no save needed yet
    assert not data.should_save_balances()
    # Shrink the window to 5 hours: the stale save now triggers one
    success, msg = data.set_settings({'balance_save_frequency': 5})
    assert success
    assert msg == '', f'set settings returned error: "{msg}"'
    assert data.should_save_balances()

    assert data.db.get_last_balance_save_time() == data_save_ts
def get_timed_location_data(
        self,
        timestamp: Timestamp,
) -> List[LocationData]:
    """Retrieves the timed_location_data from the db for a given timestamp."""
    cursor = self.db.conn.cursor()
    rows = cursor.execute(
        'SELECT time, location, usd_value FROM timed_location_data '
        'WHERE time=?', (timestamp, ),
    )
    # Normalize the usd_value through FVal before stringifying it
    return [
        LocationData(
            time=row[0],
            location=row[1],
            usd_value=str(FVal(row[2])),
        )
        for row in rows
    ]
def get_latest_location_value_distribution(self) -> List[LocationData]:
    """Gets the latest location data

    Returns a list of `LocationData` all at the latest timestamp.
    Essentially this returns the distribution of netvalue across all locations
    """
    cursor = self.conn.cursor()
    query = cursor.execute(
        'SELECT time, location, usd_value FROM timed_location_data WHERE '
        'time=(SELECT MAX(time) FROM timed_location_data);',
    )
    return [
        LocationData(
            time=row[0],
            location=row[1],
            usd_value=row[2],
        )
        for row in query.fetchall()
    ]
def _import_snapshot(
        self,
        balances_list: List[Dict[str, str]],
        location_data_list: List[Dict[str, str]],
) -> Tuple[bool, str]:
    """Import the validated snapshot data to the database.

    Returns a (success, error message) tuple.
    """
    processed_balances = []
    try:
        for raw_balance in balances_list:
            identifier = raw_balance['asset_identifier']
            if identifier.startswith(NFT_DIRECTIVE):
                # NFT identifiers must be registered before they can be referenced
                self.db.add_asset_identifiers([identifier])
            processed_balances.append(DBAssetBalance(
                category=BalanceType.deserialize(raw_balance['category']),
                time=Timestamp(int(raw_balance['timestamp'])),
                asset=Asset(identifier=identifier),
                amount=raw_balance['amount'],
                usd_value=str(FVal(raw_balance['usd_value'])),
            ))
    except UnknownAsset as err:
        return False, f'snapshot contains an unknown asset ({err.asset_name}). Try adding this asset manually.'  # noqa: E501

    processed_locations = [
        LocationData(
            time=Timestamp(int(raw_location['timestamp'])),
            location=Location.deserialize(raw_location['location']).serialize_for_db(),
            usd_value=str(FVal(raw_location['usd_value'])),
        )
        for raw_location in location_data_list
    ]

    try:
        self.db.add_multiple_balances(processed_balances)
        self.db.add_multiple_location_data(processed_locations)
    except InputError as err:
        return False, str(err)
    return True, ''
def add_starting_balances(datahandler) -> List[AssetBalance]:
    """Adds some starting balances and other data to a testing instance"""
    starting_rows = [
        (A_BTC, '1', '1222.66'),
        (A_ETH, '10', '4517.4'),
        (A_EUR, '100', '61.5'),
        (A_XMR, '5', '135.6'),
    ]
    balances = [
        AssetBalance(
            time=Timestamp(1488326400),
            asset=asset,
            amount=amount,
            usd_value=usd_value,
        )
        for asset, amount, usd_value in starting_rows
    ]
    datahandler.db.add_multiple_balances(balances)

    # Also add an unknown/invalid asset. This will generate a warning
    cursor = datahandler.db.conn.cursor()
    cursor.execute(
        'INSERT INTO timed_balances('
        '    time, currency, amount, usd_value) '
        ' VALUES(?, ?, ?, ?)',
        (1469326500, 'ADSADX', '10.1', '100.5'),
    )
    datahandler.db.conn.commit()

    # (timestamp, location, usd_value) triples for the location history
    location_rows = [
        (1451606400, Location.KRAKEN, '100'),
        (1451606400, Location.BANKS, '1000'),
        (1461606500, Location.POLONIEX, '50'),
        (1461606500, Location.KRAKEN, '200'),
        (1461606500, Location.BANKS, '50000'),
        (1491607800, Location.POLONIEX, '100'),
        (1491607800, Location.KRAKEN, '2000'),
        (1491607800, Location.BANKS, '10000'),
        (1491607800, Location.BLOCKCHAIN, '200000'),
        (1451606400, Location.TOTAL, '1500'),
        (1461606500, Location.TOTAL, '4500'),
        (1491607800, Location.TOTAL, '10700.5'),
    ]
    location_data = [
        LocationData(
            time=Timestamp(ts),
            location=location.serialize_for_db(),
            usd_value=usd_value,
        )
        for ts, location, usd_value in location_rows
    ]
    datahandler.db.add_multiple_location_data(location_data)

    return balances
def create_fake_data(self, args: argparse.Namespace) -> None:
    """Populate the DB with a fake random-walk time series of balances.

    Wipes the relevant tables, seeds an initial distribution of
    ``args.starting_amount`` across the requested locations and assets, then
    steps forward in time (every ``args.seconds_between_balance_save``
    seconds) randomly moving total net worth up or down while keeping it
    between the min and max amounts.

    Fix: corrected 'wih' -> 'with' typo in the progress print message.
    """
    self._clean_tables()
    from_ts, to_ts = StatisticsFaker._get_timestamps(args)
    starting_amount, min_amount, max_amount = StatisticsFaker._get_amounts(args)
    total_amount = starting_amount
    locations = [deserialize_location(location) for location in args.locations.split(',')]
    assets = [Asset(symbol) for symbol in args.assets.split(',')]
    go_up_probability = FVal(args.go_up_probability)

    # Add the first distribution of location data
    location_data = []
    for idx, value in enumerate(divide_number_in_parts(starting_amount, len(locations))):
        location_data.append(LocationData(
            time=from_ts,
            location=locations[idx].serialize_for_db(),
            usd_value=str(value),
        ))
    # add the location data + total to the DB
    self.db.add_multiple_location_data(location_data + [
        LocationData(
            time=from_ts,
            location=Location.TOTAL.serialize_for_db(),
            usd_value=str(total_amount),
        ),
    ])

    # Add the first distribution of assets
    assets_data = []
    for idx, value in enumerate(divide_number_in_parts(starting_amount, len(assets))):
        assets_data.append(DBAssetBalance(
            category=BalanceType.ASSET,
            time=from_ts,
            asset=assets[idx],
            amount=str(random.randint(1, 20)),
            usd_value=str(value),
        ))
    self.db.add_multiple_balances(assets_data)

    while from_ts < to_ts:
        print(f'At timestamp: {from_ts}/{to_ts} with total net worth: ${total_amount}')
        new_location_data = []
        new_assets_data = []
        from_ts += args.seconds_between_balance_save
        # remaining_loops = to_ts - from_ts / args.seconds_between_balance_save
        add_usd_value = random.choice([100, 350, 500, 625, 725, 915, 1000])
        add_amount = random.choice([
            FVal('0.1'), FVal('0.23'), FVal('0.34'), FVal('0.69'), FVal('1.85'), FVal('2.54'),
        ])
        go_up = (
            # If any asset's usd value is close to go below zero, go up
            any(FVal(a.usd_value) - FVal(add_usd_value) < 0 for a in assets_data) or
            # If total is going under the min amount go up
            total_amount - add_usd_value < min_amount or
            # If "dice roll" matched and we won't go over the max amount go up
            (add_usd_value + total_amount < max_amount and FVal(random.random()) <= go_up_probability)
        )
        if go_up:
            total_amount += add_usd_value
            action = operator.add
        else:
            total_amount -= add_usd_value
            action = operator.sub

        # Spread the delta across locations, each moving in the same direction
        for idx, value in enumerate(divide_number_in_parts(add_usd_value, len(locations))):
            new_location_data.append(LocationData(
                time=from_ts,
                location=location_data[idx].location,
                usd_value=str(action(FVal(location_data[idx].usd_value), value)),
            ))
        # add the location data + total to the DB
        self.db.add_multiple_location_data(new_location_data + [
            LocationData(
                time=from_ts,
                location=Location.TOTAL.serialize_for_db(),
                usd_value=str(total_amount),
            ),
        ])

        # Spread the delta across assets, never letting an amount go negative
        for idx, value in enumerate(divide_number_in_parts(add_usd_value, len(assets))):
            old_amount = FVal(assets_data[idx].amount)
            new_amount = action(old_amount, add_amount)
            if new_amount < FVal('0'):
                new_amount = old_amount + FVal('0.01')
            new_assets_data.append(DBAssetBalance(
                category=BalanceType.ASSET,
                time=from_ts,
                asset=assets[idx],
                amount=str(new_amount),
                usd_value=str(action(FVal(assets_data[idx].usd_value), value)),
            ))
        self.db.add_multiple_balances(new_assets_data)

        # Next iteration mutates relative to this step's values
        location_data = new_location_data
        assets_data = new_assets_data
def add_starting_balances(datahandler) -> List[DBAssetBalance]:
    """Adds some starting balances and other data to a testing instance"""
    starting_rows = [
        (A_BTC, '1', '1222.66'),
        (A_ETH, '10', '4517.4'),
        (A_EUR, '100', '61.5'),
        (A_XMR, '5', '135.6'),
    ]
    balances = [
        DBAssetBalance(
            category=BalanceType.ASSET,
            time=Timestamp(1488326400),
            asset=asset,
            amount=amount,
            usd_value=usd_value,
        )
        for asset, amount, usd_value in starting_rows
    ]
    datahandler.db.add_multiple_balances(balances)
    datahandler.db.conn.commit()

    # (timestamp, location, usd_value) triples for the location history
    location_rows = [
        (1451606400, Location.KRAKEN, '100'),
        (1451606400, Location.BANKS, '1000'),
        (1461606500, Location.POLONIEX, '50'),
        (1461606500, Location.KRAKEN, '200'),
        (1461606500, Location.BANKS, '50000'),
        (1491607800, Location.POLONIEX, '100'),
        (1491607800, Location.KRAKEN, '2000'),
        (1491607800, Location.BANKS, '10000'),
        (1491607800, Location.BLOCKCHAIN, '200000'),
        (1451606400, Location.TOTAL, '1500'),
        (1461606500, Location.TOTAL, '4500'),
        (1491607800, Location.TOTAL, '10700.5'),
    ]
    location_data = [
        LocationData(
            time=Timestamp(ts),
            location=location.serialize_for_db(),
            usd_value=usd_value,
        )
        for ts, location, usd_value in location_rows
    ]
    datahandler.db.add_multiple_location_data(location_data)

    return balances