def test_timed_balances_primary_key_works(user_data_dir):
    """Check the primary key behaviour of the timed_balances table.

    Two entries sharing (time, asset, category) collide: the duplicate is
    rejected with a warning and only the first row survives. Entries that
    differ only in category coexist without complaint.
    """
    aggregator = MessagesAggregator()
    database = DBHandler(user_data_dir, '123', aggregator, None)

    # Same timestamp, same asset, same category -> primary key collision
    colliding_entries = [
        AssetBalance(
            category=BalanceType.ASSET,
            time=1590676728,
            asset=A_BTC,
            amount='1.0',
            usd_value='8500',
        ),
        AssetBalance(
            category=BalanceType.ASSET,
            time=1590676728,
            asset=A_BTC,
            amount='1.1',
            usd_value='9100',
        ),
    ]
    database.add_multiple_balances(colliding_entries)
    assert len(aggregator.consume_warnings()) == 1
    assert len(aggregator.consume_errors()) == 0
    assert len(database.query_timed_balances(asset=A_BTC)) == 1

    # Same timestamp and asset but different categories -> both rows kept
    distinct_category_entries = [
        AssetBalance(
            category=BalanceType.ASSET,
            time=1590676728,
            asset=A_ETH,
            amount='1.0',
            usd_value='8500',
        ),
        AssetBalance(
            category=BalanceType.LIABILITY,
            time=1590676728,
            asset=A_ETH,
            amount='1.1',
            usd_value='9100',
        ),
    ]
    database.add_multiple_balances(distinct_category_entries)
    assert len(aggregator.consume_warnings()) == 0
    assert len(aggregator.consume_errors()) == 0
    assert len(database.query_timed_balances(asset=A_ETH)) == 2
def test_multiple_location_data_and_balances_same_timestamp(data_dir, username):
    """Test that adding location and balance data with same timestamp does not crash.

    Regression test for https://github.com/rotki/rotki/issues/1043
    """
    msg_aggregator = MessagesAggregator()
    # Use the username supplied by the fixture directly. Previously the
    # parameter was immediately overwritten with a hardcoded placeholder,
    # which made the fixture argument dead code.
    userdata_dir = os.path.join(data_dir, username)
    os.mkdir(userdata_dir)
    db = DBHandler(userdata_dir, '123', msg_aggregator)

    balances = [
        AssetBalance(
            time=1590676728,
            asset=A_BTC,
            amount='1.0',
            usd_value='8500',
        ),
        AssetBalance(
            time=1590676728,
            asset=A_BTC,
            amount='1.1',
            usd_value='9100',
        ),
    ]
    db.add_multiple_balances(balances)
    # The primary key should have dropped the second entry for the same
    # timestamp, so only one balance remains
    balances = db.query_timed_balances(from_ts=0, to_ts=1590676728, asset=A_BTC)
    assert len(balances) == 1

    locations = [
        LocationData(
            time=1590676728,
            location='H',
            usd_value='55',
        ),
        LocationData(
            time=1590676728,
            location='H',
            usd_value='56',
        ),
    ]
    db.add_multiple_location_data(locations)
    # Same story for location data: the first entry wins
    locations = db.get_latest_location_value_distribution()
    assert len(locations) == 1
    assert locations[0].usd_value == '55'
def get_latest_asset_value_distribution(self) -> List[AssetBalance]:
    """Gets the latest asset distribution data

    Returns a list of `AssetBalance` all at the latest timestamp.
    Essentially this returns the distribution of netvalue across all assets
    The list is sorted by usd value going from higher to lower
    """
    cursor = self.conn.cursor()
    # Pick only the rows at the most recent snapshot time; the CAST is needed
    # because usd_value is stored as text in the DB
    rows = cursor.execute(
        'SELECT time, currency, amount, usd_value FROM timed_balances WHERE '
        'time=(SELECT MAX(time) from timed_balances) ORDER BY '
        'CAST(usd_value AS REAL) DESC;',
    ).fetchall()
    return [
        AssetBalance(
            time=row[0],
            asset=Asset(row[1]),
            amount=row[2],
            usd_value=row[3],
        )
        for row in rows
    ]
def test_query_owned_assets(data_dir, username):
    """Test the get_owned_assets with also an unknown asset in the DB"""
    msg_aggregator = MessagesAggregator()
    data = DataHandler(data_dir, msg_aggregator)
    data.unlock(username, '123', create_new=True)

    balances = deepcopy(asset_balances)
    balances.extend([
        AssetBalance(
            category=BalanceType.ASSET,
            time=Timestamp(1488326400),
            asset=A_BTC,
            amount='1',
            usd_value='1222.66',
        ),
        AssetBalance(
            category=BalanceType.ASSET,
            time=Timestamp(1489326500),
            asset=A_XMR,
            amount='2',
            usd_value='33.8',
        ),
    ])
    data.db.add_multiple_balances(balances)

    # Insert an unknown asset directly, bypassing validation, to check that
    # query_owned_assets() skips it with a warning instead of crashing
    cursor = data.db.conn.cursor()
    cursor.execute(
        'INSERT INTO timed_balances('
        ' time, currency, amount, usd_value, category) '
        ' VALUES(?, ?, ?, ?, ?)',
        (1469326500, 'ADSADX', '10.1', '100.5', 'A'),
    )
    data.db.conn.commit()

    assets_list = data.db.query_owned_assets()
    # Compare as sets: the ordering of query_owned_assets() is an
    # implementation detail and asserting a specific order made the test
    # brittle for no benefit
    assert set(assets_list) == {A_USD, A_ETH, A_DAI, A_BTC, A_XMR}
    assert all(isinstance(x, Asset) for x in assets_list)
    warnings = data.db.msg_aggregator.consume_warnings()
    assert len(warnings) == 1
    assert 'Unknown/unsupported asset ADSADX' in warnings[0]
def write_balances_data(self, data: BalancesData, timestamp: Timestamp) -> None:
    """
    The keys of the data dictionary can be any kind of asset plus 'location'
    and 'net_usd'. This gives us the balance data per assets, the balance
    data per location and finally the total balance

    The balances are saved in the DB at the given timestamp
    """
    balances = []
    for asset, entry in data.items():
        # skip the two special keys -- everything else must be an Asset
        if asset in ('location', 'net_usd'):
            continue
        assert isinstance(asset, Asset), 'at this point the key should only be Asset type'
        balances.append(AssetBalance(
            time=timestamp,
            asset=asset,
            amount=str(entry['amount']),
            usd_value=str(entry['usd_value']),
        ))

    locations = []
    for location_name, location_entry in data['location'].items():
        # values under the 'location' key are plain dicts -- narrow for mypy
        location_entry = cast(Dict, location_entry)
        locations.append(LocationData(
            time=timestamp,
            location=location_name,
            usd_value=str(location_entry['usd_value']),
        ))
    # one extra synthetic 'total' location carrying the overall net worth
    locations.append(LocationData(
        time=timestamp,
        location='total',
        usd_value=str(data['net_usd']),
    ))

    self.add_multiple_balances(balances)
    self.add_multiple_location_data(locations)
def add_starting_balances(datahandler) -> List[AssetBalance]:
    """Adds some starting balances and other data to a testing instance"""
    balances = [
        AssetBalance(
            time=Timestamp(1488326400),
            asset=asset,
            amount=amount,
            usd_value=usd_value,
        ) for asset, amount, usd_value in (
            (A_BTC, '1', '1222.66'),
            (A_ETH, '10', '4517.4'),
            (A_EUR, '100', '61.5'),
            (A_XMR, '5', '135.6'),
        )
    ]
    datahandler.db.add_multiple_balances(balances)

    # Also add an unknown/invalid asset. This will generate a warning
    cursor = datahandler.db.conn.cursor()
    cursor.execute(
        'INSERT INTO timed_balances('
        ' time, currency, amount, usd_value) '
        ' VALUES(?, ?, ?, ?)',
        (1469326500, 'ADSADX', '10.1', '100.5'),
    )
    datahandler.db.conn.commit()

    # Seed per-location snapshots (plus the synthetic TOTAL location) at a
    # few different timestamps
    location_specs = (
        (1451606400, Location.KRAKEN, '100'),
        (1451606400, Location.BANKS, '1000'),
        (1461606500, Location.POLONIEX, '50'),
        (1461606500, Location.KRAKEN, '200'),
        (1461606500, Location.BANKS, '50000'),
        (1491607800, Location.POLONIEX, '100'),
        (1491607800, Location.KRAKEN, '2000'),
        (1491607800, Location.BANKS, '10000'),
        (1491607800, Location.BLOCKCHAIN, '200000'),
        (1451606400, Location.TOTAL, '1500'),
        (1461606500, Location.TOTAL, '4500'),
        (1491607800, Location.TOTAL, '10700.5'),
    )
    location_data = [
        LocationData(
            time=Timestamp(time),
            location=location.serialize_for_db(),
            usd_value=usd_value,
        ) for time, location, usd_value in location_specs
    ]
    datahandler.db.add_multiple_location_data(location_data)

    return balances
data.set_fiat_balances({A_EUR: ZERO}) balances = data.get_fiat_balances() assert len(balances) == 1 assert FVal(balances[A_CNY]) == amount_cny # also check that all the fiat assets in the fiat table are in # all_assets.json for fiat_asset in FIAT_CURRENCIES: assert fiat_asset.is_fiat() asset_balances = [ AssetBalance( time=Timestamp(1451606400), asset=A_USD, amount='10', usd_value='10', ), AssetBalance( time=Timestamp(1451606401), asset=A_ETH, amount='2', usd_value='1.7068', ), AssetBalance( time=Timestamp(1465171200), asset=A_USD, amount='500', usd_value='500', ), AssetBalance(
def create_fake_data(self, args: argparse.Namespace) -> None:
    """Populate the DB with fake statistics data.

    Seeds an initial distribution of net worth across the configured
    locations and assets at the starting timestamp, then walks forward in
    time, randomly moving the total up or down at every balance-save step
    until the end timestamp is reached.
    """
    self._clean_tables()
    from_ts, to_ts = StatisticsFaker._get_timestamps(args)
    starting_amount, min_amount, max_amount = StatisticsFaker._get_amounts(args)
    total_amount = starting_amount
    locations = [deserialize_location(location) for location in args.locations.split(',')]
    assets = [Asset(symbol) for symbol in args.assets.split(',')]
    go_up_probability = FVal(args.go_up_probability)

    # Add the first distribution of location data
    location_data = []
    for idx, value in enumerate(divide_number_in_parts(starting_amount, len(locations))):
        location_data.append(LocationData(
            time=from_ts,
            location=locations[idx].serialize_for_db(),
            usd_value=str(value),
        ))
    # add the location data + total to the DB
    self.db.add_multiple_location_data(location_data + [LocationData(
        time=from_ts,
        location=Location.TOTAL.serialize_for_db(),
        usd_value=str(total_amount),
    )])

    # Add the first distribution of assets
    assets_data = []
    for idx, value in enumerate(divide_number_in_parts(starting_amount, len(assets))):
        assets_data.append(AssetBalance(
            time=from_ts,
            asset=assets[idx],
            amount=str(random.randint(1, 20)),
            usd_value=str(value),
        ))
    self.db.add_multiple_balances(assets_data)

    while from_ts < to_ts:
        # Fixed typo in the progress message: 'wih' -> 'with'
        print(f'At timestamp: {from_ts}/{to_ts} with total net worth: ${total_amount}')
        new_location_data = []
        new_assets_data = []
        from_ts += args.seconds_between_balance_save
        add_usd_value = random.choice([100, 350, 500, 625, 725, 915, 1000])
        add_amount = random.choice([
            FVal('0.1'), FVal('0.23'), FVal('0.34'),
            FVal('0.69'), FVal('1.85'), FVal('2.54'),
        ])
        go_up = (
            # If any asset's usd value is close to going below zero, go up
            any(FVal(a.usd_value) - FVal(add_usd_value) < 0 for a in assets_data) or
            # If total is going under the min amount go up
            total_amount - add_usd_value < min_amount or
            # If "dice roll" matched and we won't go over the max amount go up
            (add_usd_value + total_amount < max_amount and
             FVal(random.random()) <= go_up_probability)
        )
        if go_up:
            total_amount += add_usd_value
            action = operator.add
        else:
            total_amount -= add_usd_value
            action = operator.sub

        # Spread the delta over the locations, reusing the previous snapshot
        for idx, value in enumerate(divide_number_in_parts(add_usd_value, len(locations))):
            new_location_data.append(LocationData(
                time=from_ts,
                location=location_data[idx].location,
                usd_value=str(action(FVal(location_data[idx].usd_value), value)),
            ))
        # add the location data + total to the DB
        self.db.add_multiple_location_data(new_location_data + [LocationData(
            time=from_ts,
            location=Location.TOTAL.serialize_for_db(),
            usd_value=str(total_amount),
        )])

        # Spread the delta over the assets as well
        for idx, value in enumerate(divide_number_in_parts(add_usd_value, len(assets))):
            old_amount = FVal(assets_data[idx].amount)
            new_amount = action(old_amount, add_amount)
            # never let an asset's amount go negative -- nudge it up instead
            if new_amount < FVal('0'):
                new_amount = old_amount + FVal('0.01')
            new_assets_data.append(AssetBalance(
                time=from_ts,
                asset=assets[idx],
                amount=str(new_amount),
                usd_value=str(action(FVal(assets_data[idx].usd_value), value)),
            ))
        self.db.add_multiple_balances(new_assets_data)

        # carry the latest snapshot forward for the next iteration
        location_data = new_location_data
        assets_data = new_assets_data
sql_mock.return_value = ConnectionMock('5.10.13 somethingelse') assert detect_sqlcipher_version() == 5 sql_mock.return_value = ConnectionMock('3.1.15 somethingelse') assert detect_sqlcipher_version() == 3 with pytest.raises(ValueError): sql_mock.return_value = ConnectionMock('no version') detect_sqlcipher_version() asset_balances = [ AssetBalance( category=BalanceType.ASSET, time=Timestamp(1451606400), asset=A_USD, amount='10', usd_value='10', ), AssetBalance( category=BalanceType.ASSET, time=Timestamp(1451606401), asset=A_ETH, amount='2', usd_value='1.7068', ), AssetBalance( category=BalanceType.ASSET, time=Timestamp(1465171200), asset=A_USD, amount='500',
def test_query_owned_assets(data_dir, username):
    """Test the get_owned_assets with also an unknown asset in the DB"""
    aggregator = MessagesAggregator()
    data = DataHandler(data_dir, aggregator)
    data.unlock(username, '123', create_new=True)

    all_balances = deepcopy(asset_balances)
    all_balances.extend([
        AssetBalance(
            category=BalanceType.ASSET,
            time=Timestamp(1488326400),
            asset=A_BTC,
            amount='1',
            usd_value='1222.66',
        ),
        AssetBalance(
            category=BalanceType.ASSET,
            time=Timestamp(1489326500),
            asset=A_XMR,
            amount='2',
            usd_value='33.8',
        ),
    ])
    data.db.add_multiple_balances(all_balances)

    # Sneak an unknown asset into the table directly via SQL, bypassing
    # any validation, so that query_owned_assets() has to cope with it
    cursor = data.db.conn.cursor()
    cursor.execute(
        'INSERT INTO timed_balances('
        ' time, currency, amount, usd_value, category) '
        ' VALUES(?, ?, ?, ?, ?)',
        (1469326500, 'ADSADX', '10.1', '100.5', 'A'),
    )
    data.db.conn.commit()

    # also make sure that assets from trades are included
    trade_specs = (
        (1, 'ETH_BTC', 1),
        (99, 'ETH_BTC', 2),
        (1, 'SDC_SDT-2', 1),
        (1, 'SUSHI_1INCH', 1),
        (3, 'SUSHI_1INCH', 2),
        (1, 'UNKNOWNTOKEN_BTC', 1),
    )
    data.db.add_trades([
        Trade(
            timestamp=Timestamp(ts),
            location=Location.EXTERNAL,
            pair=TradePair(pair),
            trade_type=TradeType.BUY,
            amount=AssetAmount(FVal(qty)),
            rate=Price(FVal(1)),
            fee=Fee(FVal('0.1')),
            fee_currency=A_BTC,
            link='',
            notes='',
        ) for ts, pair, qty in trade_specs
    ])

    assets_list = data.db.query_owned_assets()
    assert set(assets_list) == {A_USD, A_ETH, A_DAI, A_BTC, A_XMR, Asset('SDC'), Asset('SDT-2'), Asset('SUSHI'), Asset('1INCH')}  # noqa: E501
    assert all(isinstance(x, Asset) for x in assets_list)
    warnings = data.db.msg_aggregator.consume_warnings()
    assert len(warnings) == 1
    assert 'Unknown/unsupported asset ADSADX' in warnings[0]