def test_deserialize_location(database):
    """Check Location serialization round-trips both as string and via the DB."""
    # Every location must deserialize back from its own string form
    for member in Location:
        assert Location.deserialize(str(member)) == member

    tracked = [
        ManuallyTrackedBalance(
            id=-1,
            asset=A_BTC,
            label=f'Test{idx}',
            amount=FVal(1),
            location=member,
            tags=None,
            balance_type=BalanceType.ASSET,
        )
        for idx, member in enumerate(Location)
    ]

    with pytest.raises(DeserializationError):
        Location.deserialize('dsadsad')
    with pytest.raises(DeserializationError):
        Location.deserialize(15)

    # Also write and read each location to DB to make sure that
    # location.serialize_for_db() and deserialize_location_from_db work fine
    add_manually_tracked_balances(database, tracked)
    stored = database.get_manually_tracked_balances()
    stored_locations = {x.location for x in stored}
    for member in Location:
        assert member in stored_locations
def deserialize_trade(data: Dict[str, Any]) -> Trade:
    """
    Takes a dict trade representation of our common trade format
    and serializes it into the Trade object

    May raise:
        - UnknownAsset: If the base, quote, fee asset string is not a known asset
        - DeserializationError: If any of the trade dict entries is not as expected
    """
    rate = deserialize_price(data['rate'])
    amount = deserialize_asset_amount(data['amount'])
    trade_type = TradeType.deserialize(data['trade_type'])
    location = Location.deserialize(data['location'])
    # link/notes are optional keys -- use dict.get with a default instead of
    # the manual LBYL `if key in data` dance
    trade_link = data.get('link', '')
    trade_notes = data.get('notes', '')

    return Trade(
        timestamp=data['timestamp'],
        location=location,
        base_asset=Asset(data['base_asset']),
        quote_asset=Asset(data['quote_asset']),
        trade_type=trade_type,
        amount=amount,
        rate=rate,
        fee=deserialize_fee(data['fee']),
        fee_currency=Asset(data['fee_currency']),
        link=trade_link,
        notes=trade_notes,
    )
def serialize(
        self,
        export_data: Optional[Tuple[Asset, Price]] = None,
) -> Dict[str, str]:
    """Serialize the entry to a dict.

    With export_data given, the timestamp is human readable and the value is
    expressed in the given asset at the given price; otherwise raw USD values.
    """
    location_str = Location.deserialize_from_db(self.location).serialize()
    if export_data is None:
        return {
            'timestamp': str(self.time),
            'location': location_str,
            'usd_value': self.usd_value,
        }
    value_key = f'{export_data[0].symbol.lower()}_value'
    return {
        'timestamp': timestamp_to_date(self.time, '%Y-%m-%d %H:%M:%S'),
        'location': location_str,
        value_key: str(FVal(self.usd_value) * export_data[1]),  # noqa: E501
    }
def deserialize_from_db(cls, entry: BINANCE_PAIR_DB_TUPLE) -> 'BinancePair':
    """Create a BinancePair from data in the database.

    May raise:
    - DeserializationError
    - UnsupportedAsset
    - UnknownAsset
    """
    base = asset_from_binance(entry[1])
    quote = asset_from_binance(entry[2])
    pair_location = Location.deserialize_from_db(entry[3])
    return BinancePair(
        symbol=entry[0],
        base_asset=base,
        quote_asset=quote,
        location=pair_location,
    )
def get_all_binance_pairs(self, location: Location) -> List['BinancePair']:
    """Gets all possible binance pairs from the GlobalDB.

    NB: This is not the user-selected binance pairs. This is just a cache.
    """
    cursor = self.db.conn.cursor()
    cursor.execute(
        'SELECT pair, base_asset, quote_asset, location FROM binance_pairs WHERE location=?',
        # Bindings must be a sequence of parameters. The original passed the
        # bare string, which only worked by accident for 1-char serializations.
        (location.serialize_for_db(),),
    )
    pairs = []
    for pair in cursor:
        try:
            pairs.append(BinancePair.deserialize_from_db(pair))
        except DeserializationError as e:
            # best-effort: skip malformed cache rows instead of failing the query
            log.debug(f'Failed to deserialize binance pair {pair}. {str(e)}')
    return pairs
def deserialize_from_db(
        cls,
        trade_tuple: AMMSwapDBTuple,
) -> 'AMMSwap':
    """Turns a tuple read from DB into an appropriate Swap.

    May raise a DeserializationError if something is wrong with the DB data

    Trade_tuple index - Schema columns
    ----------------------------------
    0 - tx_hash
    1 - log_index
    2 - address
    3 - from_address
    4 - to_address
    5 - timestamp
    6 - location
    7 - token0_identifier
    8 - token1_identifier
    9 - amount0_in
    10 - amount1_in
    11 - amount0_out
    12 - amount1_out
    """
    # Deserialize addresses and tokens up front, same order as before
    swap_address = deserialize_ethereum_address(trade_tuple[2])
    sender = deserialize_ethereum_address(trade_tuple[3])
    receiver = deserialize_ethereum_address(trade_tuple[4])
    first_token = deserialize_ethereum_token_from_db(identifier=trade_tuple[7])
    second_token = deserialize_ethereum_token_from_db(identifier=trade_tuple[8])
    return cls(
        tx_hash=trade_tuple[0],
        log_index=trade_tuple[1],
        address=swap_address,
        from_address=sender,
        to_address=receiver,
        timestamp=deserialize_timestamp(trade_tuple[5]),
        location=Location.deserialize_from_db(trade_tuple[6]),
        token0=first_token,
        token1=second_token,
        amount0_in=deserialize_asset_amount(trade_tuple[9]),
        amount1_in=deserialize_asset_amount(trade_tuple[10]),
        amount0_out=deserialize_asset_amount(trade_tuple[11]),
        amount1_out=deserialize_asset_amount(trade_tuple[12]),
    )
def deserialize_from_db(cls, entry: AssetMovementDBTuple) -> 'AssetMovement':
    """May raise:
    - DeserializationError
    - UnknownAsset
    """
    # Build the kwargs in tuple order, then construct in one go
    kwargs = {
        'location': Location.deserialize_from_db(entry[1]),
        'category': AssetMovementCategory.deserialize_from_db(entry[2]),
        'address': entry[3],
        'transaction_id': entry[4],
        'timestamp': Timestamp(entry[5]),
        'asset': Asset(entry[6]),
        'amount': deserialize_asset_amount(entry[7]),
        'fee_asset': Asset(entry[8]),
        'fee': deserialize_fee(entry[9]),
        'link': entry[10],
    }
    return AssetMovement(**kwargs)
def deserialize_from_db(cls, entry: TradeDBTuple) -> 'Trade':
    """May raise:
    - DeserializationError
    - UnknownAsset
    """
    # Deserialize each column in tuple order before constructing
    timestamp = deserialize_timestamp(entry[1])
    trade_location = Location.deserialize_from_db(entry[2])
    base_asset = Asset(entry[3])
    quote_asset = Asset(entry[4])
    trade_type = TradeType.deserialize_from_db(entry[5])
    amount = deserialize_asset_amount(entry[6])
    rate = deserialize_price(entry[7])
    fee = deserialize_optional(entry[8], deserialize_fee)
    fee_currency = deserialize_optional(entry[9], Asset)
    return Trade(
        timestamp=timestamp,
        location=trade_location,
        base_asset=base_asset,
        quote_asset=quote_asset,
        trade_type=trade_type,
        amount=amount,
        rate=rate,
        fee=fee,
        fee_currency=fee_currency,
        link=entry[10],
        notes=entry[11],
    )
def update_margin_positions(self, cursor: 'Cursor') -> None:
    """Upgrades the margin positions table to use the new asset ids if they are ethereum tokens

    And also makes sure the new primary key id matches the rules used in the app
    """
    query = cursor.execute(
        'SELECT id, location, open_time, close_time, profit_loss,'
        'pl_currency,fee,fee_currency,link,notes from margin_positions;',
    )
    m_tuples = query.fetchall()
    # wipe the table; all rows are re-inserted below with recomputed ids
    cursor.execute('DELETE from margin_positions;')
    new_tuples = []
    for entry in m_tuples:
        # entry indices: 0 id, 1 location, 2 open_time, 3 close_time,
        # 4 profit_loss, 5 pl_currency, 6 fee, 7 fee_currency, 8 link, 9 notes
        new_pl_currency = self.get_new_asset_identifier_if_existing(entry[5])
        new_fee_currency = self.get_new_asset_identifier_if_existing(entry[7])
        # formulate the new DB identifier primary key. Copy the identifier() functionality
        # open_time == 0 encodes a missing open time; the app id uses 'None' there
        open_time_str = 'None' if entry[2] == 0 else str(entry[2])
        new_id_string = (
            str(Location.deserialize_from_db(entry[1])) +
            open_time_str +
            str(entry[3]) +
            new_pl_currency +
            new_fee_currency +
            entry[8]
        )
        new_id = hash_id(new_id_string)
        new_tuples.append((
            new_id,
            entry[1],
            entry[2],
            entry[3],
            entry[4],
            new_pl_currency,
            entry[6],
            new_fee_currency,
            entry[8],
            entry[9],
        ))
    cursor.executemany(
        'INSERT INTO margin_positions('
        'id, location, open_time, close_time, profit_loss,'
        'pl_currency,fee,fee_currency,link,notes) '
        'VALUES(?, ?, ?, ?, ?, ?, ?, ?, ?, ?);',
        new_tuples,
    )
def update_asset_movements(self, cursor: 'Cursor') -> None:
    """Upgrades the asset movements table to use the new asset ids if they are ethereum tokens

    And also makes sure the new primary key id matches the rules used in the app
    """
    query = cursor.execute(
        'SELECT id, location, category, address, transaction_id, time,'
        'asset,amount,fee_asset,fee,link from asset_movements;',
    )
    m_tuples = query.fetchall()
    # wipe the table; all rows are re-inserted below with recomputed ids
    cursor.execute('DELETE from asset_movements;')
    new_tuples = []
    for entry in m_tuples:
        # entry indices: 0 id, 1 location, 2 category, 3 address,
        # 4 transaction_id, 5 time, 6 asset, 7 amount, 8 fee_asset, 9 fee, 10 link
        new_asset = self.get_new_asset_identifier_if_existing(entry[6])
        new_fee_asset = self.get_new_asset_identifier_if_existing(entry[8])
        # formulate the new DB identifier primary key. Copy the identifier() functionality
        new_id_string = (
            str(Location.deserialize_from_db(entry[1])) +
            str(AssetMovementCategory.deserialize_from_db(entry[2])) +
            str(entry[5]) +
            new_asset +
            new_fee_asset +
            entry[10]
        )
        new_id = hash_id(new_id_string)
        new_tuples.append((
            new_id,
            entry[1],
            entry[2],
            entry[3],
            entry[4],
            entry[5],
            new_asset,
            entry[7],
            new_fee_asset,
            entry[9],
            entry[10],
        ))
    cursor.executemany(
        'INSERT INTO asset_movements('
        'id, location, category, address, transaction_id, time,'
        'asset, amount, fee_asset, fee, link) '
        'VALUES(?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?);',
        new_tuples,
    )
def deserialize_from_db(
        cls,
        data: LedgerActionDBTupleWithIdentifier,
) -> 'LedgerAction':
    """May raise:
    - DeserializationError
    - UnknownAsset
    """
    # Build kwargs in tuple order, then construct in one go
    kwargs = {
        'identifier': data[0],
        'timestamp': deserialize_timestamp(data[1]),
        'action_type': LedgerActionType.deserialize_from_db(data[2]),
        'location': Location.deserialize_from_db(data[3]),
        'amount': deserialize_asset_amount(data[4]),
        'asset': Asset(data[5]),
        'rate': deserialize_optional(data[6], deserialize_price),
        'rate_asset': deserialize_optional(data[7], Asset),
        'link': data[8],
        'notes': data[9],
    }
    return cls(**kwargs)
def deserialize_from_db(
        cls,
        entry: HISTORY_EVENT_DB_TUPLE_READ,
) -> 'HistoryBaseEntry':
    """May raise:
    - DeserializationError
    - UnknownAsset
    """
    raw_extra_data = entry[13]
    if raw_extra_data is None:
        extra_data = None
    else:
        try:
            extra_data = json.loads(raw_extra_data)
        except json.JSONDecodeError as err:
            # malformed json is tolerated: keep the event, drop its extra data
            extra_data = None
            log.debug(
                f'Failed to read extra_data when reading HistoryBaseEntry entry '
                f'{entry} from the DB due to {str(err)}. Setting it to null',
            )

    try:
        return HistoryBaseEntry(
            identifier=entry[0],
            event_identifier=entry[1],
            sequence_index=entry[2],
            timestamp=TimestampMS(entry[3]),
            location=Location.deserialize_from_db(entry[4]),
            location_label=entry[5],
            # Setting incomplete data to true since we save all history events,
            # regardless of the type of token that it may involve
            asset=Asset(entry[6], form_with_incomplete_data=True),
            balance=Balance(
                amount=FVal(entry[7]),
                usd_value=FVal(entry[8]),
            ),
            notes=entry[9],
            event_type=HistoryEventType.deserialize(entry[10]),
            event_subtype=HistoryEventSubType.deserialize(entry[11]),
            counterparty=entry[12],
            extra_data=extra_data,
        )
    except ValueError as err:
        raise DeserializationError(
            f'Failed to read FVal value from database history event with '
            f'event identifier {entry[1]}. {str(err)}',
        ) from err
def deserialize_from_db(cls, entry: MarginPositionDBTuple) -> 'MarginPosition':
    """May raise:
    - DeserializationError
    - UnknownAsset
    """
    # 0 in the open_time column encodes a missing open time
    open_time = None if entry[2] == 0 else deserialize_timestamp(entry[2])
    return MarginPosition(
        location=Location.deserialize_from_db(entry[1]),
        open_time=open_time,
        close_time=deserialize_timestamp(entry[3]),
        profit_loss=deserialize_asset_amount(entry[4]),
        pl_currency=Asset(entry[5]),
        fee=deserialize_fee(entry[6]),
        fee_currency=Asset(entry[7]),
        link=entry[8],
        notes=entry[9],
    )
def _import_snapshot(
        self,
        balances_list: List[Dict[str, str]],
        location_data_list: List[Dict[str, str]],
) -> Tuple[bool, str]:
    """Import the validated snapshot data to the database.

    Returns (success, error message) -- the message is empty on success.
    """
    processed_balances_list = []
    try:
        for entry in balances_list:
            identifier = entry['asset_identifier']
            # NFT identifiers must exist in the DB before referencing them
            if identifier.startswith(NFT_DIRECTIVE):
                self.db.add_asset_identifiers([identifier])
            processed_balances_list.append(DBAssetBalance(
                category=BalanceType.deserialize(entry['category']),
                time=Timestamp(int(entry['timestamp'])),
                asset=Asset(identifier=identifier),
                amount=entry['amount'],
                usd_value=str(FVal(entry['usd_value'])),
            ))
    except UnknownAsset as err:
        return False, f'snapshot contains an unknown asset ({err.asset_name}). Try adding this asset manually.'  # noqa: E501

    processed_location_data_list = [
        LocationData(
            time=Timestamp(int(entry['timestamp'])),
            location=Location.deserialize(entry['location']).serialize_for_db(),
            usd_value=str(FVal(entry['usd_value'])),
        )
        for entry in location_data_list
    ]

    try:
        self.db.add_multiple_balances(processed_balances_list)
        self.db.add_multiple_location_data(processed_location_data_list)
    except InputError as err:
        return False, str(err)
    return True, ''
def deserialize_from_db(cls: Type[T], timestamp: Timestamp, stringified_json: str) -> T:
    """May raise:
    - DeserializationError if something is wrong with reading this from the DB
    """
    try:
        data = json.loads(stringified_json)
    except json.decoder.JSONDecodeError as err:
        raise DeserializationError(
            f'Could not decode processed accounting event json from the DB due to {str(err)}',
        ) from err

    try:
        pnl_taxable = deserialize_fval(data['pnl_taxable'], name='pnl_taxable', location='processed event decoding')  # noqa: E501
        pnl_free = deserialize_fval(data['pnl_free'], name='pnl_free', location='processed event decoding')  # noqa: E501
        raw_cost_basis = data['cost_basis']
        cost_basis = None if raw_cost_basis is None else CostBasisInfo.deserialize(raw_cost_basis)
        event = cls(
            type=AccountingEventType.deserialize(data['type']),
            notes=data['notes'],
            location=Location.deserialize(data['location']),
            timestamp=timestamp,
            asset=Asset(data['asset']),
            free_amount=deserialize_fval(data['free_amount'], name='free_amount', location='processed event decoding'),  # noqa: E501
            taxable_amount=deserialize_fval(data['taxable_amount'], name='taxable_amount', location='processed event decoding'),  # noqa: E501
            price=deserialize_price(data['price']),
            pnl=PNL(free=pnl_free, taxable=pnl_taxable),
            cost_basis=cost_basis,
            index=data['index'],
            extra_data=data['extra_data'],
        )
        # these two flags are not constructor arguments, so set them afterwards
        event.count_cost_basis_pnl = data['count_cost_basis_pnl']
        event.count_entire_amount_spend = data['count_entire_amount_spend']
        return event
    except KeyError as err:
        raise DeserializationError(f'Could not decode processed accounting event json from the DB due to missing key {str(err)}') from err  # noqa: E501
def _process_entry(entry: Any) -> Union[str, List[Any], Dict[str, Any], Any]:
    """Recursively turn a query result entry into primitive, serializable values.

    - FVal -> str, HexBytes -> hex string, Asset -> its identifier
    - lists/dicts are processed recursively (Asset dict keys become identifiers)
    - known result objects are converted via their serialize()/_asdict()
    - enum-like values are stringified; anything else passes through unchanged

    May raise ValueError if a plain tuple is found in the results.

    Duplicate/unreachable isinstance members were removed: Location,
    TroveOperation and DBSettings were listed twice in their own tuples, and
    MakerdaoVault / LiquityStakeEvent reappeared in branches that an earlier
    branch always matched first.
    """
    if isinstance(entry, FVal):
        return str(entry)
    if isinstance(entry, list):
        return [_process_entry(x) for x in entry]
    if isinstance(entry, (dict, AttributeDict)):
        new_dict = {}
        for k, v in entry.items():
            if isinstance(k, Asset):
                k = k.identifier
            new_dict[k] = _process_entry(v)
        return new_dict
    if isinstance(entry, HexBytes):
        return entry.hex()
    if isinstance(entry, LocationData):
        return {
            'time': entry.time,
            'location': str(Location.deserialize_from_db(entry.location)),
            'usd_value': entry.usd_value,
        }
    if isinstance(entry, SingleDBAssetBalance):
        return {
            'time': entry.time,
            'category': str(entry.category),
            'amount': entry.amount,
            'usd_value': entry.usd_value,
        }
    if isinstance(entry, DBAssetBalance):
        return {
            'time': entry.time,
            'category': str(entry.category),
            'asset': entry.asset.identifier,
            'amount': entry.amount,
            'usd_value': entry.usd_value,
        }
    if isinstance(entry, (
            DefiProtocol,
            MakerdaoVault,
            XpubData,
            Eth2Deposit,
            StakingEvent,
    )):
        return entry.serialize()
    if isinstance(entry, (
            Trade,
            EthereumTransaction,
            DSRAccountReport,
            Balance,
            AaveLendingBalance,
            AaveBorrowingBalance,
            CompoundBalance,
            YearnVaultEvent,
            YearnVaultBalance,
            AaveEvent,
            UniswapPool,
            UniswapPoolAsset,
            AMMTrade,
            UniswapPoolEventsBalance,
            ADXStakingHistory,
            BalancerBPTEventPoolToken,
            BalancerEvent,
            BalancerPoolEventsBalance,
            BalancerPoolBalance,
            BalancerPoolTokenBalance,
            LiquityTroveEvent,
            LiquityStakeEvent,
            ManuallyTrackedBalanceWithValue,
            Trove,
            StakePosition,
            DillBalance,
            NFTResult,
            ExchangeLocationID,
    )):
        return process_result(entry.serialize())
    if isinstance(entry, (
            DBSettings,
            CompoundEvent,
            VersionCheckResult,
            DSRCurrentBalances,
            VaultEvent,
            MakerdaoVaultDetails,
            AaveBalances,
            AaveHistory,
            DefiBalance,
            DefiProtocolBalances,
            YearnVaultHistory,
            BlockchainAccountData,
    )):
        return process_result(entry._asdict())
    if isinstance(entry, tuple):
        raise ValueError('Query results should not contain plain tuples')
    if isinstance(entry, Asset):
        return entry.identifier
    if isinstance(entry, (
            TradeType,
            Location,
            KrakenAccountType,
            VaultEventType,
            AssetMovementCategory,
            CurrentPriceOracle,
            HistoricalPriceOracle,
            LedgerActionType,
            TroveOperation,
            LiquityStakeEventType,
            BalanceType,
    )):
        return str(entry)

    # else return entry unchanged
    return entry
def update_trades(self, cursor: 'Cursor') -> None:
    """Upgrades the trades table to use base/quote asset instead of a pair

    Also upgrades all asset ids if they are ethereum tokens
    And also makes sure the new primary key id matches the rules used in the app
    """
    # Get all old data and transform it to the new schema
    query = cursor.execute(
        'SELECT id, '
        ' time, '
        ' location, '
        ' pair, '
        ' type, '
        ' amount, '
        ' rate, '
        ' fee, '
        ' fee_currency, '
        ' link, '
        ' notes from trades; ',
    )
    new_trade_tuples = []
    for entry in query:
        try:
            # split the old "pair" column into base/quote asset ids
            base, quote = pair_get_asset_ids(entry[3])
        except ValueError as e:
            # unparseable pair -> warn and drop the trade instead of aborting the upgrade
            self.msg_aggregator.add_warning(
                f'During v24 -> v25 DB upgrade {str(e)}. This should not have happened.'
                f' Removing the trade with id {entry[0]} at timestamp {entry[1]} '
                f'and location {str(Location.deserialize_from_db(entry[2]))} that '
                f'contained the offending pair from the DB.',
            )
            continue
        # map each asset id to its new identifier, keeping the old one when no mapping exists
        new_id = self.get_new_asset_identifier(base)
        new_base = new_id if new_id else base
        new_id = self.get_new_asset_identifier(quote)
        new_quote = new_id if new_id else quote
        new_id = self.get_new_asset_identifier(entry[8])
        new_fee_currency = new_id if new_id else entry[8]
        timestamp = entry[1]
        amount = entry[5]
        rate = entry[6]
        old_link = entry[9]
        # empty strings become NULL in the new schema
        link = None if old_link == '' else old_link
        notes = None if entry[10] == '' else entry[10]
        # Copy the identifier() functionality to recompute the primary key.
        # NOTE(review): concatenating raw fields without separators looks
        # collision-prone, but it must match the app's identifier() exactly.
        new_trade_id_string = (
            str(Location.deserialize_from_db(entry[2])) +
            str(timestamp) +
            str(TradeType.deserialize_from_db(entry[4])) +
            new_base +
            new_quote +
            amount +
            rate +
            old_link
        )
        new_trade_id = hash_id(new_trade_id_string)
        new_trade_tuples.append((
            new_trade_id,
            entry[1],  # time
            entry[2],  # location
            new_base,
            new_quote,
            entry[4],  # type
            amount,
            rate,
            entry[7],  # fee
            new_fee_currency,
            link,
            notes,
        ))

    # Upgrade the table
    cursor.execute('DROP TABLE IF EXISTS trades;')
    cursor.execute("""
    CREATE TABLE IF NOT EXISTS trades (
        id TEXT PRIMARY KEY NOT NULL,
        time INTEGER NOT NULL,
        location CHAR(1) NOT NULL DEFAULT('A') REFERENCES location(location),
        base_asset TEXT NOT NULL,
        quote_asset TEXT NOT NULL,
        type CHAR(1) NOT NULL DEFAULT ('A') REFERENCES trade_type(type),
        amount TEXT NOT NULL,
        rate TEXT NOT NULL,
        fee TEXT,
        fee_currency TEXT,
        link TEXT,
        notes TEXT
    );
    """)
    # Insert the new data
    executestr = """
    INSERT INTO trades(
      id,
      time,
      location,
      base_asset,
      quote_asset,
      type,
      amount,
      rate,
      fee,
      fee_currency,
      link,
      notes)
    VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
    """
    cursor.executemany(executestr, new_trade_tuples)
def assert_csv_export_response(response, csv_dir, main_currency: Asset, is_download=False):
    """Assert a balances CSV export/download produced the four expected files.

    Checks the HTTP response, then each generated CSV's column count, required
    fields and row count. The previous version repeated the same read/check
    loop four times; it is now factored into one helper per file.
    """
    if is_download:
        assert response.status_code == HTTPStatus.OK
    else:
        assert_simple_ok_response(response)

    def check_rows(filename, expected_columns, expected_rows, row_checker):
        # Read one CSV, validate every row, and verify the total row count
        count = 0
        with open(os.path.join(csv_dir, filename), newline='') as csvfile:
            for row in csv.DictReader(csvfile):
                assert len(row) == expected_columns
                row_checker(row)
                count += 1
        assert count == expected_rows

    value_key = f'{main_currency.symbol.lower()}_value'

    def check_balance_row(row):
        assert row['category'] in ('asset', 'liability')
        assert row['amount'] is not None
        assert row['asset'] is not None
        assert row['timestamp'] is not None
        assert row[value_key] is not None

    def check_balance_import_row(row):
        assert row['category'] in ('asset', 'liability')
        assert row['amount'] is not None
        assert row['asset_identifier'] is not None
        assert row['timestamp'] is not None
        assert row['usd_value'] is not None

    def check_location_row(row):
        assert row['timestamp'] is not None
        assert Location.deserialize(row['location']) is not None
        assert row[value_key] is not None

    def check_location_import_row(row):
        assert row['timestamp'] is not None
        assert Location.deserialize(row['location']) is not None
        assert row['usd_value'] is not None

    check_rows(BALANCES_FILENAME, 5, 2, check_balance_row)
    check_rows(BALANCES_FOR_IMPORT_FILENAME, 5, 2, check_balance_import_row)
    check_rows(LOCATION_DATA_FILENAME, 3, 3, check_location_row)
    check_rows(LOCATION_DATA_IMPORT_FILENAME, 3, 3, check_location_import_row)
def data_migration_1(rotki: 'Rotkehlchen') -> None:
    """
    Purge data for exchanges where there is more than one instance. Also purge information
    from kraken as requested for https://github.com/rotki/rotki/pull/3755
    """
    # matches e.g. "binance_trades_myaccount" -> ("binance", "trades")
    exchange_re = re.compile(r'(.*?)_(trades|margins|asset_movements|ledger_actions).*')
    db = rotki.data.db
    cursor = db.conn.cursor()
    used_ranges = cursor.execute('SELECT * from used_query_ranges').fetchall()
    credentials_result = cursor.execute('SELECT * from user_credentials')

    # map each exchange location to its (first seen) account name and record
    # which locations have more than one configured instance
    location_to_name = {}
    multiple_locations = set()
    for result in credentials_result:
        try:
            location = Location.deserialize_from_db(result[1])
        except DeserializationError as e:
            log.error(
                f'During data migration 1 found location {result[1]} '
                f'that could not be deserialized due to {str(e)}',
            )
            continue
        if location in location_to_name:
            multiple_locations.add(location)
        else:
            location_to_name[location] = result[0]

    for used_range in used_ranges:
        range_name = used_range[0]
        match = exchange_re.search(range_name)
        if match is None:
            continue
        location_str = match.group(1)
        entry_type = match.group(2)
        try:
            location = Location.deserialize(location_str)
        except DeserializationError as e:
            log.error(
                f'During data migration 1 could not deserialize location '
                f'string {location_str} to location due to {str(e)}',
            )
            continue

        if location not in location_to_name:
            if location in SUPPORTED_EXCHANGES:
                # Can happen if there is a stray used_query_range from a non-connected exchange
                cursor.execute('DELETE FROM used_query_ranges WHERE name=?', (range_name, ))

            # in any case continue. Can also be non CEX location such as uniswap/balancer
            continue

        if location in multiple_locations or location == Location.KRAKEN:
            # multi-instance (or kraken) data is purged and will be re-queried
            db.purge_exchange_data(location)
            db.delete_used_query_range_for_exchange(location)
        else:
            # single instance: rename the range to the new per-account format
            cursor.execute(
                'UPDATE used_query_ranges SET name=? WHERE name=?',
                (f'{location_str}_{entry_type}_{location_to_name[location]}', range_name),
            )

    db.conn.commit()
def create_fake_data(self, args: argparse.Namespace) -> None:
    """Generate fake statistics data in the DB over the configured time range.

    Seeds an initial distribution of value across locations and assets at
    from_ts, then performs a random walk, saving a new snapshot every
    args.seconds_between_balance_save until to_ts is reached.
    """
    self._clean_tables()
    from_ts, to_ts = StatisticsFaker._get_timestamps(args)
    starting_amount, min_amount, max_amount = StatisticsFaker._get_amounts(args)
    total_amount = starting_amount
    locations = [Location.deserialize(location) for location in args.locations.split(',')]
    assets = [Asset(symbol) for symbol in args.assets.split(',')]
    go_up_probability = FVal(args.go_up_probability)

    # Add the first distribution of location data
    location_data = []
    for idx, value in enumerate(divide_number_in_parts(starting_amount, len(locations))):
        location_data.append(LocationData(
            time=from_ts,
            location=locations[idx].serialize_for_db(),
            usd_value=str(value),
        ))
    # add the location data + total to the DB
    self.db.add_multiple_location_data(location_data + [
        LocationData(
            time=from_ts,
            location=Location.TOTAL.serialize_for_db(),  # pylint: disable=no-member
            usd_value=str(total_amount),
        )
    ])

    # Add the first distribution of assets
    assets_data = []
    for idx, value in enumerate(divide_number_in_parts(starting_amount, len(assets))):
        assets_data.append(DBAssetBalance(
            category=BalanceType.ASSET,
            time=from_ts,
            asset=assets[idx],
            amount=str(random.randint(1, 20)),
            usd_value=str(value),
        ))
    self.db.add_multiple_balances(assets_data)

    while from_ts < to_ts:
        print(f'At timestamp: {from_ts}/{to_ts} wih total net worth: ${total_amount}')
        new_location_data = []
        new_assets_data = []
        from_ts += args.seconds_between_balance_save
        # remaining_loops = to_ts - from_ts / args.seconds_between_balance_save
        # pick random step sizes for this iteration
        add_usd_value = random.choice([100, 350, 500, 625, 725, 915, 1000])
        add_amount = random.choice([
            FVal('0.1'), FVal('0.23'), FVal('0.34'), FVal('0.69'), FVal('1.85'), FVal('2.54'),
        ])
        go_up = (
            # If any asset's usd value is close to going below zero, go up
            any(FVal(a.usd_value) - FVal(add_usd_value) < 0 for a in assets_data) or
            # If total is going under the min amount go up
            total_amount - add_usd_value < min_amount or
            # If the "dice roll" matched and we won't go over the max amount, go up
            (add_usd_value + total_amount < max_amount and
             FVal(random.random()) <= go_up_probability)
        )
        if go_up:
            total_amount += add_usd_value
            action = operator.add
        else:
            total_amount -= add_usd_value
            action = operator.sub

        # spread the delta over the locations proportionally to the split
        for idx, value in enumerate(divide_number_in_parts(add_usd_value, len(locations))):
            new_location_data.append(LocationData(
                time=from_ts,
                location=location_data[idx].location,
                usd_value=str(action(FVal(location_data[idx].usd_value), value)),
            ))
        # add the location data + total to the DB
        self.db.add_multiple_location_data(new_location_data + [
            LocationData(
                time=from_ts,
                location=Location.TOTAL.serialize_for_db(),  # pylint: disable=no-member
                usd_value=str(total_amount),
            )
        ])

        # spread the delta over the assets, never letting an amount go negative
        for idx, value in enumerate(divide_number_in_parts(add_usd_value, len(assets))):
            old_amount = FVal(assets_data[idx].amount)
            new_amount = action(old_amount, add_amount)
            if new_amount < FVal('0'):
                new_amount = old_amount + FVal('0.01')
            new_assets_data.append(DBAssetBalance(
                category=BalanceType.ASSET,
                time=from_ts,
                asset=assets[idx],
                amount=str(new_amount),
                usd_value=str(action(FVal(assets_data[idx].usd_value), value)),
            ))
        self.db.add_multiple_balances(new_assets_data)

        # the new snapshot becomes the basis for the next iteration
        location_data = new_location_data
        assets_data = new_assets_data