def test_deserialize_location(database):
    balances = []
    for idx, data in enumerate(Location):
        assert Location.deserialize(str(data)) == data
        balances.append(ManuallyTrackedBalance(
            asset=A_BTC,
            label='Test' + str(idx),
            amount=FVal(1),
            location=data,
            tags=None,
            balance_type=BalanceType.ASSET,
        ))

    with pytest.raises(DeserializationError):
        Location.deserialize('dsadsad')

    with pytest.raises(DeserializationError):
        Location.deserialize(15)

    # Also write and read each location to DB to make sure that
    # location.serialize_for_db() and deserialize_location_from_db work fine
    add_manually_tracked_balances(database, balances)
    balances = database.get_manually_tracked_balances()
    for data in Location:
        assert data in (x.location for x in balances)
def deserialize_from_db(
        cls,
        data: LedgerActionDBTupleWithIdentifier,
        given_gitcoin_map: Optional[Dict[int, GitcoinEventDataDB]] = None,
) -> 'LedgerAction':
    """May raise:
    - DeserializationError
    - UnknownAsset
    """
    extra_data = None
    gitcoin_map = {} if not given_gitcoin_map else given_gitcoin_map
    gitcoin_data = gitcoin_map.get(data[0], None)
    if gitcoin_data is not None:
        extra_data = GitcoinEventData.deserialize_from_db(data=gitcoin_data)
    return cls(
        identifier=data[0],
        timestamp=deserialize_timestamp(data[1]),
        action_type=LedgerActionType.deserialize_from_db(data[2]),
        location=Location.deserialize_from_db(data[3]),
        amount=deserialize_asset_amount(data[4]),
        asset=Asset(data[5]),
        rate=deserialize_optional(data[6], deserialize_price),
        rate_asset=deserialize_optional(data[7], Asset),
        link=data[8],
        notes=data[9],
        extra_data=extra_data,
    )
def deserialize_trade(data: Dict[str, Any]) -> Trade:
    """
    Takes a dict trade representation of our common trade format and
    deserializes it into the Trade object

    May raise:
        - UnknownAsset: If the base, quote or fee asset string is not a known asset
        - DeserializationError: If any of the trade dict entries is not as expected
    """
    rate = deserialize_price(data['rate'])
    amount = deserialize_asset_amount(data['amount'])
    trade_type = deserialize_trade_type(data['trade_type'])
    location = Location.deserialize(data['location'])

    trade_link = ''
    if 'link' in data:
        trade_link = data['link']
    trade_notes = ''
    if 'notes' in data:
        trade_notes = data['notes']

    return Trade(
        timestamp=data['timestamp'],
        location=location,
        base_asset=Asset(data['base_asset']),
        quote_asset=Asset(data['quote_asset']),
        trade_type=trade_type,
        amount=amount,
        rate=rate,
        fee=deserialize_fee(data['fee']),
        fee_currency=Asset(data['fee_currency']),
        link=trade_link,
        notes=trade_notes,
    )
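# A minimal usage sketch for deserialize_trade, assuming the common trade dict
# format documented above. The concrete values (assets, rate, amount, fee) are
# hypothetical and only illustrate the expected keys; 'link' and 'notes' may be
# omitted and then default to ''.
example_trade_data = {
    'timestamp': 1609537953,
    'location': 'kraken',
    'base_asset': 'BTC',
    'quote_asset': 'EUR',
    'trade_type': 'buy',
    'amount': '0.5',
    'rate': '24000',
    'fee': '12',
    'fee_currency': 'EUR',
}
trade = deserialize_trade(example_trade_data)
assert trade.location == Location.KRAKEN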
def deserialize_from_db(cls, entry: HISTORY_EVENT_DB_TUPLE) -> 'HistoryBaseEntry':
    """May raise DeserializationError"""
    event_subtype = None
    if entry[11] is not None:
        event_subtype = HistoryEventSubType.deserialize(entry[11])
    try:
        return HistoryBaseEntry(
            event_identifier=entry[1],
            sequence_index=entry[2],
            timestamp=Timestamp(entry[3]),
            location=Location.deserialize_from_db(entry[4]),
            location_label=entry[5],
            asset_balance=AssetBalance(
                asset=Asset(entry[6]),
                balance=Balance(
                    amount=FVal(entry[7]),
                    usd_value=FVal(entry[8]),
                ),
            ),
            notes=entry[9],
            event_type=HistoryEventType.deserialize(entry[10]),
            event_subtype=event_subtype,
        )
    except ValueError as e:
        raise DeserializationError(
            f'Failed to read FVal value from database history event with '
            f'event identifier {entry[1]}. {str(e)}',
        ) from e
def deserialize_from_db(cls, entry: BINANCE_PAIR_DB_TUPLE) -> 'BinancePair':
    """Create a BinancePair from data in the database.

    May raise:
    - DeserializationError
    - UnsupportedAsset
    - UnknownAsset
    """
    return BinancePair(
        symbol=entry[0],
        base_asset=asset_from_binance(entry[1]),
        quote_asset=asset_from_binance(entry[2]),
        location=Location.deserialize_from_db(entry[3]),
    )
def deserialize_from_db(
        cls,
        trade_tuple: AMMSwapDBTuple,
) -> 'AMMSwap':
    """Turns a tuple read from DB into an appropriate Swap.

    May raise a DeserializationError if something is wrong with the DB data

    Trade_tuple index - Schema columns
    ----------------------------------
     0 - tx_hash
     1 - log_index
     2 - address
     3 - from_address
     4 - to_address
     5 - timestamp
     6 - location
     7 - token0_identifier
     8 - token1_identifier
     9 - amount0_in
    10 - amount1_in
    11 - amount0_out
    12 - amount1_out
    """
    address = deserialize_ethereum_address(trade_tuple[2])
    from_address = deserialize_ethereum_address(trade_tuple[3])
    to_address = deserialize_ethereum_address(trade_tuple[4])

    token0 = deserialize_ethereum_token_from_db(identifier=trade_tuple[7])
    token1 = deserialize_ethereum_token_from_db(identifier=trade_tuple[8])

    return cls(
        tx_hash=trade_tuple[0],
        log_index=trade_tuple[1],
        address=address,
        from_address=from_address,
        to_address=to_address,
        timestamp=deserialize_timestamp(trade_tuple[5]),
        location=Location.deserialize_from_db(trade_tuple[6]),
        token0=token0,
        token1=token1,
        amount0_in=deserialize_asset_amount(trade_tuple[9]),
        amount1_in=deserialize_asset_amount(trade_tuple[10]),
        amount0_out=deserialize_asset_amount(trade_tuple[11]),
        amount1_out=deserialize_asset_amount(trade_tuple[12]),
    )
def deserialize_from_db(cls, entry: TradeDBTuple) -> 'Trade':
    """May raise:
    - DeserializationError
    - UnknownAsset
    """
    return Trade(
        timestamp=deserialize_timestamp(entry[1]),
        location=Location.deserialize_from_db(entry[2]),
        base_asset=Asset(entry[3]),
        quote_asset=Asset(entry[4]),
        trade_type=deserialize_trade_type_from_db(entry[5]),
        amount=deserialize_asset_amount(entry[6]),
        rate=deserialize_price(entry[7]),
        fee=deserialize_optional(entry[8], deserialize_fee),
        fee_currency=deserialize_optional(entry[9], Asset),
        link=entry[10],
        notes=entry[11],
    )
def _serialize_action_for_db(
        timestamp: Timestamp,
        action_type: LedgerActionType,
        location: Location,
        amount: AssetAmount,
        asset: Asset,
        link: str,
        notes: str,
) -> LedgerActionDBTuple:
    return (
        timestamp,
        action_type.serialize_for_db(),
        location.serialize_for_db(),
        str(amount),
        asset.identifier,
        link,
        notes,
    )
def deserialize_from_db(cls, data: LedgerActionDBTupleWithIdentifier) -> 'LedgerAction':
    """May raise:
    - DeserializationError
    - UnknownAsset
    """
    return cls(
        identifier=data[0],
        timestamp=deserialize_timestamp(data[1]),
        action_type=LedgerActionType.deserialize_from_db(data[2]),
        location=Location.deserialize_from_db(data[3]),
        amount=deserialize_asset_amount(data[4]),
        asset=Asset(data[5]),
        rate=deserialize_optional(data[6], deserialize_price),
        rate_asset=deserialize_optional(data[7], Asset),
        link=data[8],
        notes=data[9],
    )
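# Hedged sketch of how the two ledger action helpers above relate. The DB row that
# deserialize_from_db consumes carries three extra fields compared to what
# _serialize_action_for_db emits: a leading identifier (standing in for the table
# rowid) plus the optional rate / rate_asset columns, shown here as None.
# LedgerActionType.INCOME and Location.EXTERNAL are placeholder choices.
db_row = _serialize_action_for_db(
    timestamp=Timestamp(1625000000),
    action_type=LedgerActionType.INCOME,
    location=Location.EXTERNAL,
    amount=AssetAmount(FVal('10')),
    asset=A_BTC,
    link='',
    notes='example entry',
)
# (id, timestamp, type, location, amount, asset, rate, rate_asset, link, notes)
row_with_id = (1, *db_row[:5], None, None, db_row[5], db_row[6])
action = LedgerAction.deserialize_from_db(row_with_id)
assert action.location == Location.EXTERNAL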
def update_margin_positions(self, cursor: 'Cursor') -> None:
    """Upgrades the margin positions table to use the new asset ids if they are ethereum tokens

    And also makes sure the new primary key id matches the rules used in the app
    """
    query = cursor.execute(
        'SELECT id, location, open_time, close_time, profit_loss,'
        'pl_currency,fee,fee_currency,link,notes from margin_positions;',
    )
    m_tuples = query.fetchall()

    cursor.execute('DELETE from margin_positions;')

    new_tuples = []
    for entry in m_tuples:
        new_pl_currency = self.get_new_asset_identifier_if_existing(entry[5])
        new_fee_currency = self.get_new_asset_identifier_if_existing(entry[7])
        # formulate the new DB identifier primary key. Copy the identifier() functionality
        open_time_str = 'None' if entry[2] == 0 else str(entry[2])
        new_id_string = (
            str(Location.deserialize_from_db(entry[1])) +
            open_time_str +
            str(entry[3]) +
            new_pl_currency +
            new_fee_currency +
            entry[8]
        )
        new_id = hash_id(new_id_string)
        new_tuples.append((
            new_id,
            entry[1],
            entry[2],
            entry[3],
            entry[4],
            new_pl_currency,
            entry[6],
            new_fee_currency,
            entry[8],
            entry[9],
        ))

    cursor.executemany(
        'INSERT INTO margin_positions('
        'id, location, open_time, close_time, profit_loss,'
        'pl_currency,fee,fee_currency,link,notes) '
        'VALUES(?, ?, ?, ?, ?, ?, ?, ?, ?, ?);',
        new_tuples,
    )
def update_asset_movements(self, cursor: 'Cursor') -> None:
    """Upgrades the asset movements table to use the new asset ids if they are ethereum tokens

    And also makes sure the new primary key id matches the rules used in the app
    """
    query = cursor.execute(
        'SELECT id, location, category, address, transaction_id, time,'
        'asset,amount,fee_asset,fee,link from asset_movements;',
    )
    m_tuples = query.fetchall()

    cursor.execute('DELETE from asset_movements;')

    new_tuples = []
    for entry in m_tuples:
        new_asset = self.get_new_asset_identifier_if_existing(entry[6])
        new_fee_asset = self.get_new_asset_identifier_if_existing(entry[8])
        # formulate the new DB identifier primary key. Copy the identifier() functionality
        new_id_string = (
            str(Location.deserialize_from_db(entry[1])) +
            str(deserialize_asset_movement_category_from_db(entry[2])) +
            str(entry[5]) +
            new_asset +
            new_fee_asset +
            entry[10]
        )
        new_id = hash_id(new_id_string)
        new_tuples.append((
            new_id,
            entry[1],
            entry[2],
            entry[3],
            entry[4],
            entry[5],
            new_asset,
            entry[7],
            new_fee_asset,
            entry[9],
            entry[10],
        ))

    cursor.executemany(
        'INSERT INTO asset_movements('
        'id, location, category, address, transaction_id, time,'
        'asset, amount, fee_asset, fee, link) '
        'VALUES(?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?);',
        new_tuples,
    )
def deserialize_from_db(cls, entry: AssetMovementDBTuple) -> 'AssetMovement':
    """May raise:
    - DeserializationError
    - UnknownAsset
    """
    return AssetMovement(
        location=Location.deserialize_from_db(entry[1]),
        category=deserialize_asset_movement_category_from_db(entry[2]),
        address=entry[3],
        transaction_id=entry[4],
        timestamp=Timestamp(entry[5]),
        asset=Asset(entry[6]),
        # TODO: should we also _force_positive here? I guess not since
        # we always make sure to save it as positive
        amount=deserialize_asset_amount(entry[7]),
        fee_asset=Asset(entry[8]),
        fee=deserialize_fee(entry[9]),
        link=entry[10],
    )
def deserialize_from_db(cls, entry: MarginPositionDBTuple) -> 'MarginPosition':
    """May raise:
    - DeserializationError
    - UnknownAsset
    """
    if entry[2] == 0:
        open_time = None
    else:
        open_time = deserialize_timestamp(entry[2])
    return MarginPosition(
        location=Location.deserialize_from_db(entry[1]),
        open_time=open_time,
        close_time=deserialize_timestamp(entry[3]),
        profit_loss=deserialize_asset_amount(entry[4]),
        pl_currency=Asset(entry[5]),
        fee=deserialize_fee(entry[6]),
        fee_currency=Asset(entry[7]),
        link=entry[8],
        notes=entry[9],
    )
def create_fake_data(self, args: argparse.Namespace) -> None:
    self._clean_tables()
    from_ts, to_ts = StatisticsFaker._get_timestamps(args)
    starting_amount, min_amount, max_amount = StatisticsFaker._get_amounts(args)
    total_amount = starting_amount
    locations = [Location.deserialize(location) for location in args.locations.split(',')]
    assets = [Asset(symbol) for symbol in args.assets.split(',')]
    go_up_probability = FVal(args.go_up_probability)

    # Add the first distribution of location data
    location_data = []
    for idx, value in enumerate(divide_number_in_parts(starting_amount, len(locations))):
        location_data.append(LocationData(
            time=from_ts,
            location=locations[idx].serialize_for_db(),
            usd_value=str(value),
        ))
    # add the location data + total to the DB
    self.db.add_multiple_location_data(location_data + [LocationData(
        time=from_ts,
        location=Location.TOTAL.serialize_for_db(),  # pylint: disable=no-member
        usd_value=str(total_amount),
    )])

    # Add the first distribution of assets
    assets_data = []
    for idx, value in enumerate(divide_number_in_parts(starting_amount, len(assets))):
        assets_data.append(DBAssetBalance(
            category=BalanceType.ASSET,
            time=from_ts,
            asset=assets[idx],
            amount=str(random.randint(1, 20)),
            usd_value=str(value),
        ))
    self.db.add_multiple_balances(assets_data)

    while from_ts < to_ts:
        print(f'At timestamp: {from_ts}/{to_ts} with total net worth: ${total_amount}')
        new_location_data = []
        new_assets_data = []
        from_ts += args.seconds_between_balance_save
        # remaining_loops = to_ts - from_ts / args.seconds_between_balance_save
        add_usd_value = random.choice([100, 350, 500, 625, 725, 915, 1000])
        add_amount = random.choice([
            FVal('0.1'), FVal('0.23'), FVal('0.34'), FVal('0.69'), FVal('1.85'), FVal('2.54'),
        ])

        go_up = (
            # If any asset's usd value is about to go below zero, go up
            any(FVal(a.usd_value) - FVal(add_usd_value) < 0 for a in assets_data) or
            # If total is going under the min amount go up
            total_amount - add_usd_value < min_amount or
            # If "dice roll" matched and we won't go over the max amount go up
            (add_usd_value + total_amount < max_amount and FVal(random.random()) <= go_up_probability)
        )
        if go_up:
            total_amount += add_usd_value
            action = operator.add
        else:
            total_amount -= add_usd_value
            action = operator.sub

        for idx, value in enumerate(divide_number_in_parts(add_usd_value, len(locations))):
            new_location_data.append(LocationData(
                time=from_ts,
                location=location_data[idx].location,
                usd_value=str(action(FVal(location_data[idx].usd_value), value)),
            ))
        # add the location data + total to the DB
        self.db.add_multiple_location_data(new_location_data + [LocationData(
            time=from_ts,
            location=Location.TOTAL.serialize_for_db(),  # pylint: disable=no-member
            usd_value=str(total_amount),
        )])

        for idx, value in enumerate(divide_number_in_parts(add_usd_value, len(assets))):
            old_amount = FVal(assets_data[idx].amount)
            new_amount = action(old_amount, add_amount)
            if new_amount < FVal('0'):
                new_amount = old_amount + FVal('0.01')
            new_assets_data.append(DBAssetBalance(
                category=BalanceType.ASSET,
                time=from_ts,
                asset=assets[idx],
                amount=str(new_amount),
                usd_value=str(action(FVal(assets_data[idx].usd_value), value)),
            ))
        self.db.add_multiple_balances(new_assets_data)

        location_data = new_location_data
        assets_data = new_assets_data
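# divide_number_in_parts is used above to split a USD amount across the chosen
# locations or assets. Its real implementation is not shown here; a rough,
# deterministic stand-in consistent with how it is consumed (yields parts_number
# values that sum back to the input) might look like this sketch:
def sketch_divide_number_in_parts(number: int, parts_number: int):
    # even split, with any remainder folded into the last part
    base_part = number // parts_number
    for _ in range(parts_number - 1):
        yield base_part
        number -= base_part
    yield number

assert sum(sketch_divide_number_in_parts(1000, 3)) == 1000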
def _process_entry(entry: Any) -> Union[str, List[Any], Dict[str, Any], Any]:
    if isinstance(entry, FVal):
        return str(entry)
    if isinstance(entry, list):
        new_list = []
        for new_entry in entry:
            new_list.append(_process_entry(new_entry))
        return new_list
    if isinstance(entry, (dict, AttributeDict)):
        new_dict = {}
        for k, v in entry.items():
            if isinstance(k, Asset):
                k = k.identifier
            new_dict[k] = _process_entry(v)
        return new_dict
    if isinstance(entry, HexBytes):
        return entry.hex()
    if isinstance(entry, LocationData):
        return {
            'time': entry.time,
            'location': str(Location.deserialize_from_db(entry.location)),
            'usd_value': entry.usd_value,
        }
    if isinstance(entry, SingleDBAssetBalance):
        return {
            'time': entry.time,
            'category': str(entry.category),
            'amount': entry.amount,
            'usd_value': entry.usd_value,
        }
    if isinstance(entry, DBAssetBalance):
        return {
            'time': entry.time,
            'category': str(entry.category),
            'asset': entry.asset.identifier,
            'amount': entry.amount,
            'usd_value': entry.usd_value,
        }
    if isinstance(entry, (
            DefiProtocol,
            MakerdaoVault,
            XpubData,
    )):
        return entry.serialize()
    if isinstance(entry, (
            Trade,
            EthereumTransaction,
            MakerdaoVault,
            DSRAccountReport,
            Balance,
            AaveLendingBalance,
            AaveBorrowingBalance,
            CompoundBalance,
            YearnVaultEvent,
            YearnVaultBalance,
            AaveEvent,
            UniswapPool,
            UniswapPoolAsset,
            UnknownEthereumToken,
            AMMTrade,
            UniswapPoolEventsBalance,
            ADXStakingHistory,
            BalancerBPTEventPoolToken,
            BalancerEvent,
            BalancerPoolEventsBalance,
            BalancerPoolBalance,
            BalancerPoolTokenBalance,
    )):
        return process_result(entry.serialize())
    if isinstance(entry, (
            DBSettings,
            CompoundEvent,
            VersionCheckResult,
            DSRCurrentBalances,
            ManuallyTrackedBalanceWithValue,
            VaultEvent,
            MakerdaoVaultDetails,
            AaveBalances,
            AaveHistory,
            DefiBalance,
            DefiProtocolBalances,
            YearnVaultHistory,
            BlockchainAccountData,
            Eth2Deposit,
    )):
        return process_result(entry._asdict())
    if isinstance(entry, tuple):
        raise ValueError('Query results should not contain plain tuples')
    if isinstance(entry, Asset):
        return entry.identifier
    if isinstance(entry, (
            TradeType,
            Location,
            KrakenAccountType,
            VaultEventType,
            AssetMovementCategory,
            CurrentPriceOracle,
            HistoricalPriceOracle,
            LedgerActionType,
    )):
        return str(entry)
    # else
    return entry
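# Small illustrative call of _process_entry on a nested structure: FVal values
# become strings, Asset keys become identifiers, serializable objects are
# serialized and the recursion covers both list and dict members. The payload is
# made up and the commented result is only roughly what would come out.
processed = _process_entry({
    A_BTC: Balance(amount=FVal('0.5'), usd_value=FVal('12000')),
    'history': [FVal('1.5'), {'location': Location.KRAKEN}],
})
# roughly -> {'BTC': {'amount': '0.5', 'usd_value': '12000'},
#             'history': ['1.5', {'location': 'kraken'}]}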
def create_action(self, index: int, ts: Timestamp):
    """Create a random trade action on a random exchange depending
    on the funds that are available in that exchange"""
    # choose an exchange at random
    exchange_name = random.choice(ALLOWED_EXCHANGES)
    exchange = getattr(self, exchange_name)
    # choose a random pair at that exchange
    pair = exchange.choose_pair(
        timestamp=ts,
        price_query=self.query_historical_price,
    )
    print(
        f'Creating trade {index + 1} / {self.trades_number} in {exchange_name}'
        f' for the pair: {pair} at timestamp {ts}',
    )
    # depending on our funds decide on what to do. Buy/sell
    base, quote = pair_get_assets(pair)
    if exchange.get_balance(base) is None:
        action_type = TradeType.BUY
    elif exchange.get_balance(quote) is None:
        action_type = TradeType.SELL
    else:
        # TODO: trade the one we have most of
        action_type = random.choice(list(TradeType))

    # if we are buying we are going to spend from the quote asset
    if action_type == TradeType.BUY:
        spending_asset = quote
    else:  # selling spends from the base asset
        spending_asset = base
    # get a spending asset amount within our per-trade equivalent range and
    # our available funds
    spending_usd_rate = self.query_historical_price(spending_asset, A_USD, ts)
    max_usd_in_spending_asset = spending_usd_rate * exchange.get_balance(spending_asset)
    max_usd_equivalent_to_spend = min(max_usd_in_spending_asset, MAX_TRADE_USD_VALUE)
    rate = self.query_historical_price(base, quote, ts)
    usd_to_spend = FVal(random.uniform(0.01, float(max_usd_equivalent_to_spend)))
    amount_in_spending_asset = usd_to_spend / spending_usd_rate
    # if we are buying then the amount is the amount of asset we bought
    if action_type == TradeType.BUY:
        amount = amount_in_spending_asset / rate
    # if we are selling the amount is the spending asset amount
    else:
        amount = amount_in_spending_asset

    quote_asset_usd_rate = self.query_historical_price(quote, A_USD, ts)
    fee_in_quote_currency = FVal(random.uniform(0, MAX_FEE_USD_VALUE)) / quote_asset_usd_rate

    # create the trade
    base, quote = pair.split('_')
    trade = Trade(
        timestamp=ts,
        location=Location.deserialize(exchange_name),
        base_asset=base,
        quote_asset=quote,
        trade_type=action_type,
        amount=amount,
        rate=rate,
        fee=fee_in_quote_currency,
        fee_currency=quote,
        link='',
        notes='',
    )
    logger.info(f'Created trade: {trade}')

    # Adjust our global and per exchange accounting
    if action_type == TradeType.BUY:
        # we buy so we increase our base asset by amount
        self.increase_asset(base, amount, exchange_name)
        # and decrease quote by amount * rate
        self.decrease_asset(quote, amount * rate, exchange_name)
    else:
        # we sell so we increase our quote asset
        self.increase_asset(quote, amount * rate, exchange_name)
        # and decrease our base asset
        self.decrease_asset(base, amount, exchange_name)

    # finally add it to the exchange
    exchange.append_trade(trade)
def update_trades(self, cursor: 'Cursor') -> None:
    """Upgrades the trades table to use base/quote asset instead of a pair

    Also upgrades all asset ids if they are ethereum tokens
    And also makes sure the new primary key id matches the rules used in the app
    """
    # Get all old data and transform it to the new schema
    query = cursor.execute(
        'SELECT id, '
        '  time, '
        '  location, '
        '  pair, '
        '  type, '
        '  amount, '
        '  rate, '
        '  fee, '
        '  fee_currency, '
        '  link, '
        '  notes from trades; ',
    )
    new_trade_tuples = []
    for entry in query:
        try:
            base, quote = pair_get_asset_ids(entry[3])
        except ValueError as e:
            self.msg_aggregator.add_warning(
                f'During v24 -> v25 DB upgrade {str(e)}. This should not have happened.'
                f' Removing the trade with id {entry[0]} at timestamp {entry[1]} '
                f'and location {str(Location.deserialize_from_db(entry[2]))} that '
                f'contained the offending pair from the DB.',
            )
            continue

        new_id = self.get_new_asset_identifier(base)
        new_base = new_id if new_id else base
        new_id = self.get_new_asset_identifier(quote)
        new_quote = new_id if new_id else quote
        new_id = self.get_new_asset_identifier(entry[8])
        new_fee_currency = new_id if new_id else entry[8]
        timestamp = entry[1]
        amount = entry[5]
        rate = entry[6]
        old_link = entry[9]
        link = None if old_link == '' else old_link
        notes = None if entry[10] == '' else entry[10]
        # Copy the identifier() functionality. This identifier does not sound like a good idea
        new_trade_id_string = (
            str(Location.deserialize_from_db(entry[2])) +
            str(timestamp) +
            str(deserialize_trade_type_from_db(entry[4])) +
            new_base +
            new_quote +
            amount +
            rate +
            old_link
        )
        new_trade_id = hash_id(new_trade_id_string)
        new_trade_tuples.append((
            new_trade_id,
            entry[1],  # time
            entry[2],  # location
            new_base,
            new_quote,
            entry[4],  # type
            amount,
            rate,
            entry[7],  # fee
            new_fee_currency,
            link,
            notes,
        ))

    # Upgrade the table
    cursor.execute('DROP TABLE IF EXISTS trades;')
    cursor.execute("""
    CREATE TABLE IF NOT EXISTS trades (
        id TEXT PRIMARY KEY NOT NULL,
        time INTEGER NOT NULL,
        location CHAR(1) NOT NULL DEFAULT('A') REFERENCES location(location),
        base_asset TEXT NOT NULL,
        quote_asset TEXT NOT NULL,
        type CHAR(1) NOT NULL DEFAULT ('A') REFERENCES trade_type(type),
        amount TEXT NOT NULL,
        rate TEXT NOT NULL,
        fee TEXT,
        fee_currency TEXT,
        link TEXT,
        notes TEXT
    );
    """)
    # Insert the new data
    executestr = """
    INSERT INTO trades(
          id,
          time,
          location,
          base_asset,
          quote_asset,
          type,
          amount,
          rate,
          fee,
          fee_currency,
          link,
          notes)
    VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
    """
    cursor.executemany(executestr, new_trade_tuples)
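# The new primary key above is a deterministic digest of the concatenated trade
# fields, so the same logical trade always maps to the same id. A minimal sketch
# of that idea, assuming hash_id boils down to something like a SHA-256 hexdigest
# of the string (the exact digest used by hash_id is not shown here); the field
# values below are invented for illustration.
import hashlib

def sketch_hash_id(id_string: str) -> str:
    # deterministic: identical input strings always yield the identical id
    return hashlib.sha256(id_string.encode()).hexdigest()

assert sketch_hash_id('kraken' + '1609537953' + 'buy' + 'BTC' + 'EUR' + '0.5' + '24000' + '') != \
    sketch_hash_id('kraken' + '1609537954' + 'buy' + 'BTC' + 'EUR' + '0.5' + '24000' + '')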
def data_migration_1(rotki: 'Rotkehlchen') -> None:
    """
    Purge data for exchanges where there is more than one instance. Also purge information
    from kraken as requested for https://github.com/rotki/rotki/pull/3755
    """
    exchange_re = re.compile(r'(.*?)_(trades|margins|asset_movements|ledger_actions).*')
    db = rotki.data.db
    cursor = db.conn.cursor()
    used_ranges = cursor.execute('SELECT * from used_query_ranges').fetchall()
    credentials_result = cursor.execute('SELECT * from user_credentials')
    location_to_name = {}
    multiple_locations = set()
    for result in credentials_result:
        try:
            location = Location.deserialize_from_db(result[1])
        except DeserializationError as e:
            log.error(
                f'During data migration 1 found location {result[1]} '
                f'that could not be deserialized due to {str(e)}',
            )
            continue
        if location in location_to_name:
            multiple_locations.add(location)
        else:
            location_to_name[location] = result[0]

    for used_range in used_ranges:
        range_name = used_range[0]
        match = exchange_re.search(range_name)
        if match is None:
            continue
        location_str = match.group(1)
        entry_type = match.group(2)
        try:
            location = Location.deserialize(location_str)
        except DeserializationError as e:
            log.error(
                f'During data migration 1 could not deserialize location '
                f'string {location_str} to location due to {str(e)}',
            )
            continue
        if location not in location_to_name:
            if location in SUPPORTED_EXCHANGES:
                # Can happen if there is a stray used_query_range from a non-connected exchange
                cursor.execute('DELETE FROM used_query_ranges WHERE name=?', (range_name,))
            # in any case continue. Can also be a non-CEX location such as uniswap/balancer
            continue

        if location in multiple_locations or location == Location.KRAKEN:
            db.purge_exchange_data(location)
            db.delete_used_query_range_for_exchange(location)
        else:
            cursor.execute(
                'UPDATE used_query_ranges SET name=? WHERE name=?',
                (f'{location_str}_{entry_type}_{location_to_name[location]}', range_name),
            )

    db.conn.commit()
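# Quick illustration of how the exchange_re pattern above splits a used_query_ranges
# name into the location part and the entry type; the range names below are invented
# examples, not values taken from a real database.
example_re = re.compile(r'(.*?)_(trades|margins|asset_movements|ledger_actions).*')
match = example_re.search('binance_trades_binance_account_1')
assert match is not None and match.group(1) == 'binance' and match.group(2) == 'trades'
assert example_re.search('uniswap_events') is None  # non-matching range names are skipped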
def deserialize_from_db(
        cls,
        trade_tuple: AMMSwapDBTuple,
) -> 'AMMSwap':
    """Turns a tuple read from DB into an appropriate Swap.

    May raise a DeserializationError if something is wrong with the DB data

    Trade_tuple index - Schema columns
    ----------------------------------
     0 - tx_hash
     1 - log_index
     2 - address
     3 - from_address
     4 - to_address
     5 - timestamp
     6 - location
     7 - is_token0_unknown
     8 - token0_address
     9 - token0_symbol
    10 - token0_name
    11 - token0_decimals
    12 - is_token1_unknown
    13 - token1_address
    14 - token1_symbol
    15 - token1_name
    16 - token1_decimals
    17 - amount0_in
    18 - amount1_in
    19 - amount0_out
    20 - amount1_out
    """
    address = deserialize_ethereum_address(trade_tuple[2])
    from_address = deserialize_ethereum_address(trade_tuple[3])
    to_address = deserialize_ethereum_address(trade_tuple[4])
    is_token0_unknown = trade_tuple[7]
    is_token1_unknown = trade_tuple[12]

    token0: Union[EthereumToken, UnknownEthereumToken]
    token1: Union[EthereumToken, UnknownEthereumToken]
    if is_token0_unknown:
        token0 = deserialize_unknown_ethereum_token_from_db(
            ethereum_address=trade_tuple[8],
            symbol=trade_tuple[9],
            name=trade_tuple[10],
            decimals=trade_tuple[11],
        )
    else:
        token0 = deserialize_ethereum_token_from_db(identifier=trade_tuple[9])

    if is_token1_unknown:
        token1 = deserialize_unknown_ethereum_token_from_db(
            ethereum_address=trade_tuple[13],
            symbol=trade_tuple[14],
            name=trade_tuple[15],
            decimals=trade_tuple[16],
        )
    else:
        token1 = deserialize_ethereum_token_from_db(identifier=trade_tuple[14])

    return cls(
        tx_hash=trade_tuple[0],
        log_index=trade_tuple[1],
        address=address,
        from_address=from_address,
        to_address=to_address,
        timestamp=deserialize_timestamp(trade_tuple[5]),
        location=Location.deserialize_from_db(trade_tuple[6]),
        token0=token0,
        token1=token1,
        amount0_in=deserialize_asset_amount(trade_tuple[17]),
        amount1_in=deserialize_asset_amount(trade_tuple[18]),
        amount0_out=deserialize_asset_amount(trade_tuple[19]),
        amount1_out=deserialize_asset_amount(trade_tuple[20]),
    )