def test_ledger_action_can_be_removed(database, function_scope_messages_aggregator):
    """Add a single ledger action, remove it, and verify the deletion was committed."""
    ledger_db = DBLedgerActions(database, function_scope_messages_aggregator)
    cursor = database.conn.cursor()
    count_query = 'SELECT COUNT(*) FROM ledger_actions WHERE identifier=?'

    # Insert the entry that will be deleted afterwards
    identifier = ledger_db.add_ledger_action(LedgerAction(
        identifier=0,  # whatever
        timestamp=1,
        action_type=LedgerActionType.INCOME,
        location=Location.EXTERNAL,
        amount=FVal(1),
        asset=A_ETH,
        rate=None,
        rate_asset=None,
        link=None,
        notes=None,
    ))

    # Remove it again
    assert ledger_db.remove_ledger_action(identifier) is None

    # And make sure the removal actually hit the database
    cursor.execute(count_query, (identifier, ))
    assert cursor.fetchone() == (0, )
def query_ledger_actions(
        self,
        filter_query: LedgerActionsFilterQuery,
        only_cache: bool,
) -> Tuple[List['LedgerAction'], int]:
    """Queries the ledger actions with the given filter query

    :param only_cache: Optional. If this is true then the equivalent exchange/location
    is not queried, but only what is already in the DB is returned.
    """
    wanted_location = filter_query.location
    if only_cache is False:
        # Refresh the DB from the connected exchange services before reading
        for exchange in self.exchange_manager.iterate_exchanges():
            if wanted_location is not None and exchange.location != wanted_location:
                continue  # a specific location was requested and this is not it
            exchange.query_income_loss_expense(
                start_ts=filter_query.from_ts,
                end_ts=filter_query.to_ts,
                only_cache=False,
            )

    ledger_db = DBLedgerActions(self.db, self.msg_aggregator)
    # Free users get a limited view; premium unlocks everything
    return ledger_db.get_ledger_actions_and_limit_info(
        filter_query=filter_query,
        has_premium=self.chain_manager.premium is not None,
    )
def test_all_action_types_writtable_in_db(database, function_scope_messages_aggregator):
    """Every LedgerActionType value can be stored in and read back from the DB."""
    ledger_db = DBLedgerActions(database, function_scope_messages_aggregator)
    cursor = database.conn.cursor()
    count_query = 'SELECT COUNT(*) FROM ledger_actions WHERE identifier=?'

    for action_type in LedgerActionType:
        identifier = ledger_db.add_ledger_action(LedgerAction(
            identifier=0,  # whatever
            timestamp=1,
            action_type=action_type,
            location=Location.EXTERNAL,
            amount=FVal(1),
            asset=A_ETH,
            rate=None,
            rate_asset=None,
            link=None,
            notes=None,
        ))
        # Each insertion must be committed immediately
        cursor.execute(count_query, (identifier, ))
        assert cursor.fetchone() == (1, )

    assert len(ledger_db.get_ledger_actions(None, None, None)) == len(LedgerActionType)
def test_ledger_action_can_be_edited(database, function_scope_messages_aggregator):
    """Edit a stored ledger action twice and check both edits are committed."""
    ledger_db = DBLedgerActions(database, function_scope_messages_aggregator)
    cursor = database.conn.cursor()
    select_query = 'SELECT * FROM ledger_actions WHERE identifier=?'

    # Store the initial entry that will be edited
    identifier = ledger_db.add_ledger_action(LedgerAction(
        identifier=0,  # whatever
        timestamp=1,
        action_type=LedgerActionType.INCOME,
        location=Location.EXTERNAL,
        amount=FVal(1),
        asset=A_ETH,
        rate=None,
        rate_asset=None,
        link=None,
        notes=None,
    ))

    # First edit: overwrite every field, including the optional ones
    new_entry = LedgerAction(
        identifier=identifier,
        timestamp=2,
        action_type=LedgerActionType.GIFT,
        location=Location.EXTERNAL,
        amount=FVal(3),
        asset=A_ETH,
        rate=FVal(100),
        rate_asset=A_USD,
        link='foo',
        notes='updated',
    )
    assert ledger_db.edit_ledger_action(new_entry) is None

    # The edit must be visible through a fresh DB read
    cursor.execute(select_query, (identifier, ))
    updated_entry = LedgerAction.deserialize_from_db(cursor.fetchone())
    new_entry.identifier = identifier
    assert updated_entry == new_entry

    # now try to see if the optional assets can also be set to None
    new_entry.rate = new_entry.rate_asset = new_entry.link = new_entry.notes = None
    assert ledger_db.edit_ledger_action(new_entry) is None
    cursor.execute(select_query, (identifier, ))
    updated_entry = LedgerAction.deserialize_from_db(cursor.fetchone())
    assert updated_entry.rate is None
    assert updated_entry.rate_asset is None
    assert updated_entry.link is None
    assert updated_entry.notes is None
def test_all_action_types_writtable_in_db(database, function_scope_messages_aggregator):
    """Check that every ledger action type can be written into the DB."""
    ledger_db = DBLedgerActions(database, function_scope_messages_aggregator)
    for action_type in LedgerActionType:
        ledger_db.add_ledger_action(
            timestamp=1,
            action_type=action_type,
            location=Location.EXTERNAL,
            amount=FVal(1),
            asset=A_ETH,
            link='',
            notes='',
        )
    # One row per action type should now be stored
    assert len(ledger_db.get_ledger_actions(None, None, None)) == len(LedgerActionType)
def query_income_loss_expense(
        self,
        start_ts: Timestamp,
        end_ts: Timestamp,
        only_cache: bool,
) -> List[LedgerAction]:
    """Queries the local DB and the exchange for the income/loss/expense history of the user

    If only_cache is true only what is already cached in the DB is returned without
    an actual exchange query.
    """
    db = DBLedgerActions(self.db, self.db.msg_aggregator)
    filter_query = LedgerActionsFilterQuery.make(
        from_ts=start_ts,
        to_ts=end_ts,
        location=self.location,
    )
    # has_premium True is fine here since the result of this is not user facing atm
    ledger_actions = db.get_ledger_actions(filter_query=filter_query, has_premium=True)
    if only_cache:
        # Caller asked for cached data only -- skip the online query entirely
        return ledger_actions

    # Determine which sub-ranges of [start_ts, end_ts] were never queried online before,
    # so we only hit the exchange for time ranges we don't already have
    ranges = DBQueryRanges(self.db)
    location_string = f'{str(self.location)}_ledger_actions_{self.name}'
    ranges_to_query = ranges.get_location_query_ranges(
        location_string=location_string,
        start_ts=start_ts,
        end_ts=end_ts,
    )
    new_ledger_actions = []
    for query_start_ts, query_end_ts in ranges_to_query:
        # Online query for each missing sub-range
        new_ledger_actions.extend(self.query_online_income_loss_expense(
            start_ts=query_start_ts,
            end_ts=query_end_ts,
        ))

    if new_ledger_actions != []:
        db.add_ledger_actions(new_ledger_actions)
    # Record the queried range even if nothing new came back, so it is not re-queried
    ranges.update_used_query_range(
        location_string=location_string,
        start_ts=start_ts,
        end_ts=end_ts,
        ranges_to_query=ranges_to_query,
    )
    # Combine what was cached with what was freshly fetched
    ledger_actions.extend(new_ledger_actions)

    return ledger_actions
def query_ledger_actions(
        self,
        has_premium: bool,
        from_ts: Optional[Timestamp],
        to_ts: Optional[Timestamp],
        location: Optional[Location] = None,
) -> Tuple[List['LedgerAction'], int]:
    """Queries the ledger actions from the DB and applies the free version limit

    TODO: Since we always query all in one call, the limiting will work, but if we start
    batch querying by time then we need to amend the logic of limiting here.
    Would need to use the same logic we do with trades. Using db entries count
    and count what all calls return and what is sums up to
    """
    ledger_db = DBLedgerActions(self.db, self.msg_aggregator)
    all_actions = ledger_db.get_ledger_actions(from_ts=from_ts, to_ts=to_ts, location=location)
    total_found = len(all_actions)
    if has_premium is False:
        # Free users only get to see a capped prefix of their ledger actions
        all_actions = all_actions[:FREE_LEDGER_ACTIONS_LIMIT]
    return all_actions, total_found
class GitcoinProcessor():
    """Turns stored gitcoin grant ledger actions into per-grant profit reports."""

    def __init__(self, db: DBHandler) -> None:
        self.db = db
        # Read access to the gitcoin grant events stored as ledger actions
        self.db_ledger = DBLedgerActions(self.db, self.db.msg_aggregator)

    def process_gitcoin(
            self,
            from_ts: Optional[Timestamp],
            to_ts: Optional[Timestamp],
            grant_id: Optional[int],
    ) -> Tuple[Asset, Dict[int, GitcoinReport]]:
        """Processes gitcoin transactions in the given time range and creates a report

        Returns the profit currency used for valuation and a mapping of grant id
        to its report. Entries without a stored rate/rate asset are skipped with
        an error log, as are entries whose rate cannot be converted to the profit
        currency due to a missing historical price.
        """
        actions = self.db_ledger.get_gitcoin_grant_events(
            grant_id=grant_id,
            from_ts=from_ts,
            to_ts=to_ts,
        )
        profit_currency = self.db.get_main_currency()
        reports: DefaultDict[int, GitcoinReport] = defaultdict(GitcoinReport)
        for entry in actions:
            balance = Balance(amount=entry.amount)
            if entry.rate_asset is None or entry.rate is None:
                log.error(
                    f'Found gitcoin ledger action for {entry.amount} {entry.asset} '
                    f'without a rate asset. Should not happen. Entry was '
                    f'possibly edited by hand. Skipping.',
                )
                continue

            report = reports[entry.extra_data.grant_id]  # type: ignore
            rate = entry.rate
            if entry.rate_asset != profit_currency:
                # Stored rate is in a different asset -- convert it via historical price
                try:
                    profit_currency_in_rate_asset = PriceHistorian().query_historical_price(
                        from_asset=profit_currency,
                        to_asset=entry.rate_asset,
                        timestamp=entry.timestamp,
                    )
                except NoPriceForGivenTimestamp as e:
                    self.db.msg_aggregator.add_error(
                        f'{str(e)} when processing gitcoin entry. Skipping entry.',
                    )
                    continue
                rate = entry.rate / profit_currency_in_rate_asset  # type: ignore # checked above

            value_in_profit_currency = entry.amount * rate
            balance.usd_value = value_in_profit_currency
            report.per_asset[entry.asset] += balance
            report.total += value_in_profit_currency

        # Return the cached profit currency instead of re-querying the DB: avoids a
        # redundant query and guarantees the returned asset matches the one every
        # report value above was computed in.
        return profit_currency, reports
def test_ledger_action_can_be_edited(database, function_scope_messages_aggregator):
    """Edit a stored ledger action and verify the change was committed to the DB."""
    ledger_db = DBLedgerActions(database, function_scope_messages_aggregator)
    cursor = database.conn.cursor()
    select_query = 'SELECT * FROM ledger_actions WHERE identifier=?'

    # Store the entry that will be edited
    identifier = ledger_db.add_ledger_action(
        timestamp=1,
        action_type=LedgerActionType.INCOME,
        location=Location.EXTERNAL,
        amount=FVal(1),
        asset=A_ETH,
        link='',
        notes='',
    )

    # The replacement values for the stored entry
    new_entry = LedgerAction(
        identifier=identifier,
        timestamp=2,
        action_type=LedgerActionType.GIFT,
        location=Location.EXTERNAL,
        amount=FVal(3),
        asset=A_ETH,
        link='',
        notes='updated',
    )
    assert ledger_db.edit_ledger_action(new_entry) is None

    # The edit must be visible through a fresh DB read
    cursor.execute(select_query, (identifier, ))
    updated_entry = LedgerAction(*cursor.fetchone())
    assert updated_entry.timestamp == new_entry.timestamp
    assert str(updated_entry.amount) == str(new_entry.amount)
    # The raw row holds the DB-serialized action type, so compare against that form
    assert updated_entry.action_type == new_entry.action_type.serialize_for_db()
def process_entry(
        self,
        db: DBHandler,
        db_ledger: DBLedgerActions,
        timestamp: Timestamp,
        data: BinanceCsvRow,
) -> None:
    """Store the given binance CSV row as an income ledger action."""
    coin_asset = data['Coin']
    change_amount = data['Change']
    income_action = LedgerAction(
        identifier=0,
        timestamp=timestamp,
        action_type=LedgerActionType.INCOME,
        location=Location.BINANCE,
        amount=change_amount,
        asset=coin_asset,
        rate=None,
        rate_asset=None,
        link=None,
        notes=f'Imported from binance CSV file. Binance operation: {data["Operation"]}',
    )
    db_ledger.add_ledger_action(income_action)
def assert_blockfi_transactions_import_results(rotki: Rotkehlchen):
    """A utility function to help assert on correctness of importing data from blockfi"""
    ledger_db = DBLedgerActions(rotki.data.db, rotki.msg_aggregator)
    ledger_actions = ledger_db.get_ledger_actions(None, None, None)
    asset_movements = rotki.data.db.get_asset_movements()
    # A clean import must produce no warnings or errors
    warnings = rotki.msg_aggregator.consume_warnings()
    errors = rotki.msg_aggregator.consume_errors()
    assert len(errors) == 0
    assert len(warnings) == 0

    # NOTE(review): the expected list has identifiers 3, 2, 1 with ascending
    # timestamps -- presumably get_ledger_actions returns ascending-timestamp
    # order; confirm against the DB layer if this test breaks on ordering.
    expected_actions = [LedgerAction(
        identifier=3,
        timestamp=Timestamp(1600293599),
        action_type=LedgerActionType.INCOME,
        location=Location.BLOCKFI,
        amount=AssetAmount(FVal('0.48385358')),
        asset=A_ETH,
        rate=None,
        rate_asset=None,
        link=None,
        notes='Bonus Payment from BlockFi',
    ), LedgerAction(
        identifier=2,
        timestamp=Timestamp(1606953599),
        action_type=LedgerActionType.INCOME,
        location=Location.BLOCKFI,
        amount=AssetAmount(FVal('0.00052383')),
        asset=A_BTC,
        rate=None,
        rate_asset=None,
        link=None,
        notes='Referral Bonus from BlockFi',
    ), LedgerAction(
        identifier=1,
        timestamp=Timestamp(1612051199),
        action_type=LedgerActionType.INCOME,
        location=Location.BLOCKFI,
        amount=AssetAmount(FVal('0.56469042')),
        asset=A_ETH,
        rate=None,
        rate_asset=None,
        link=None,
        notes='Interest Payment from BlockFi',
    )]
    assert expected_actions == ledger_actions

    # Deposits/withdrawals from the CSV end up as asset movements, not ledger actions
    expected_movements = [AssetMovement(
        location=Location.BLOCKFI,
        category=AssetMovementCategory.DEPOSIT,
        timestamp=Timestamp(1595247055),
        address=None,
        transaction_id=None,
        asset=A_BTC,
        amount=AssetAmount(FVal('1.11415058')),
        fee_asset=A_USD,
        fee=Fee(ZERO),
        link='',
    ), AssetMovement(
        location=Location.BLOCKFI,
        category=AssetMovementCategory.WITHDRAWAL,
        address=None,
        transaction_id=None,
        timestamp=Timestamp(1605977971),
        asset=A_ETH,
        amount=AssetAmount(FVal('3')),
        fee_asset=A_USD,
        fee=Fee(ZERO),
        link='',
    )]
    assert expected_movements == asset_movements
class GitcoinDataImporter():
    """Imports gitcoin grant events from a gitcoin CSV export into the DB as ledger actions."""

    def __init__(self, db: DBHandler) -> None:
        self.db = db
        # Imported entries are persisted as ledger actions in the user DB
        self.db_ledger = DBLedgerActions(self.db, self.db.msg_aggregator)
        # Captures the numeric grant id from the grant URL path
        self.grantid_re = re.compile(r'/grants/(\d+)/.*')

    def _consume_grant_entry(self, entry: Dict[str, Any]) -> Optional[LedgerAction]:
        """
        Consumes a grant entry from the CSV and turns it into a LedgerAction

        Returns None (after emitting a warning where appropriate) for rows that are
        not grants, have an unpriceable zero token amount, or an unparseable grant id.

        May raise:
        - DeserializationError
        - KeyError
        - UnknownAsset
        """
        if entry['Type'] != 'grant':
            return None

        timestamp = deserialize_timestamp_from_date(
            date=entry['date'],
            formatstr='%Y-%m-%dT%H:%M:%S',
            location='Gitcoin CSV',
            skip_milliseconds=True,
        )
        usd_value = deserialize_asset_amount(entry['Value In USD'])
        asset = get_asset_by_symbol(entry['token_name'])
        if asset is None:
            raise UnknownAsset(entry['token_name'])
        token_amount = deserialize_asset_amount(entry['token_value'])

        if token_amount == ZERO:  # try to make up for https://github.com/gitcoinco/web/issues/9213
            price = query_usd_price_zero_if_error(
                asset=asset,
                time=timestamp,
                location=f'Gitcoin CSV entry {entry["txid"]}',
                msg_aggregator=self.db.msg_aggregator,
            )
            if price == ZERO:
                self.db.msg_aggregator.add_warning(
                    f'Could not process gitcoin grant entry at {entry["date"]} for {asset.symbol} '
                    f'due to amount being zero and inability to find price. Skipping.',
                )
                return None
            # calculate the amount from price and value
            token_amount = usd_value / price  # type: ignore

        match = self.grantid_re.search(entry['url'])
        if match is None:
            self.db.msg_aggregator.add_warning(
                f'Could not process gitcoin grant entry at {entry["date"]} for {asset.symbol} '
                f'due to inability to read grant id. Skipping.',
            )
            return None

        grant_id = int(match.group(1))
        rate = Price(usd_value / token_amount)
        raw_txid = entry['txid']
        tx_type, tx_id = process_gitcoin_txid(key='txid', entry=entry)
        return LedgerAction(
            identifier=1,  # whatever does not go in the DB
            timestamp=timestamp,
            action_type=LedgerActionType.DONATION_RECEIVED,
            location=Location.GITCOIN,
            amount=token_amount,
            asset=asset,
            rate=rate,
            rate_asset=A_USD,  # let's use the rate gitcoin calculated
            link=raw_txid,
            notes=f'Gitcoin grant {grant_id} event',
            extra_data=GitcoinEventData(
                tx_id=tx_id,
                grant_id=grant_id,
                clr_round=None,  # can't get round from CSV
                tx_type=tx_type,
            ),
        )

    def import_gitcoin_csv(self, filepath: Path) -> None:
        """Read the gitcoin CSV at filepath and store its not-yet-known grant events in the DB."""
        with open(filepath, 'r', encoding='utf-8-sig') as csvfile:
            data = csv.DictReader(csvfile, delimiter=',', quotechar='"')
            actions = []
            for row in data:
                try:
                    action = self._consume_grant_entry(row)
                except UnknownAsset as e:
                    self.db.msg_aggregator.add_warning(
                        f'During gitcoin grant CSV processing found asset {e.asset_name} '
                        f'that cant be matched to a single known asset. Skipping entry.',
                    )
                    continue
                except (DeserializationError, KeyError) as e:
                    msg = str(e)
                    if isinstance(e, KeyError):
                        msg = f'Missing key entry for {msg}.'
                    self.db.msg_aggregator.add_error(
                        'Unexpected data encountered during deserialization of a gitcoin CSV '
                        'entry. Check logs for details and open a bug report.',
                    )
                    log.error(
                        f'Unexpected data encountered during deserialization of a gitcoin '
                        f'CSV entry: {row} . Error was: {msg}',
                    )
                    continue

                if action:
                    actions.append(action)

        # Deduplicate against already stored actions, keyed on the tx id stored in link
        db_actions = self.db_ledger.get_ledger_actions(
            filter_query=LedgerActionsFilterQuery.make(location=Location.GITCOIN),
            has_premium=True,
        )
        existing_txids = [x.link for x in db_actions]
        self.db_ledger.add_ledger_actions([x for x in actions if x.link not in existing_txids])
def test_store_same_tx_hash_in_db(database):
    """Test that if somehow during addition a duplicate is added,
    it's ignored and only 1 ends up in the db"""
    action1 = LedgerAction(
        identifier=1,
        timestamp=Timestamp(1624791600),
        action_type=LedgerActionType.DONATION_RECEIVED,
        location=Location.GITCOIN,
        amount=FVal('0.0004789924016679019628604417823'),
        asset=A_ETH,
        rate=FVal('1983.33'),
        rate_asset=A_USD,
        link='0x00298f72ad40167051e111e6dc2924de08cce7cf0ad00d04ad5a9e58426536a1',
        notes='Gitcoin grant 149 event',
        extra_data=GitcoinEventData(
            tx_id='0x00298f72ad40167051e111e6dc2924de08cce7cf0ad00d04ad5a9e58426536a1',
            grant_id=149,
            clr_round=None,
            tx_type=GitcoinEventTxType.ETHEREUM,
        ),
    )
    # action2 reuses action1's tx hash/link on purpose -- it is the duplicate
    # that should get rejected at insertion
    action2 = LedgerAction(
        identifier=2,
        timestamp=Timestamp(1634791600),
        action_type=LedgerActionType.DONATION_RECEIVED,
        location=Location.GITCOIN,
        amount=FVal('0.789924016679019628604417823'),
        asset=A_ETH,
        rate=FVal('1913.33'),
        rate_asset=A_USD,
        link='0x00298f72ad40167051e111e6dc2924de08cce7cf0ad00d04ad5a9e58426536a1',
        notes='Gitcoin grant 149 event',
        extra_data=GitcoinEventData(
            tx_id='0x00298f72ad40167051e111e6dc2924de08cce7cf0ad00d04ad5a9e58426536a1',
            grant_id=149,
            clr_round=None,
            tx_type=GitcoinEventTxType.ETHEREUM,
        ),
    )
    # action3 has identifier=2 because with action2 rejected it becomes the
    # second row actually stored; its zksync tx hash is distinct so it goes in
    action3 = LedgerAction(
        identifier=2,
        timestamp=Timestamp(1654791600),
        action_type=LedgerActionType.DONATION_RECEIVED,
        location=Location.GITCOIN,
        amount=FVal('2445533521905078832065264'),
        asset=A_ETH,
        rate=FVal('1973.33'),
        rate_asset=A_USD,
        link='sync-tx:5612f84bc20cda25b911af39b792c973bdd5916b3b6868db2420b5dafd705a90',
        notes='Gitcoin grant 149 event',
        extra_data=GitcoinEventData(
            tx_id='5612f84bc20cda25b911af39b792c973bdd5916b3b6868db2420b5dafd705a90',
            grant_id=149,
            clr_round=None,
            tx_type=GitcoinEventTxType.ZKSYNC,
        ),
    )
    dbledger = DBLedgerActions(database, database.msg_aggregator)
    dbledger.add_ledger_actions([action1, action2, action3])
    stored_actions = dbledger.get_ledger_actions(
        filter_query=LedgerActionsFilterQuery.make(location=Location.GITCOIN),
        has_premium=True,
    )
    # Only the two unique tx hashes survive; the duplicate triggers a warning
    assert stored_actions == [action1, action3]
    errors = database.msg_aggregator.consume_errors()
    warnings = database.msg_aggregator.consume_warnings()
    assert len(errors) == 0
    assert len(warnings) == 1
    assert 'Did not add ledger action to DB' in warnings[0]
def __init__(self, db: DBHandler) -> None:
    """Keep a handle to the user DB and a ledger-actions wrapper around it."""
    self.db = db
    # Reads/writes ledger actions using the same DB and its message aggregator
    self.db_ledger = DBLedgerActions(self.db, self.db.msg_aggregator)
    # Captures the numeric grant id from a gitcoin grant URL path
    self.grantid_re = re.compile(r'/grants/(\d+)/.*')
class DataImporter(): def __init__(self, db: DBHandler) -> None: self.db = db self.db_ledger = DBLedgerActions(self.db, self.db.msg_aggregator) def _consume_cointracking_entry(self, csv_row: Dict[str, Any]) -> None: """Consumes a cointracking entry row from the CSV and adds it into the database Can raise: - DeserializationError if something is wrong with the format of the expected values - UnsupportedCointrackingEntry if importing of this entry is not supported. - IndexError if the CSV file is corrupt - KeyError if the an expected CSV key is missing - UnknownAsset if one of the assets founds in the entry are not supported """ row_type = csv_row['Type'] timestamp = deserialize_timestamp_from_date( date=csv_row['Date'], formatstr='%d.%m.%Y %H:%M:%S', location='cointracking.info', ) notes = csv_row['Comment'] location = exchange_row_to_location(csv_row['Exchange']) fee = Fee(ZERO) fee_currency = A_USD # whatever (used only if there is no fee) if csv_row['Fee'] != '': fee = deserialize_fee(csv_row['Fee']) fee_currency = symbol_to_asset_or_token(csv_row['Cur.Fee']) if row_type in ('Gift/Tip', 'Trade', 'Income'): base_asset = symbol_to_asset_or_token(csv_row['Cur.Buy']) quote_asset = None if csv_row[ 'Cur.Sell'] == '' else symbol_to_asset_or_token( csv_row['Cur.Sell']) # noqa: E501 if quote_asset is None and row_type not in ('Gift/Tip', 'Income'): raise DeserializationError( 'Got a trade entry with an empty quote asset') if quote_asset is None: # Really makes no difference as this is just a gift and the amount is zero quote_asset = A_USD base_amount_bought = deserialize_asset_amount(csv_row['Buy']) if csv_row['Sell'] != '-': quote_amount_sold = deserialize_asset_amount(csv_row['Sell']) else: quote_amount_sold = AssetAmount(ZERO) rate = Price(quote_amount_sold / base_amount_bought) trade = Trade( timestamp=timestamp, location=location, base_asset=base_asset, quote_asset=quote_asset, trade_type=TradeType. 
BUY, # It's always a buy during cointracking import amount=base_amount_bought, rate=rate, fee=fee, fee_currency=fee_currency, link='', notes=notes, ) self.db.add_trades([trade]) elif row_type in ('Deposit', 'Withdrawal'): category = deserialize_asset_movement_category(row_type.lower()) if category == AssetMovementCategory.DEPOSIT: amount = deserialize_asset_amount(csv_row['Buy']) asset = symbol_to_asset_or_token(csv_row['Cur.Buy']) else: amount = deserialize_asset_amount_force_positive( csv_row['Sell']) asset = symbol_to_asset_or_token(csv_row['Cur.Sell']) asset_movement = AssetMovement( location=location, category=category, address=None, transaction_id=None, timestamp=timestamp, asset=asset, amount=amount, fee=fee, fee_asset=fee_currency, link='', ) self.db.add_asset_movements([asset_movement]) else: raise UnsupportedCSVEntry( f'Unknown entrype type "{row_type}" encountered during cointracking ' f'data import. Ignoring entry', ) def import_cointracking_csv(self, filepath: Path) -> Tuple[bool, str]: with open(filepath, 'r', encoding='utf-8-sig') as csvfile: data = csv.reader(csvfile, delimiter=',', quotechar='"') header = remap_header(next(data)) for row in data: try: self._consume_cointracking_entry(dict(zip(header, row))) except UnknownAsset as e: self.db.msg_aggregator.add_warning( f'During cointracking CSV import found action with unknown ' f'asset {e.asset_name}. Ignoring entry', ) continue except IndexError: self.db.msg_aggregator.add_warning( 'During cointracking CSV import found entry with ' 'unexpected number of columns', ) continue except DeserializationError as e: self.db.msg_aggregator.add_warning( f'Error during cointracking CSV import deserialization. ' f'Error was {str(e)}. 
Ignoring entry', ) continue except UnsupportedCSVEntry as e: self.db.msg_aggregator.add_warning(str(e)) continue except KeyError as e: return False, str(e) return True, '' def _consume_cryptocom_entry(self, csv_row: Dict[str, Any]) -> None: """Consumes a cryptocom entry row from the CSV and adds it into the database Can raise: - DeserializationError if something is wrong with the format of the expected values - UnsupportedCryptocomEntry if importing of this entry is not supported. - KeyError if the an expected CSV key is missing - UnknownAsset if one of the assets founds in the entry are not supported """ row_type = csv_row['Transaction Kind'] timestamp = deserialize_timestamp_from_date( date=csv_row['Timestamp (UTC)'], formatstr='%Y-%m-%d %H:%M:%S', location='cryptocom', ) description = csv_row['Transaction Description'] notes = f'{description}\nSource: crypto.com (CSV import)' # No fees info until (Nov 2020) on crypto.com # fees are not displayed in the export data fee = Fee(ZERO) fee_currency = A_USD # whatever (used only if there is no fee) if row_type in ( 'crypto_purchase', 'crypto_exchange', 'referral_gift', 'referral_bonus', 'crypto_earn_interest_paid', 'referral_card_cashback', 'card_cashback_reverted', 'reimbursement', ): # variable mapping to raw data currency = csv_row['Currency'] to_currency = csv_row['To Currency'] native_currency = csv_row['Native Currency'] amount = csv_row['Amount'] to_amount = csv_row['To Amount'] native_amount = csv_row['Native Amount'] trade_type = TradeType.BUY if to_currency != native_currency else TradeType.SELL if row_type == 'crypto_exchange': # trades crypto to crypto base_asset = symbol_to_asset_or_token(to_currency) quote_asset = symbol_to_asset_or_token(currency) if quote_asset is None: raise DeserializationError( 'Got a trade entry with an empty quote asset') base_amount_bought = deserialize_asset_amount(to_amount) quote_amount_sold = deserialize_asset_amount(amount) else: base_asset = 
symbol_to_asset_or_token(currency) quote_asset = symbol_to_asset_or_token(native_currency) base_amount_bought = deserialize_asset_amount(amount) quote_amount_sold = deserialize_asset_amount(native_amount) rate = Price(abs(quote_amount_sold / base_amount_bought)) trade = Trade( timestamp=timestamp, location=Location.CRYPTOCOM, base_asset=base_asset, quote_asset=quote_asset, trade_type=trade_type, amount=base_amount_bought, rate=rate, fee=fee, fee_currency=fee_currency, link='', notes=notes, ) self.db.add_trades([trade]) elif row_type in ('crypto_withdrawal', 'crypto_deposit'): if row_type == 'crypto_withdrawal': category = AssetMovementCategory.WITHDRAWAL amount = deserialize_asset_amount_force_positive( csv_row['Amount']) else: category = AssetMovementCategory.DEPOSIT amount = deserialize_asset_amount(csv_row['Amount']) asset = symbol_to_asset_or_token(csv_row['Currency']) asset_movement = AssetMovement( location=Location.CRYPTOCOM, category=category, address=None, transaction_id=None, timestamp=timestamp, asset=asset, amount=amount, fee=fee, fee_asset=asset, link='', ) self.db.add_asset_movements([asset_movement]) elif row_type in ('airdrop_to_exchange_transfer', 'mco_stake_reward'): asset = symbol_to_asset_or_token(csv_row['Currency']) amount = deserialize_asset_amount(csv_row['Amount']) action = LedgerAction( identifier=0, # whatever is not used at insertion timestamp=timestamp, action_type=LedgerActionType.INCOME, location=Location.CRYPTOCOM, amount=amount, asset=asset, rate=None, rate_asset=None, link=None, notes=None, ) self.db_ledger.add_ledger_action(action) elif row_type == 'invest_deposit': asset = symbol_to_asset_or_token(csv_row['Currency']) amount = deserialize_asset_amount(csv_row['Amount']) asset_movement = AssetMovement( location=Location.CRYPTOCOM, category=AssetMovementCategory.DEPOSIT, address=None, transaction_id=None, timestamp=timestamp, asset=asset, amount=amount, fee=fee, fee_asset=fee_currency, link='', ) 
self.db.add_asset_movements([asset_movement]) elif row_type == 'invest_withdrawal': asset = symbol_to_asset_or_token(csv_row['Currency']) amount = deserialize_asset_amount(csv_row['Amount']) asset_movement = AssetMovement( location=Location.CRYPTOCOM, category=AssetMovementCategory.WITHDRAWAL, address=None, transaction_id=None, timestamp=timestamp, asset=asset, amount=amount, fee=fee, fee_asset=fee_currency, link='', ) self.db.add_asset_movements([asset_movement]) elif row_type in ( 'crypto_earn_program_created', 'crypto_earn_program_withdrawn', 'lockup_lock', 'lockup_unlock', 'dynamic_coin_swap_bonus_exchange_deposit', 'crypto_wallet_swap_debited', 'crypto_wallet_swap_credited', 'lockup_swap_debited', 'lockup_swap_credited', 'lockup_swap_rebate', 'dynamic_coin_swap_bonus_exchange_deposit', # we don't handle cryto.com exchange yet 'crypto_to_exchange_transfer', 'exchange_to_crypto_transfer', # supercharger actions 'supercharger_deposit', 'supercharger_withdrawal', # already handled using _import_cryptocom_associated_entries 'dynamic_coin_swap_debited', 'dynamic_coin_swap_credited', 'dust_conversion_debited', 'dust_conversion_credited', 'interest_swap_credited', 'interest_swap_debited', # The user has received an aidrop but can't claim it yet 'airdrop_locked', ): # those types are ignored because it doesn't affect the wallet balance # or are not handled here return else: raise UnsupportedCSVEntry( f'Unknown entrype type "{row_type}" encountered during ' f'cryptocom data import. Ignoring entry', ) def _import_cryptocom_associated_entries(self, data: Any, tx_kind: str) -> None: """Look for events that have associated entries and handle them as trades. This method looks for `*_debited` and `*_credited` entries using the same timestamp to handle them as one trade. 
Known kind: 'dynamic_coin_swap' or 'dust_conversion' May raise: - UnknownAsset if an unknown asset is encountered in the imported files - KeyError if a row contains unexpected data entries """ multiple_rows: Dict[Any, Dict[str, Any]] = {} investments_deposits: Dict[str, List[Any]] = defaultdict(list) investments_withdrawals: Dict[str, List[Any]] = defaultdict(list) debited_row = None credited_row = None for row in data: if row['Transaction Kind'] == f'{tx_kind}_debited': timestamp = deserialize_timestamp_from_date( date=row['Timestamp (UTC)'], formatstr='%Y-%m-%d %H:%M:%S', location='cryptocom', ) if timestamp not in multiple_rows: multiple_rows[timestamp] = {} if 'debited' not in multiple_rows[timestamp]: multiple_rows[timestamp]['debited'] = [] multiple_rows[timestamp]['debited'].append(row) elif row['Transaction Kind'] == f'{tx_kind}_credited': # They only is one credited row timestamp = deserialize_timestamp_from_date( date=row['Timestamp (UTC)'], formatstr='%Y-%m-%d %H:%M:%S', location='cryptocom', ) if timestamp not in multiple_rows: multiple_rows[timestamp] = {} multiple_rows[timestamp]['credited'] = row elif row['Transaction Kind'] == f'{tx_kind}_deposit': asset = row['Currency'] investments_deposits[asset].append(row) elif row['Transaction Kind'] == f'{tx_kind}_withdrawal': asset = row['Currency'] investments_withdrawals[asset].append(row) for timestamp in multiple_rows: # When we convert multiple assets dust to CRO # in one time, it will create multiple debited rows with # the same timestamp debited_rows = multiple_rows[timestamp]['debited'] credited_row = multiple_rows[timestamp]['credited'] total_debited_usd = functools.reduce( lambda acc, row: acc + deserialize_asset_amount(row[ 'Native Amount (in USD)']), debited_rows, ZERO, ) # If the value of the transaction is too small (< 0,01$), # crypto.com will display 0 as native amount # if we have multiple debited rows, we can't import them # since we can't compute their dedicated rates, so we skip them if 
len(debited_rows) > 1 and total_debited_usd == 0: return if credited_row is not None and len(debited_rows) != 0: for debited_row in debited_rows: description = credited_row['Transaction Description'] notes = f'{description}\nSource: crypto.com (CSV import)' # No fees here fee = Fee(ZERO) fee_currency = A_USD base_asset = symbol_to_asset_or_token( credited_row['Currency']) quote_asset = symbol_to_asset_or_token( debited_row['Currency']) part_of_total = ( FVal(1) if len(debited_rows) == 1 else deserialize_asset_amount( debited_row["Native Amount (in USD)"], ) / total_debited_usd) quote_amount_sold = deserialize_asset_amount( debited_row['Amount'], ) * part_of_total base_amount_bought = deserialize_asset_amount( credited_row['Amount'], ) * part_of_total rate = Price(abs(base_amount_bought / quote_amount_sold)) trade = Trade( timestamp=timestamp, location=Location.CRYPTOCOM, base_asset=base_asset, quote_asset=quote_asset, trade_type=TradeType.BUY, amount=AssetAmount(base_amount_bought), rate=rate, fee=fee, fee_currency=fee_currency, link='', notes=notes, ) self.db.add_trades([trade]) # Compute investments profit if len(investments_withdrawals) != 0: for asset in investments_withdrawals: asset_object = symbol_to_asset_or_token(asset) if asset not in investments_deposits: log.error( f'Investment withdrawal without deposit at crypto.com. 
Ignoring '
                    f'staking info for asset {asset_object}',
                )
                continue
            # Pair every staking withdrawal with the deposits that happened since the
            # previous withdrawal to derive the profit for that staking period.
            # Sort by date in ascending order
            withdrawals_rows = sorted(
                investments_withdrawals[asset],
                key=lambda x: deserialize_timestamp_from_date(
                    date=x['Timestamp (UTC)'],
                    formatstr='%Y-%m-%d %H:%M:%S',
                    location='cryptocom',
                ),
            )
            investments_rows = sorted(
                investments_deposits[asset],
                key=lambda x: deserialize_timestamp_from_date(
                    date=x['Timestamp (UTC)'],
                    formatstr='%Y-%m-%d %H:%M:%S',
                    location='cryptocom',
                ),
            )
            last_date = Timestamp(0)
            for withdrawal in withdrawals_rows:
                withdrawal_date = deserialize_timestamp_from_date(
                    date=withdrawal['Timestamp (UTC)'],
                    formatstr='%Y-%m-%d %H:%M:%S',
                    location='cryptocom',
                )
                amount_deposited = ZERO
                for deposit in investments_rows:
                    deposit_date = deserialize_timestamp_from_date(
                        date=deposit['Timestamp (UTC)'],
                        formatstr='%Y-%m-%d %H:%M:%S',
                        location='cryptocom',
                    )
                    if last_date < deposit_date <= withdrawal_date:
                        # Amount is negative
                        amount_deposited += deserialize_asset_amount(deposit['Amount'])
                amount_withdrawal = deserialize_asset_amount(withdrawal['Amount'])
                # Compute profit: withdrawal plus the (negative) deposits of the period
                profit = amount_withdrawal + amount_deposited
                if profit >= ZERO:
                    last_date = withdrawal_date
                    action = LedgerAction(
                        identifier=0,  # whatever is not used at insertion
                        timestamp=withdrawal_date,
                        action_type=LedgerActionType.INCOME,
                        location=Location.CRYPTOCOM,
                        amount=AssetAmount(profit),
                        asset=asset_object,
                        rate=None,
                        rate_asset=None,
                        link=None,
                        notes=f'Stake profit for asset {asset}',
                    )
                    self.db_ledger.add_ledger_action(action)

    def import_cryptocom_csv(self, filepath: Path) -> Tuple[bool, str]:
        """Import a crypto.com transaction-history CSV.

        Returns (success, message). On failure the message describes the problem.
        """
        with open(filepath, 'r', encoding='utf-8-sig') as csvfile:
            data = csv.DictReader(csvfile)
            try:
                # Notice: Crypto.com csv export gathers all swapping entries (`lockup_swap_*`,
                # `crypto_wallet_swap_*`, ...) into one entry named `dynamic_coin_swap_*`.
                self._import_cryptocom_associated_entries(data, 'dynamic_coin_swap')
                # reset the iterator
                csvfile.seek(0)
                # pass the header since seek(0) make the first row to be the header
                next(data)

                self._import_cryptocom_associated_entries(data, 'dust_conversion')
                csvfile.seek(0)
                next(data)

                self._import_cryptocom_associated_entries(data, 'interest_swap')
                csvfile.seek(0)
                next(data)

                self._import_cryptocom_associated_entries(data, 'invest')
                csvfile.seek(0)
                next(data)
            except KeyError as e:
                return False, f'Crypto.com csv missing entry for {str(e)}'
            except UnknownAsset as e:
                return False, f'Encountered unknown asset {str(e)} at crypto.com csv import'

            # Consume the remaining (non-associated) entries one by one, warning on
            # anything that cannot be understood instead of aborting the import.
            for row in data:
                try:
                    self._consume_cryptocom_entry(row)
                except UnknownAsset as e:
                    self.db.msg_aggregator.add_warning(
                        f'During cryptocom CSV import found action with unknown '
                        f'asset {e.asset_name}. Ignoring entry',
                    )
                    continue
                except DeserializationError as e:
                    self.db.msg_aggregator.add_warning(
                        f'Error during cryptocom CSV import deserialization. '
                        f'Error was {str(e)}. Ignoring entry',
                    )
                    continue
                except UnsupportedCSVEntry as e:
                    self.db.msg_aggregator.add_warning(str(e))
                    continue
                except KeyError as e:
                    return False, str(e)

        return True, ''

    def _consume_blockfi_entry(self, csv_row: Dict[str, Any]) -> None:
        """
        Process entry for BlockFi transaction history. Trades for this file are ignored
        and instead should be extracted from the file containing only trades.

        This method can raise:
        - UnsupportedBlockFiEntry
        - UnknownAsset
        - DeserializationError
        """
        # Only confirmed entries carry a timestamp; unconfirmed ones are skipped
        if len(csv_row['Confirmed At']) != 0:
            timestamp = deserialize_timestamp_from_date(
                date=csv_row['Confirmed At'],
                formatstr='%Y-%m-%d %H:%M:%S',
                location='BlockFi',
            )
        else:
            log.debug(f'Ignoring unconfirmed BlockFi entry {csv_row}')
            return

        asset = symbol_to_asset_or_token(csv_row['Cryptocurrency'])
        amount = deserialize_asset_amount_force_positive(csv_row['Amount'])
        entry_type = csv_row['Transaction Type']
        # BlockFI doesn't provide information about fees
        fee = Fee(ZERO)
        fee_asset = A_USD  # Can be whatever

        if entry_type in ('Deposit', 'Wire Deposit', 'ACH Deposit'):
            asset_movement = AssetMovement(
                location=Location.BLOCKFI,
                category=AssetMovementCategory.DEPOSIT,
                address=None,
                transaction_id=None,
                timestamp=timestamp,
                asset=asset,
                amount=amount,
                fee=fee,
                fee_asset=fee_asset,
                link='',
            )
            self.db.add_asset_movements([asset_movement])
        elif entry_type in ('Withdrawal', 'Wire Withdrawal', 'ACH Withdrawal'):
            asset_movement = AssetMovement(
                location=Location.BLOCKFI,
                category=AssetMovementCategory.WITHDRAWAL,
                address=None,
                transaction_id=None,
                timestamp=timestamp,
                asset=asset,
                amount=amount,
                fee=fee,
                fee_asset=fee_asset,
                link='',
            )
            self.db.add_asset_movements([asset_movement])
        elif entry_type == 'Withdrawal Fee':
            action = LedgerAction(
                identifier=0,  # whatever is not used at insertion
                timestamp=timestamp,
                action_type=LedgerActionType.EXPENSE,
                location=Location.BLOCKFI,
                amount=amount,
                asset=asset,
                rate=None,
                rate_asset=None,
                link=None,
                notes=f'{entry_type} from BlockFi',
            )
            self.db_ledger.add_ledger_action(action)
        elif entry_type in ('Interest Payment', 'Bonus Payment', 'Referral Bonus'):
            action = LedgerAction(
                identifier=0,  # whatever is not used at insertion
                timestamp=timestamp,
                action_type=LedgerActionType.INCOME,
                location=Location.BLOCKFI,
                amount=amount,
                asset=asset,
                rate=None,
                rate_asset=None,
                link=None,
                notes=f'{entry_type} from BlockFi',
            )
            self.db_ledger.add_ledger_action(action)
        elif entry_type == 'Trade':
            # Trades come from the dedicated trades CSV, not this file
            pass
        else:
            raise UnsupportedCSVEntry(f'Unsuported entry {entry_type}. Data: {csv_row}')

    def import_blockfi_transactions_csv(self, filepath: Path) -> Tuple[bool, str]:
        """
        Import the BlockFi transaction-history CSV. Returns (success, message).

        Information for the values that the columns can have has been obtained from
        https://github.com/BittyTax/BittyTax/blob/06794f51223398759852d6853bc7112ffb96129a/bittytax/conv/parsers/blockfi.py#L67
        """
        with open(filepath, 'r', encoding='utf-8-sig') as csvfile:
            data = csv.DictReader(csvfile)
            for row in data:
                try:
                    self._consume_blockfi_entry(row)
                except UnknownAsset as e:
                    self.db.msg_aggregator.add_warning(
                        f'During BlockFi CSV import found action with unknown '
                        f'asset {e.asset_name}. Ignoring entry',
                    )
                    continue
                except DeserializationError as e:
                    self.db.msg_aggregator.add_warning(
                        f'Deserialization error during BlockFi CSV import. '
                        f'{str(e)}. Ignoring entry',
                    )
                    continue
                except UnsupportedCSVEntry as e:
                    self.db.msg_aggregator.add_warning(str(e))
                    continue
                except KeyError as e:
                    return False, str(e)

        return True, ''

    def _consume_blockfi_trade(self, csv_row: Dict[str, Any]) -> None:
        """
        Consume the file containing only trades from BlockFi. As per my investigations
        (@yabirgb) this file can only contain confirmed trades.

        This method can raise:
        - UnknownAsset
        - DeserializationError
        """
        timestamp = deserialize_timestamp_from_date(
            date=csv_row['Date'],
            formatstr='%Y-%m-%d %H:%M:%S',
            location='BlockFi',
        )

        buy_asset = symbol_to_asset_or_token(csv_row['Buy Currency'])
        buy_amount = deserialize_asset_amount(csv_row['Buy Quantity'])
        sold_asset = symbol_to_asset_or_token(csv_row['Sold Currency'])
        sold_amount = deserialize_asset_amount(csv_row['Sold Quantity'])
        if sold_amount == ZERO:
            # A zero sold amount would make the rate division below blow up
            log.debug(f'Ignoring BlockFi trade with sold_amount equal to zero. {csv_row}')
            return
        rate = Price(buy_amount / sold_amount)
        trade = Trade(
            timestamp=timestamp,
            location=Location.BLOCKFI,
            base_asset=buy_asset,
            quote_asset=sold_asset,
            trade_type=TradeType.BUY,
            amount=buy_amount,
            rate=rate,
            fee=None,  # BlockFI doesn't provide this information
            fee_currency=None,
            link='',
            notes=csv_row['Type'],
        )
        self.db.add_trades([trade])

    def import_blockfi_trades_csv(self, filepath: Path) -> Tuple[bool, str]:
        """
        Import the BlockFi trades-only CSV. Returns (success, message).

        Information for the values that the columns can have has been obtained from
        the issue in github #1674
        """
        with open(filepath, 'r', encoding='utf-8-sig') as csvfile:
            data = csv.DictReader(csvfile)
            for row in data:
                try:
                    self._consume_blockfi_trade(row)
                except UnknownAsset as e:
                    self.db.msg_aggregator.add_warning(
                        f'During BlockFi CSV import found action with unknown '
                        f'asset {e.asset_name}. Ignoring entry',
                    )
                    continue
                except DeserializationError as e:
                    self.db.msg_aggregator.add_warning(
                        f'Deserialization error during BlockFi CSV import. '
                        f'{str(e)}. Ignoring entry',
                    )
                    continue
                except UnsupportedCSVEntry as e:
                    self.db.msg_aggregator.add_warning(str(e))
                    continue
                except KeyError as e:
                    return False, str(e)

        return True, ''

    def _consume_nexo(self, csv_row: Dict[str, Any]) -> None:
        """
        Consume CSV file from NEXO.

        This method can raise:
        - UnsupportedNexoEntry
        - UnknownAsset
        - DeserializationError
        """
        # Exchange-internal transfers carry no value change for the user
        ignored_entries = ('ExchangeToWithdraw', 'DepositToExchange')

        if 'rejected' not in csv_row['Details']:
            timestamp = deserialize_timestamp_from_date(
                date=csv_row['Date / Time'],
                formatstr='%Y-%m-%d %H:%M',
                location='NEXO',
            )
        else:
            log.debug(f'Ignoring rejected nexo entry {csv_row}')
            return

        asset = symbol_to_asset_or_token(csv_row['Currency'])
        amount = deserialize_asset_amount_force_positive(csv_row['Amount'])
        entry_type = csv_row['Type']
        transaction = csv_row['Transaction']

        if entry_type in ('Deposit', 'ExchangeDepositedOn', 'LockingTermDeposit'):
            asset_movement = AssetMovement(
                location=Location.NEXO,
                category=AssetMovementCategory.DEPOSIT,
                address=None,
                transaction_id=None,
                timestamp=timestamp,
                asset=asset,
                amount=amount,
                fee=Fee(ZERO),
                fee_asset=A_USD,
                link=transaction,
            )
            self.db.add_asset_movements([asset_movement])
        elif entry_type in ('Withdrawal', 'WithdrawExchanged'):
            asset_movement = AssetMovement(
                location=Location.NEXO,
                category=AssetMovementCategory.WITHDRAWAL,
                address=None,
                transaction_id=None,
                timestamp=timestamp,
                asset=asset,
                amount=amount,
                fee=Fee(ZERO),
                fee_asset=A_USD,
                link=transaction,
            )
            self.db.add_asset_movements([asset_movement])
        elif entry_type == 'Withdrawal Fee':
            action = LedgerAction(
                identifier=0,  # whatever is not used at insertion
                timestamp=timestamp,
                action_type=LedgerActionType.EXPENSE,
                location=Location.NEXO,
                amount=amount,
                asset=asset,
                rate=None,
                rate_asset=None,
                link=None,
                notes=f'{entry_type} from Nexo',
            )
            self.db_ledger.add_ledger_action(action)
        elif entry_type in ('Interest', 'Bonus', 'Dividend'):
            action = LedgerAction(
                identifier=0,  # whatever is not used at insertion
                timestamp=timestamp,
                action_type=LedgerActionType.INCOME,
                location=Location.NEXO,
                amount=amount,
                asset=asset,
                rate=None,
                rate_asset=None,
                link=transaction,
                notes=f'{entry_type} from Nexo',
            )
            self.db_ledger.add_ledger_action(action)
        elif entry_type in ignored_entries:
            pass
        else:
            raise UnsupportedCSVEntry(f'Unsuported entry {entry_type}. Data: {csv_row}')

    def import_nexo_csv(self, filepath: Path) -> Tuple[bool, str]:
        """
        Import a NEXO transaction CSV. Returns (success, message).

        Information for the values that the columns can have has been obtained from
        https://github.com/BittyTax/BittyTax/blob/06794f51223398759852d6853bc7112ffb96129a/bittytax/conv/parsers/nexo.py
        """
        with open(filepath, 'r', encoding='utf-8-sig') as csvfile:
            data = csv.DictReader(csvfile)
            for row in data:
                try:
                    self._consume_nexo(row)
                except UnknownAsset as e:
                    self.db.msg_aggregator.add_warning(
                        f'During Nexo CSV import found action with unknown '
                        f'asset {e.asset_name}. Ignoring entry',
                    )
                    continue
                except DeserializationError as e:
                    self.db.msg_aggregator.add_warning(
                        f'Deserialization error during Nexo CSV import. '
                        f'{str(e)}. Ignoring entry',
                    )
                    continue
                except UnsupportedCSVEntry as e:
                    self.db.msg_aggregator.add_warning(str(e))
                    continue
                except KeyError as e:
                    return False, str(e)

        return True, ''
def __init__(self, db: DBHandler) -> None:
    """Store the main DB handle and a ledger-actions view over the same DB."""
    self.db = db
    self.db_ledger = DBLedgerActions(db, db.msg_aggregator)
def test_delete_grant_events(rotkehlchen_api_server):
    """Save events for 3 gitcoin grants via the API, then delete them one grant
    at a time and all at once, asserting events, query ranges and grant metadata
    are removed consistently."""
    rotki = rotkehlchen_api_server.rest_api.rotkehlchen
    # Get and save data of 3 different grants in the DB
    id1 = 149
    metadata1 = GitcoinGrantMetadata(
        grant_id=id1,
        name='Rotki - The portfolio tracker and accounting tool that protects your privacy',
        created_on=1571694841,
    )
    json_data = {
        'from_timestamp': 1622162468,  # 28/05/2021
        'to_timestamp': 1622246400,  # 29/05/2021
        'grant_id': id1,
    }
    response = requests.post(api_url_for(
        rotkehlchen_api_server,
        'gitcoineventsresource',
    ), json=json_data)
    assert_proper_response(response)
    id2 = 184
    metadata2 = GitcoinGrantMetadata(
        grant_id=id2,
        name='TrueBlocks',
        created_on=1575424305,
    )
    json_data = {
        'from_timestamp': 1622162468,  # 28/05/2021
        'to_timestamp': 1622246400,  # 29/05/2021
        'grant_id': id2,
    }
    response = requests.post(api_url_for(
        rotkehlchen_api_server,
        'gitcoineventsresource',
    ), json=json_data)
    assert_proper_response(response)
    id3 = 223
    metadata3 = GitcoinGrantMetadata(
        grant_id=id3,
        name='Ethereum Magicians',
        created_on=1578054753,
    )
    json_data = {
        'from_timestamp': 1622162468,  # 28/05/2021
        'to_timestamp': 1622246400,  # 29/05/2021
        'grant_id': id3,
    }
    response = requests.post(api_url_for(
        rotkehlchen_api_server,
        'gitcoineventsresource',
    ), json=json_data)
    assert_proper_response(response)

    # make sure events are saved
    db = rotki.data.db
    ledgerdb = DBLedgerActions(db, db.msg_aggregator)
    actions = ledgerdb.get_ledger_actions(
        filter_query=LedgerActionsFilterQuery.make(location=Location.GITCOIN),
        has_premium=True,
    )
    assert len(actions) == 5
    assert len([x for x in actions if x.extra_data.grant_id == id1]) == 3
    assert len([x for x in actions if x.extra_data.grant_id == id2]) == 1
    assert len([x for x in actions if x.extra_data.grant_id == id3]) == 1
    # make sure db ranges were written
    queryrange = db.get_used_query_range(f'{GITCOIN_GRANTS_PREFIX}_{id1}')
    assert queryrange == (1622162468, 1622246400)
    queryrange = db.get_used_query_range(f'{GITCOIN_GRANTS_PREFIX}_{id2}')
    assert queryrange == (1622162468, 1622246400)
    queryrange = db.get_used_query_range(f'{GITCOIN_GRANTS_PREFIX}_{id3}')
    assert queryrange == (1622162468, 1622246400)
    # make sure grant metadata were written
    assert ledgerdb.get_gitcoin_grant_metadata() == {
        id1: metadata1,
        id2: metadata2,
        id3: metadata3,
    }

    # delete 1 grant's data
    response = requests.delete(api_url_for(
        rotkehlchen_api_server,
        'gitcoineventsresource',
    ), json={'grant_id': id2})
    assert_proper_response(response)
    # check that they got deleted but rest is fine
    actions = ledgerdb.get_ledger_actions(
        filter_query=LedgerActionsFilterQuery.make(location=Location.GITCOIN),
        has_premium=True,
    )
    assert len(actions) == 4
    assert len([x for x in actions if x.extra_data.grant_id == id1]) == 3
    assert len([x for x in actions if x.extra_data.grant_id == id2]) == 0
    assert len([x for x in actions if x.extra_data.grant_id == id3]) == 1
    # make sure only the deleted grant's query range was removed
    queryrange = db.get_used_query_range(f'{GITCOIN_GRANTS_PREFIX}_{id1}')
    assert queryrange == (1622162468, 1622246400)
    assert db.get_used_query_range(f'{GITCOIN_GRANTS_PREFIX}_{id2}') is None
    queryrange = db.get_used_query_range(f'{GITCOIN_GRANTS_PREFIX}_{id3}')
    assert queryrange == (1622162468, 1622246400)
    # make sure only the deleted grant's metadata was removed
    assert ledgerdb.get_gitcoin_grant_metadata() == {id1: metadata1, id3: metadata3}

    # delete all remaining grant data
    response = requests.delete(api_url_for(
        rotkehlchen_api_server,
        'gitcoineventsresource',
    ))
    assert_proper_response(response)
    # check that they got deleted but rest is fine
    actions = ledgerdb.get_ledger_actions(
        filter_query=LedgerActionsFilterQuery.make(location=Location.GITCOIN),
        has_premium=True,
    )
    assert len(actions) == 0
    # make sure all db ranges were removed
    assert db.get_used_query_range(f'{GITCOIN_GRANTS_PREFIX}_{id1}') is None
    assert db.get_used_query_range(f'{GITCOIN_GRANTS_PREFIX}_{id2}') is None
    assert db.get_used_query_range(f'{GITCOIN_GRANTS_PREFIX}_{id3}') is None
    # make sure all grant metadata was removed
    assert ledgerdb.get_gitcoin_grant_metadata() == {}
def test_query_ledger_actions(events_historian, function_scope_messages_aggregator):
    """
    Create actions and query the events historian to check that the history
    has events previous to the selected from_ts. This allows us to verify that
    actions before one period are counted in the PnL report to calculate cost basis.
    https://github.com/rotki/rotki/issues/2541
    """
    selected_timestamp = 10
    db = DBLedgerActions(events_historian.db, function_scope_messages_aggregator)

    # (timestamp, type, amount, asset) for the three fixture actions
    fixture_rows = (
        (selected_timestamp - 2, LedgerActionType.INCOME, FVal(1), A_ETH),
        (selected_timestamp + 3, LedgerActionType.EXPENSE, FVal(0.5), A_ETH),
        (selected_timestamp + 5, LedgerActionType.INCOME, FVal(10), A_USDC),
    )
    for row_ts, row_type, row_amount, row_asset in fixture_rows:
        db.add_ledger_action(LedgerAction(
            identifier=0,  # whatever
            timestamp=row_ts,
            action_type=row_type,
            location=Location.EXTERNAL,
            amount=row_amount,
            asset=row_asset,
            rate=None,
            rate_asset=None,
            link=None,
            notes=None,
        ))

    actions, length = events_historian.query_ledger_actions(
        has_premium=True,
        from_ts=None,
        to_ts=Timestamp(selected_timestamp + 4),
    )

    assert any(action.timestamp < selected_timestamp for action in actions)
    assert length == 2
def __init__(self, db: DBHandler) -> None:
    """Wire up DB access, the ledger-actions view, an HTTP session and the
    CLR payouts cache (populated lazily on the first paginated query)."""
    self.db = db
    self.db_ledger = DBLedgerActions(db, db.msg_aggregator)
    self.session = requests.session()
    self.clr_payouts: Optional[List[Dict[str, Any]]] = None
def test_gitcoin_metadata(database):
    """Gitcoin grant metadata can be set, overwritten and listed per grant or in full."""
    db = DBLedgerActions(database, database.msg_aggregator)

    def set_and_verify(grant_id, name, created_on):
        # write (or overwrite) metadata and read it back for that single grant
        db.set_gitcoin_grant_metadata(
            grant_id=grant_id,
            name=name,
            created_on=created_on,
        )
        assert db.get_gitcoin_grant_metadata(grant_id) == {
            grant_id: GitcoinGrantMetadata(
                grant_id=grant_id,
                name=name,
                created_on=created_on,
            ),
        }

    set_and_verify(1, 'foo', 1)
    # change existing grant metadata
    set_and_verify(1, 'newfoo', 2)
    # add 2nd grant and check we can get both back
    set_and_verify(2, 'boo', 3)
    assert db.get_gitcoin_grant_metadata() == {
        1: GitcoinGrantMetadata(grant_id=1, name='newfoo', created_on=2),
        2: GitcoinGrantMetadata(grant_id=2, name='boo', created_on=3),
    }
def assert_cryptocom_special_events_import_results(rotki: Rotkehlchen):
    """A utility function to help assert on correctness of importing data from crypto.com"""
    trades = rotki.data.db.get_trades()
    ledger_db = DBLedgerActions(rotki.data.db, rotki.msg_aggregator)
    ledger_actions = ledger_db.get_ledger_actions(None, None, None)
    warnings = rotki.msg_aggregator.consume_warnings()
    errors = rotki.msg_aggregator.consume_errors()
    assert len(errors) == 0
    assert len(warnings) == 0
    # Expected actions listed newest-first (identifiers 5..1); the DB returns them
    # oldest-first, hence the reversed() in the assertion below.
    expected_actions = [LedgerAction(
        identifier=5,
        timestamp=Timestamp(1609884000),
        action_type=LedgerActionType.INCOME,
        location=Location.CRYPTOCOM,
        amount=AssetAmount(FVal('1')),
        asset=symbol_to_asset_or_token('CRO'),
        rate=None,
        rate_asset=None,
        link=None,
        notes=None,
    ), LedgerAction(
        identifier=4,
        timestamp=Timestamp(1609884000),
        action_type=LedgerActionType.INCOME,
        location=Location.CRYPTOCOM,
        amount=AssetAmount(FVal('0.5')),
        asset=symbol_to_asset_or_token('MCO'),
        rate=None,
        rate_asset=None,
        link=None,
        notes=None,
    ), LedgerAction(
        identifier=3,
        timestamp=Timestamp(1609884000),
        action_type=LedgerActionType.INCOME,
        location=Location.CRYPTOCOM,
        amount=AssetAmount(FVal('1')),
        asset=symbol_to_asset_or_token('CRO'),
        rate=None,
        rate_asset=None,
        link=None,
        notes=None,
    ), LedgerAction(
        identifier=2,
        timestamp=Timestamp(1609797600),
        action_type=LedgerActionType.INCOME,
        location=Location.CRYPTOCOM,
        amount=AssetAmount(FVal('0.02005')),
        asset=A_BTC,
        rate=None,
        rate_asset=None,
        link=None,
        notes='Stake profit for asset BTC',
    ), LedgerAction(
        identifier=1,
        timestamp=Timestamp(1609624800),
        action_type=LedgerActionType.INCOME,
        location=Location.CRYPTOCOM,
        amount=AssetAmount(FVal('0.00005')),
        asset=A_BTC,
        rate=None,
        rate_asset=None,
        link=None,
        notes='Stake profit for asset BTC',
    )]
    assert list(reversed(expected_actions)) == ledger_actions

    expected_trades = [Trade(
        timestamp=Timestamp(1609884000),
        location=Location.CRYPTOCOM,
        base_asset=symbol_to_asset_or_token('CRO'),
        quote_asset=symbol_to_asset_or_token('MCO'),
        trade_type=TradeType.BUY,
        amount=AssetAmount(FVal('1')),
        rate=Price(FVal('10')),
        fee=Fee(ZERO),
        fee_currency=A_USD,
        link='',
        notes='MCO Earnings/Rewards Swap\nSource: crypto.com (CSV import)',
    )]
    assert trades == expected_trades
def assert_nexo_results(rotki: Rotkehlchen):
    """A utility function to help assert on correctness of importing data from nexo"""
    ledger_db = DBLedgerActions(rotki.data.db, rotki.msg_aggregator)
    ledger_actions = ledger_db.get_ledger_actions(None, None, None)
    asset_movements = rotki.data.db.get_asset_movements()
    warnings = rotki.msg_aggregator.consume_warnings()
    errors = rotki.msg_aggregator.consume_errors()
    assert len(errors) == 0
    assert len(warnings) == 0
    # Income actions (dividends/interest) expected from the nexo CSV fixture
    expected_actions = [LedgerAction(
        identifier=3,
        timestamp=Timestamp(1565888464),
        action_type=LedgerActionType.INCOME,
        location=Location.NEXO,
        amount=AssetAmount(FVal('22.5653042')),
        asset=symbol_to_asset_or_token('NEXO'),
        rate=None,
        rate_asset=None,
        link='NXT0000000009',
        notes='Dividend from Nexo',
    ), LedgerAction(
        identifier=2,
        timestamp=Timestamp(1597492915),
        action_type=LedgerActionType.INCOME,
        location=Location.NEXO,
        amount=AssetAmount(FVal('10.3585507')),
        asset=symbol_to_asset_or_token('NEXO'),
        rate=None,
        rate_asset=None,
        link='NXT0000000007',
        notes='Dividend from Nexo',
    ), LedgerAction(
        identifier=1,
        timestamp=Timestamp(1614993620),
        action_type=LedgerActionType.INCOME,
        location=Location.NEXO,
        amount=AssetAmount(FVal('1')),
        asset=symbol_to_asset_or_token('USDC'),
        rate=None,
        rate_asset=None,
        link='NXT0000000002',
        notes='Interest from Nexo',
    )]
    # Deposits and withdrawals expected from the nexo CSV fixture
    expected_movements = [AssetMovement(
        location=Location.NEXO,
        category=AssetMovementCategory.DEPOSIT,
        timestamp=Timestamp(1556116964),
        address=None,
        transaction_id=None,
        asset=A_BTC,
        amount=AssetAmount(FVal('1')),
        fee_asset=A_USD,
        fee=Fee(ZERO),
        link='NXT0000000013',
    ), AssetMovement(
        location=Location.NEXO,
        category=AssetMovementCategory.WITHDRAWAL,
        timestamp=Timestamp(1556122699),
        address=None,
        transaction_id=None,
        asset=A_BTC,
        amount=AssetAmount(FVal('0.9995')),
        fee_asset=A_USD,
        fee=Fee(ZERO),
        link='NXT0000000012',
    ), AssetMovement(
        location=Location.NEXO,
        category=AssetMovementCategory.DEPOSIT,
        timestamp=Timestamp(1558720210),
        address=None,
        transaction_id=None,
        asset=symbol_to_asset_or_token('NEXO'),
        amount=AssetAmount(FVal('1.00001')),
        fee_asset=A_USD,
        fee=Fee(ZERO),
        link='NXT0000000011',
    ), AssetMovement(
        location=Location.NEXO,
        category=AssetMovementCategory.DEPOSIT,
        timestamp=Timestamp(1565912821),
        address=None,
        transaction_id=None,
        asset=A_EUR,
        amount=AssetAmount(FVal('10000')),
        fee_asset=A_USD,
        fee=Fee(ZERO),
        link='NXT0000000010',
    ), AssetMovement(
        location=Location.NEXO,
        category=AssetMovementCategory.WITHDRAWAL,
        timestamp=Timestamp(1608131364),
        address=None,
        transaction_id=None,
        asset=A_EUR,
        amount=AssetAmount(FVal('2000.79')),
        fee_asset=A_USD,
        fee=Fee(ZERO),
        link='NXT0000000005',
    ), AssetMovement(
        location=Location.NEXO,
        category=AssetMovementCategory.DEPOSIT,
        timestamp=Timestamp(1614366540),
        address=None,
        transaction_id=None,
        asset=A_EUR,
        amount=AssetAmount(FVal('10')),
        fee_asset=A_USD,
        fee=Fee(ZERO),
        link='NXT0000000003',
    ), AssetMovement(
        location=Location.NEXO,
        category=AssetMovementCategory.DEPOSIT,
        timestamp=Timestamp(1615024314),
        address=None,
        transaction_id=None,
        asset=symbol_to_asset_or_token('USDC'),
        amount=AssetAmount(FVal('1')),
        fee_asset=A_USD,
        fee=Fee(ZERO),
        link='NXT0000000001',
    )]
    assert ledger_actions == expected_actions
    assert asset_movements == expected_movements
class GitcoinAPI():
    """Client for the gitcoin grants API: queries contribution history for a
    grant, persists it as ledger actions and serves it back from the DB."""

    def __init__(self, db: DBHandler) -> None:
        self.db = db
        self.db_ledger = DBLedgerActions(self.db, self.db.msg_aggregator)
        self.session = requests.session()
        # Lazily filled by query_grant_history_period30d from the first API page
        self.clr_payouts: Optional[List[Dict[str, Any]]] = None

    def _single_grant_api_query(self, query_str: str) -> Dict[str, Any]:
        """Perform one gitcoin API GET, retrying with exponential backoff on
        connection-pool exhaustion.

        May raise RemoteError on any HTTP, JSON or API-level error.
        """
        backoff = 1
        backoff_limit = 33  # seconds; doubling from 1 allows 5 retries
        while backoff < backoff_limit:
            log.debug(f'Querying gitcoin: {query_str}')
            try:
                response = self.session.get(query_str, timeout=DEFAULT_TIMEOUT_TUPLE)
            except requests.exceptions.RequestException as e:
                if 'Max retries exceeded with url' in str(e):
                    log.debug(
                        f'Got max retries exceeded from gitcoin. Will '
                        f'backoff for {backoff} seconds.',
                    )
                    gevent.sleep(backoff)
                    backoff = backoff * 2
                    if backoff >= backoff_limit:
                        raise RemoteError(
                            'Getting gitcoin error even '
                            'after we incrementally backed off',
                        ) from e
                    continue

                # Any other request failure is not retried
                raise RemoteError(f'Gitcoin API request failed due to {str(e)}') from e

            if response.status_code != 200:
                raise RemoteError(
                    f'Gitcoin API request {response.url} failed '
                    f'with HTTP status code {response.status_code} and text '
                    f'{response.text}',
                )

            try:
                json_ret = jsonloads_dict(response.text)
            except JSONDecodeError as e:
                raise RemoteError(
                    f'Gitcoin API request {response.url} returned invalid '
                    f'JSON response: {response.text}',
                ) from e

            if 'error' in json_ret:
                raise RemoteError(
                    f'Gitcoin API request {response.url} returned an error: {json_ret["error"]}',
                )

            break  # success

        return json_ret

    def get_history_from_db(
            self,
            grant_id: Optional[int],
            from_ts: Optional[Timestamp] = None,
            to_ts: Optional[Timestamp] = None,
    ) -> Dict[int, Dict[str, Any]]:
        """Read gitcoin grant events from the DB, grouped per grant id, and
        attach each grant's name/created_on metadata (None when unknown)."""
        grantid_to_metadata = self.db_ledger.get_gitcoin_grant_metadata(grant_id)
        grantid_to_events = defaultdict(list)
        events = self.db_ledger.get_gitcoin_grant_events(
            grant_id=grant_id,
            from_ts=from_ts,
            to_ts=to_ts,
        )
        for event in events:
            grantid_to_events[event.extra_data.grant_id].append(event.serialize_for_gitcoin())  # type: ignore  # noqa: E501

        result = {}
        for grantid, serialized_events in grantid_to_events.items():
            metadata = grantid_to_metadata.get(grantid)
            result[grantid] = {
                'events': serialized_events,
                'name': metadata.name if metadata else None,
                'created_on': metadata.created_on if metadata else None,
            }

        return result

    def query_grant_history(
            self,
            grant_id: Optional[int],
            from_ts: Optional[Timestamp] = None,
            to_ts: Optional[Timestamp] = None,
            only_cache: bool = False,
    ) -> Dict[int, Dict[str, Any]]:
        """Query the events of a grant, persist them and return them from the DB.

        May raise:
        - RemoteError if there is an error querying the gitcoin API
        - InputError if only_cache is False and grant_id is missing
        """
        if only_cache:
            return self.get_history_from_db(
                grant_id=grant_id,
                from_ts=from_ts,
                to_ts=to_ts,
            )

        if grant_id is None:
            raise InputError(
                'Attempted to query gitcoin events from the api without specifying a grant id',
            )

        entry_name = f'{GITCOIN_GRANTS_PREFIX}_{grant_id}'
        dbranges = DBQueryRanges(self.db)
        from_timestamp = GITCOIN_START_TS if from_ts is None else from_ts
        to_timestamp = ts_now() if to_ts is None else to_ts
        # Only query the sub-ranges not already covered by previous queries
        ranges = dbranges.get_location_query_ranges(
            location_string=entry_name,
            start_ts=from_timestamp,
            end_ts=to_timestamp,
        )
        grant_created_on: Optional[Timestamp] = None

        for period_range in ranges:
            actions, grant_created_on = self.query_grant_history_period(
                grant_id=grant_id,
                grant_created_on=grant_created_on,
                from_timestamp=period_range[0],
                to_timestamp=period_range[1],
            )
            self.db_ledger.add_ledger_actions(actions)

        dbranges.update_used_query_range(
            location_string=entry_name,
            start_ts=from_timestamp,
            end_ts=to_timestamp,
            ranges_to_query=ranges,
        )

        return self.get_history_from_db(
            grant_id=grant_id,
            from_ts=from_ts,
            to_ts=to_ts,
        )

    def query_grant_history_period(
            self,
            grant_id: int,
            grant_created_on: Optional[Timestamp],
            from_timestamp: Timestamp,
            to_timestamp: Timestamp,
    ) -> Tuple[List[LedgerAction], Optional[Timestamp]]:
        """Query one time period of a grant's history in 30-day chunks and
        deserialize the transactions (plus any in-range CLR payouts) into
        ledger actions. Returns the actions and the grant's creation timestamp."""
        transactions: List[Dict[str, Any]] = []
        if grant_created_on is None:
            # A query over an empty date range returns no contributions; only the
            # metadata (grant name / created_on) of the result is used here.
            query_str = (
                f'https://gitcoin.co/api/v0.1/grants/contributions_rec_report/'
                f'?id={grant_id}&from_timestamp=2017-09-25&to_timestamp=2017-09-25'
            )
            result = self._single_grant_api_query(query_str)
            try:
                grant_created_on = deserialize_timestamp_from_date(
                    date=result['metadata']['created_on'],
                    formatstr='%Y-%m-%dT%H:%M:%S',
                    location='Gitcoin API',
                    skip_milliseconds=True,
                )
                # No point querying before the grant existed
                from_timestamp = max(grant_created_on, from_timestamp)
                grant_name = result['metadata']['grant_name']
                self.db_ledger.set_gitcoin_grant_metadata(
                    grant_id=grant_id,
                    name=grant_name,
                    created_on=grant_created_on,
                )
            except (DeserializationError, KeyError) as e:
                msg = str(e)
                if isinstance(e, KeyError):
                    msg = f'Missing key entry for {msg}.'
                log.error(
                    f'Unexpected data encountered during deserialization of gitcoin api '
                    f'query: {result}. Error was: {msg}',
                )
                # continue with the given from_timestamp

        # Walk the period in month-sized steps
        step_to_timestamp = min(from_timestamp + MONTH_IN_SECONDS, to_timestamp)
        while from_timestamp != step_to_timestamp:
            transactions.extend(
                self.query_grant_history_period30d(
                    grant_id=grant_id,
                    from_ts=from_timestamp,
                    to_ts=Timestamp(step_to_timestamp),
                ),
            )
            from_timestamp = Timestamp(step_to_timestamp)
            step_to_timestamp = min(step_to_timestamp + MONTH_IN_SECONDS, to_timestamp)

        # Check if any of the clr_payouts are in the range
        if self.clr_payouts:
            for payout in self.clr_payouts:
                timestamp = deserialize_timestamp_from_date(
                    date=payout['timestamp'],
                    formatstr='%Y-%m-%dT%H:%M:%S',
                    location='Gitcoin API',
                    skip_milliseconds=True,
                )
                if from_timestamp <= timestamp <= to_timestamp:
                    # Rename 'round' to 'clr_round' before treating it as a transaction
                    round_num = payout.pop('round')
                    payout['clr_round'] = round_num
                    transactions.append(payout)

        actions = []
        for transaction in transactions:
            try:
                action = _deserialize_transaction(grant_id=grant_id, rawtx=transaction)
            except (DeserializationError, KeyError) as e:
                msg = str(e)
                if isinstance(e, KeyError):
                    msg = f'Missing key entry for {msg}.'
                self.db.msg_aggregator.add_error(
                    'Unexpected data encountered during deserialization of a gitcoin '
                    'api query. Check logs for details.',
                )
                log.error(
                    f'Unexpected data encountered during deserialization of gitcoin api '
                    f'query: {transaction}. Error was: {msg}',
                )
                continue
            except UnknownAsset as e:
                self.db.msg_aggregator.add_warning(
                    f'Found unknown asset {str(e)} in a gitcoin api event transaction. '
                    'Ignoring it.',
                )
                continue
            except ZeroGitcoinAmount:
                log.warning(f'Found gitcoin event with 0 amount for grant {grant_id}. Ignoring')
                continue

            actions.append(action)

        return actions, grant_created_on

    def query_grant_history_period30d(
            self,
            grant_id: int,
            from_ts: Timestamp,
            to_ts: Timestamp,
    ) -> List[Dict[str, Any]]:
        """Fetch all transaction pages of a grant for a single <=30 day window,
        caching the CLR payouts list from the first response that carries one."""
        transactions = []
        from_date = timestamp_to_date(from_ts, formatstr='%Y-%m-%d')
        to_date = timestamp_to_date(to_ts, formatstr='%Y-%m-%d')
        page = 1
        while True:
            query_str = (
                f'https://gitcoin.co/api/v0.1/grants/contributions_rec_report/'
                f'?id={grant_id}&from_timestamp={from_date}&to_timestamp={to_date}'
                f'&page={page}&format=json')
            result = self._single_grant_api_query(query_str)
            transactions.extend(result['transactions'])

            if self.clr_payouts is None:
                self.clr_payouts = result.get('clr_payouts', [])

            if result['metadata']['has_next'] is False:
                break
            # else next page
            page += 1

        return transactions