Example #1
def maybe_add_external_trades_to_history(
    db: DBHandler,
    start_ts: Timestamp,
    end_ts: Timestamp,
    history: List[Trade],
    msg_aggregator: MessagesAggregator,
) -> List[Trade]:
    """
    Queries the DB to get any external trades, adds them to the provided history and returns it.

    If there is an unexpected error during external trade deserialization, an error is logged.
    """
    serialized_external_trades = db.get_trades()
    try:
        external_trades = trades_from_dictlist(
            given_trades=serialized_external_trades,
            start_ts=start_ts,
            end_ts=end_ts,
            location='external trades',
            msg_aggregator=msg_aggregator,
        )
    except KeyError:
        msg_aggregator.add_error(
            'External trades in the DB are in an unrecognized format')
        return history

    history.extend(external_trades)
    history.sort(key=lambda trade: trade.timestamp)

    return history
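
Note how the helper reports problems through the MessagesAggregator instead of raising, so callers always get a usable history back and the UI can surface the error later. A minimal stand-in for the aggregator, assuming only the add_error/add_warning interface these examples call (a local sketch, not rotki's implementation):

from typing import List

class SimpleMessagesAggregator:
    """Toy collector of user-facing messages, for experimenting with the examples."""

    def __init__(self) -> None:
        self.errors: List[str] = []
        self.warnings: List[str] = []

    def add_error(self, msg: str) -> None:
        self.errors.append(msg)

    def add_warning(self, msg: str) -> None:
        self.warnings.append(msg)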
Example #2
def maybe_add_external_trades_to_history(
    db: DBHandler,
    start_ts: Timestamp,
    end_ts: Timestamp,
    history: List[Union[Trade, MarginPosition]],
    msg_aggregator: MessagesAggregator,
) -> List[Union[Trade, MarginPosition]]:
    """
    Queries the DB to get any external trades, adds them to the provided history and returns it.

    If there is an unexpected error during external trade deserialization, an error is logged.
    """
    serialized_external_trades = db.get_trades()
    try:
        external_trades = trades_from_dictlist(
            given_trades=serialized_external_trades,
            start_ts=start_ts,
            end_ts=end_ts,
            location='external trades',
            msg_aggregator=msg_aggregator,
        )
    except (KeyError, DeserializationError):
        msg_aggregator.add_error(
            'External trades in the DB are in an unrecognized format')
        return history

    history.extend(external_trades)
    # TODO: We also sort in one other place in this file and also in accountant.py
    #       Get rid of the unneeded cases?
    history.sort(key=lambda trade: action_get_timestamp(trade))

    return history
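
The differences from Example #1 are the wider Union type, the extra DeserializationError catch, and sorting via action_get_timestamp, which has to work for both Trade and MarginPosition. One plausible shape for that helper, assuming Trade exposes a timestamp field and MarginPosition a close_time field (both are assumptions here):

def action_get_timestamp(action: Union[Trade, MarginPosition]) -> Timestamp:
    # Hypothetical sketch: margin positions are keyed by their close time,
    # plain trades by their trade timestamp (Union/Timestamp/Trade/MarginPosition
    # are the same names used by the surrounding code)
    if isinstance(action, MarginPosition):
        return action.close_time
    return action.timestamp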
Example #3
def process_polo_loans(
    msg_aggregator: MessagesAggregator,
    data: List[Dict],
    start_ts: Timestamp,
    end_ts: Timestamp,
) -> List[Loan]:
    """Takes in the list of loans from poloniex as returned by the return_lending_history
    api call, processes it and returns it into our loan format
    """
    new_data = []

    for loan in reversed(data):
        log.debug('processing poloniex loan', **make_sensitive(loan))
        try:
            close_time = deserialize_timestamp_from_poloniex_date(
                loan['close'])
            open_time = deserialize_timestamp_from_poloniex_date(loan['open'])
            if open_time < start_ts:
                continue
            if close_time > end_ts:
                continue

            our_loan = Loan(
                location=Location.POLONIEX,
                open_time=open_time,
                close_time=close_time,
                currency=asset_from_poloniex(loan['currency']),
                fee=deserialize_fee(loan['fee']),
                earned=deserialize_asset_amount(loan['earned']),
                amount_lent=deserialize_asset_amount(loan['amount']),
            )
        except UnsupportedAsset as e:
            msg_aggregator.add_warning(
                f'Found poloniex loan with unsupported asset'
                f' {e.asset_name}. Ignoring it.', )
            continue
        except UnknownAsset as e:
            msg_aggregator.add_warning(
                f'Found poloniex loan with unknown asset'
                f' {e.asset_name}. Ignoring it.', )
            continue
        except (DeserializationError, KeyError) as e:
            msg = str(e)
            if isinstance(e, KeyError):
                msg = f'Missing key entry for {msg}.'
            msg_aggregator.add_error(
                'Deserialization error while reading a poloniex loan. Check '
                'logs for more details. Ignoring it.', )
            log.error(
                'Deserialization error while reading a poloniex loan',
                loan=loan,
                error=msg,
            )
            continue

        new_data.append(our_loan)

    new_data.sort(key=lambda loan: loan.open_time)
    return new_data
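
A hedged sketch of the input this function expects, derived only from the keys read above; the concrete values are invented for illustration:

polo_loans = [{
    'open': '2020-01-01 00:00:00',   # parsed by deserialize_timestamp_from_poloniex_date
    'close': '2020-01-02 00:00:00',
    'currency': 'BTC',               # mapped via asset_from_poloniex
    'fee': '0.0001',
    'earned': '0.001',
    'amount': '1.5',
}]
# loans = process_polo_loans(msg_aggregator, polo_loans, start_ts=Timestamp(0), end_ts=ts_now())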
Example #4
def query_zerion_address(
    ethereum: 'EthereumManager',
    msg_aggregator: MessagesAggregator,
) -> ChecksumEthAddress:
    """Queries the zerion contract address. If query fails, then last known
    address is used"""
    result = ethereum.ens_lookup('api.zerion.eth')
    if result is None:
        msg_aggregator.add_error(
            'Could not query api.zerion.eth address. Using last known address',
        )
        contract_address = ZERION_ADAPTER_ADDRESS
    else:
        contract_address = result

    return contract_address
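
Since the fallback only depends on ens_lookup returning None, the behavior is easy to exercise with a stub (the stub names are hypothetical):

class StubEthereumManager:
    def ens_lookup(self, name: str) -> None:
        return None  # simulate a failed 'api.zerion.eth' query

msgs = SimpleMessagesAggregator()  # the toy aggregator sketched after Example #1
assert query_zerion_address(StubEthereumManager(), msgs) == ZERION_ADAPTER_ADDRESS
assert len(msgs.errors) == 1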
Example #5
def query_usd_price_zero_if_error(
    asset: Asset,
    time: Timestamp,
    location: str,
    msg_aggregator: MessagesAggregator,
) -> Price:
    try:
        usd_price = PriceHistorian().query_historical_price(
            from_asset=asset,
            to_asset=A_USD,
            timestamp=time,
        )
    except (RemoteError, NoPriceForGivenTimestamp):
        msg_aggregator.add_error(
            f'Could not query usd price for {asset.identifier} and time {time} '
            f'when processing {location}. Using zero price', )
        usd_price = Price(ZERO)

    return usd_price
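
The Price(ZERO) fallback lets callers compute a (possibly zero) USD value without special-casing failures. A hypothetical call site, reusing names from Example #3:

usd_price = query_usd_price_zero_if_error(
    asset=our_loan.currency,
    time=our_loan.open_time,
    location='poloniex loan processing',  # free-form context echoed in the error message
    msg_aggregator=msg_aggregator,
)
usd_value = our_loan.amount_lent * usd_price  # a zero price simply yields a zero value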
Example #6
class TaskManager():
    def __init__(
        self,
        max_tasks_num: int,
        greenlet_manager: GreenletManager,
        api_task_greenlets: List[gevent.Greenlet],
        database: DBHandler,
        cryptocompare: Cryptocompare,
        premium_sync_manager: Optional[PremiumSyncManager],
        chain_manager: ChainManager,
        exchange_manager: ExchangeManager,
        evm_tx_decoder: 'EVMTransactionDecoder',
        eth_transactions: 'EthTransactions',
        deactivate_premium: Callable,
        query_balances: Callable,
    ) -> None:
        self.max_tasks_num = max_tasks_num
        self.greenlet_manager = greenlet_manager
        self.api_task_greenlets = api_task_greenlets
        self.database = database
        self.cryptocompare = cryptocompare
        self.exchange_manager = exchange_manager
        self.evm_tx_decoder = evm_tx_decoder
        self.eth_transactions = eth_transactions
        self.cryptocompare_queries: Set[CCHistoQuery] = set()
        self.chain_manager = chain_manager
        self.last_xpub_derivation_ts = 0
        self.last_eth_tx_query_ts: DefaultDict[ChecksumEthAddress,
                                               int] = defaultdict(int)
        self.last_exchange_query_ts: DefaultDict[ExchangeLocationID,
                                                 int] = defaultdict(int)
        self.base_entries_ignore_set: Set[str] = set()
        self.prepared_cryptocompare_query = False
        self.greenlet_manager.spawn_and_track(  # Needs to run in greenlet, is slow
            after_seconds=None,
            task_name='Prepare cryptocompare queries',
            exception_is_error=True,
            method=self._prepare_cryptocompare_queries,
        )
        self.deactivate_premium = deactivate_premium
        self.query_balances = query_balances
        self.last_premium_status_check = ts_now()
        self.msg_aggregator = MessagesAggregator()

        self.potential_tasks = [
            self._maybe_schedule_cryptocompare_query,
            self._maybe_schedule_xpub_derivation,
            self._maybe_query_ethereum_transactions,
            self._maybe_schedule_exchange_history_query,
            self._maybe_schedule_ethereum_txreceipts,
            self._maybe_query_missing_prices,
            self._maybe_decode_evm_transactions,
            self._maybe_check_premium_status,
            self._maybe_update_snapshot_balances,
        ]
        if premium_sync_manager is not None:
            self.potential_tasks.append(
                premium_sync_manager.maybe_upload_data_to_server)
        self.schedule_lock = gevent.lock.Semaphore()

    def _prepare_cryptocompare_queries(self) -> None:
        """Prepare the queries to do to cryptocompare

        This would be really slow if the entire json cache files were read but we
        have implemented get_cached_data_metadata to only read the relevant part of the file.
        Before doing that we had to yield with gevent.sleep() at each loop iteration.

        Runs only once in the beginning and then has a number of queries prepared
        for the task manager to schedule
        """
        now_ts = ts_now()
        if self.prepared_cryptocompare_query is True:
            return

        if len(self.cryptocompare_queries) != 0:
            return

        assets = self.database.query_owned_assets()
        main_currency = self.database.get_main_currency()
        for asset in assets:

            if asset.is_fiat() and main_currency.is_fiat():
                continue  # ignore fiat to fiat

            if asset.cryptocompare == '' or main_currency.cryptocompare == '':
                continue  # not supported in cryptocompare

            if asset.cryptocompare is None and asset.symbol is None:
                continue  # type: ignore  # asset.symbol may be None for auto generated underlying tokens # noqa: E501

            data_range = GlobalDBHandler().get_historical_price_range(
                from_asset=asset,
                to_asset=main_currency,
                source=HistoricalPriceOracle.CRYPTOCOMPARE,
            )
            if data_range is not None and now_ts - data_range[
                    1] < CRYPTOCOMPARE_QUERY_AFTER_SECS:
                continue

            self.cryptocompare_queries.add(
                CCHistoQuery(from_asset=asset, to_asset=main_currency))

        self.prepared_cryptocompare_query = True

    def _maybe_schedule_cryptocompare_query(self) -> bool:
        """Schedules a cryptocompare query for a single asset history"""
        if self.prepared_cryptocompare_query is False:
            return False

        if len(self.cryptocompare_queries) == 0:
            return False

        # If there is already a cryptocompare query running don't schedule another
        if any('Cryptocompare historical prices' in x.task_name
               for x in self.greenlet_manager.greenlets):
            return False

        now_ts = ts_now()
        # Make sure there is a long enough period between an asset's histohour queries
        # to avoid getting rate limited by cryptocompare
        if now_ts - self.cryptocompare.last_histohour_query_ts <= CRYPTOCOMPARE_HISTOHOUR_FREQUENCY:  # noqa: E501
            return False

        query = self.cryptocompare_queries.pop()
        task_name = f'Cryptocompare historical prices {query.from_asset} / {query.to_asset} query'
        log.debug(f'Scheduling task for {task_name}')
        self.greenlet_manager.spawn_and_track(
            after_seconds=None,
            task_name=task_name,
            exception_is_error=False,
            method=self.cryptocompare.query_and_store_historical_data,
            from_asset=query.from_asset,
            to_asset=query.to_asset,
            timestamp=now_ts,
        )
        return True

    def _maybe_schedule_xpub_derivation(self) -> None:
        """Schedules the xpub derivation task if enough time has passed and if user has xpubs"""
        now = ts_now()
        if now - self.last_xpub_derivation_ts <= XPUB_DERIVATION_FREQUENCY:
            return

        xpubs = self.database.get_bitcoin_xpub_data()
        if len(xpubs) == 0:
            return

        log.debug('Scheduling task for Xpub derivation')
        self.greenlet_manager.spawn_and_track(
            after_seconds=None,
            task_name='Derive new xpub addresses',
            exception_is_error=True,
            method=XpubManager(
                self.chain_manager).check_for_new_xpub_addresses,
        )
        self.last_xpub_derivation_ts = now

    def _maybe_query_ethereum_transactions(self) -> None:
        """Schedules the ethereum transaction query task if enough time has passed"""
        accounts = self.database.get_blockchain_accounts().eth
        if len(accounts) == 0:
            return

        now = ts_now()
        dbethtx = DBEthTx(self.database)
        queriable_accounts = []
        for account in accounts:
            _, end_ts = dbethtx.get_queried_range(account)
            if now - max(self.last_eth_tx_query_ts[account],
                         end_ts) > ETH_TX_QUERY_FREQUENCY:
                queriable_accounts.append(account)

        if len(queriable_accounts) == 0:
            return

        address = random.choice(queriable_accounts)
        task_name = f'Query ethereum transactions for {address}'
        log.debug(f'Scheduling task to {task_name}')
        self.greenlet_manager.spawn_and_track(
            after_seconds=None,
            task_name=task_name,
            exception_is_error=True,
            method=self.eth_transactions.single_address_query_transactions,
            address=address,
            start_ts=0,
            end_ts=now,
        )
        self.last_eth_tx_query_ts[address] = now

    def _maybe_schedule_ethereum_txreceipts(self) -> None:
        """Schedules the ethereum transaction receipts query task

        The DB check happens first here to see if scheduling would even be needed.
        But the DB query will happen again inside the query task, with the
        lock acquired.
        """
        dbethtx = DBEthTx(self.database)
        hash_results = dbethtx.get_transaction_hashes_no_receipt(
            tx_filter_query=None, limit=TX_RECEIPTS_QUERY_LIMIT)
        if len(hash_results) == 0:
            return

        task_name = f'Query {len(hash_results)} ethereum transactions receipts'
        log.debug(f'Scheduling task to {task_name}')
        self.greenlet_manager.spawn_and_track(
            after_seconds=None,
            task_name=task_name,
            exception_is_error=True,
            method=self.eth_transactions.
            get_receipts_for_transactions_missing_them,
            limit=TX_RECEIPTS_QUERY_LIMIT,
        )

    def _maybe_schedule_exchange_history_query(self) -> None:
        """Schedules the exchange history query task if enough time has passed"""
        if len(self.exchange_manager.connected_exchanges) == 0:
            return

        now = ts_now()
        queriable_exchanges = []
        for exchange in self.exchange_manager.iterate_exchanges():
            if exchange.location in (Location.BINANCE, Location.BINANCEUS):
                continue  # skip binance due to the way their history is queried and rate limiting
            queried_range = self.database.get_used_query_range(
                f'{str(exchange.location)}_trades')
            end_ts = queried_range[1] if queried_range else 0
            if now - max(self.last_exchange_query_ts[exchange.location_id()],
                         end_ts) > EXCHANGE_QUERY_FREQUENCY:
                queriable_exchanges.append(exchange)

        if len(queriable_exchanges) == 0:
            return

        exchange = random.choice(queriable_exchanges)
        task_name = f'Query history of {exchange.name} exchange'
        log.debug(f'Scheduling task to {task_name}')
        self.greenlet_manager.spawn_and_track(
            after_seconds=None,
            task_name=task_name,
            exception_is_error=True,
            method=exchange.query_history_with_callbacks,
            start_ts=0,
            end_ts=now,
            success_callback=noop_exchange_success_cb,
            fail_callback=exchange_fail_cb,
        )
        self.last_exchange_query_ts[exchange.location_id()] = now

    def _maybe_query_missing_prices(self) -> None:
        query_filter = HistoryEventFilterQuery.make(limit=100)
        entries = self.get_base_entries_missing_prices(query_filter)
        if len(entries) > 0:
            task_name = 'Periodically query history events prices'
            log.debug(f'Scheduling task to {task_name}')
            self.greenlet_manager.spawn_and_track(
                after_seconds=None,
                task_name=task_name,
                exception_is_error=True,
                method=self.query_missing_prices_of_base_entries,
                entries_missing_prices=entries,
            )

    def get_base_entries_missing_prices(
        self,
        query_filter: HistoryEventFilterQuery,
    ) -> List[Tuple[str, FVal, Asset, Timestamp]]:
        """
        Searches for base entries missing USD prices that have not previously been checked
        in this session.
        """
        # Use a deepcopy to avoid mutations in the filter query if it is used later
        db = DBHistoryEvents(self.database)
        new_query_filter = copy.deepcopy(query_filter)
        new_query_filter.filters.append(
            DBStringFilter(and_op=True, column='usd_value', value='0'), )
        new_query_filter.filters.append(
            DBIgnoreValuesFilter(
                and_op=True,
                column='identifier',
                values=list(self.base_entries_ignore_set),
            ), )
        return db.rows_missing_prices_in_base_entries(
            filter_query=new_query_filter)

    def query_missing_prices_of_base_entries(
        self,
        entries_missing_prices: List[Tuple[str, FVal, Asset, Timestamp]],
    ) -> None:
        """Queries missing prices for HistoryBaseEntry in database updating
        the price if it is found. Otherwise we add the id to the ignore list
        for this session.
        """
        inquirer = PriceHistorian()
        updates = []
        for identifier, amount, asset, timestamp in entries_missing_prices:
            try:
                price = inquirer.query_historical_price(
                    from_asset=asset,
                    to_asset=A_USD,
                    timestamp=timestamp,
                )
            except (NoPriceForGivenTimestamp, RemoteError) as e:
                log.error(
                    f'Failed to find price for {asset} at {timestamp} in base '
                    f'entry {identifier}. {str(e)}.', )
                self.base_entries_ignore_set.add(identifier)
                continue

            usd_value = amount * price
            updates.append((str(usd_value), identifier))

        query = 'UPDATE history_events SET usd_value=? WHERE rowid=?'
        cursor = self.database.conn.cursor()
        cursor.executemany(query, updates)
        self.database.update_last_write()

    def _maybe_decode_evm_transactions(self) -> None:
        """Schedules the evm transaction decoding task

        The DB check happens first here to see if scheduling would even be needed.
        But the DB query will happen again inside the query task, with the
        lock acquired.
        """
        dbethtx = DBEthTx(self.database)
        hashes = dbethtx.get_transaction_hashes_not_decoded(
            limit=TX_DECODING_LIMIT)
        hashes_length = len(hashes)
        if hashes_length > 0:
            task_name = f'decode {hashes_length} evm transactions'
            log.debug(f'Scheduling periodic task to {task_name}')
            self.greenlet_manager.spawn_and_track(
                after_seconds=None,
                task_name=task_name,
                exception_is_error=True,
                method=self.evm_tx_decoder.
                get_and_decode_undecoded_transactions,
                limit=TX_DECODING_LIMIT,
            )

    def _maybe_check_premium_status(self) -> None:
        """
        Validates the premium status of the account and if the credentials are not valid
        it deactivates the user's premium status.
        """
        now = ts_now()
        if now - self.last_premium_status_check < PREMIUM_STATUS_CHECK:
            return

        db_credentials = self.database.get_rotkehlchen_premium()
        if db_credentials:
            try:
                premium_create_and_verify(db_credentials)
            except PremiumAuthenticationError as e:
                message = (
                    f'Could not authenticate with the rotkehlchen server with '
                    f'the API keys found in the Database. Error: {str(e)}. Will '
                    f'deactivate the premium status.')
                self.msg_aggregator.add_error(message)
                self.deactivate_premium()
        self.last_premium_status_check = now

    def _maybe_update_snapshot_balances(self) -> None:
        """
        Update the balances of a user if the difference between last time they were updated
        and the current time exceeds the `balance_save_frequency`.
        """
        if self.database.should_save_balances():
            task_name = 'Periodically update snapshot balances'
            log.debug(f'Scheduling task to {task_name}')
            self.greenlet_manager.spawn_and_track(
                after_seconds=None,
                task_name=task_name,
                exception_is_error=True,
                method=self.query_balances,
                requested_save_data=True,
                save_despite_errors=False,
                timestamp=None,
                ignore_cache=True,
            )

    def _schedule(self) -> None:
        """Schedules background tasks"""
        self.greenlet_manager.clear_finished()
        current_greenlets = len(self.greenlet_manager.greenlets) + len(
            self.api_task_greenlets)
        not_proceed = current_greenlets >= self.max_tasks_num
        log.debug(
            f'At task scheduling. Current greenlets: {current_greenlets} '
            f'Max greenlets: {self.max_tasks_num}. '
            f'{"Will not schedule" if not_proceed else "Will schedule"}.', )
        if not_proceed:
            return  # too busy

        callables = random.sample(
            population=self.potential_tasks,
            k=min(self.max_tasks_num - current_greenlets,
                  len(self.potential_tasks)),
        )

        for callable_fn in callables:
            callable_fn()

    def schedule(self) -> None:
        """Schedules background task while holding the scheduling lock

        Used during logout to make sure no task is being scheduled at the same time
        as logging out
        """
        with self.schedule_lock:
            self._schedule()
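
The pattern throughout this class: each _maybe_* method checks its own preconditions (enough time elapsed, pending data, no duplicate greenlet already running) and returns without doing anything if they are not met, while _schedule just samples as many callables as free slots allow. A self-contained toy version of that loop, without gevent (illustrative only):

import random
from typing import Callable, List

class MiniTaskScheduler:
    def __init__(self, max_tasks_num: int, potential_tasks: List[Callable[[], None]]) -> None:
        self.max_tasks_num = max_tasks_num
        self.potential_tasks = potential_tasks
        self.running_tasks = 0  # stand-in for the greenlet bookkeeping above

    def schedule(self) -> None:
        free_slots = self.max_tasks_num - self.running_tasks
        if free_slots <= 0:
            return  # too busy, mirroring the early return in _schedule
        # Random sampling spreads the work across task types over successive calls
        for task in random.sample(
            self.potential_tasks,
            k=min(free_slots, len(self.potential_tasks)),
        ):
            task()  # each task re-checks its own timing/preconditions internally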
Example #7
class Rotkehlchen():
    def __init__(self, args: argparse.Namespace) -> None:
        """Initialize the Rotkehlchen object

        May Raise:
        - SystemPermissionError if the given data directory's permissions
        are not correct.
        """
        self.lock = Semaphore()
        self.lock.acquire()

        # Can also be None after unlock if premium credentials did not
        # authenticate or premium server temporarily offline
        self.premium: Optional[Premium] = None
        self.user_is_logged_in: bool = False
        configure_logging(args)

        self.sleep_secs = args.sleep_secs
        if args.data_dir is None:
            self.data_dir = default_data_directory()
        else:
            self.data_dir = Path(args.data_dir)

        if not os.access(self.data_dir, os.W_OK | os.R_OK):
            raise SystemPermissionError(
                f'The given data directory {self.data_dir} is not readable or writable',
            )
        self.args = args
        self.msg_aggregator = MessagesAggregator()
        self.greenlet_manager = GreenletManager(msg_aggregator=self.msg_aggregator)
        self.exchange_manager = ExchangeManager(msg_aggregator=self.msg_aggregator)
        # Initialize the AssetResolver singleton
        AssetResolver(data_directory=self.data_dir)
        self.data = DataHandler(self.data_dir, self.msg_aggregator)
        self.cryptocompare = Cryptocompare(data_directory=self.data_dir, database=None)
        self.coingecko = Coingecko()
        self.icon_manager = IconManager(data_dir=self.data_dir, coingecko=self.coingecko)
        self.greenlet_manager.spawn_and_track(
            after_seconds=None,
            task_name='periodically_query_icons_until_all_cached',
            method=self.icon_manager.periodically_query_icons_until_all_cached,
            batch_size=ICONS_BATCH_SIZE,
            sleep_time_secs=ICONS_QUERY_SLEEP,
        )
        # Initialize the Inquirer singleton
        Inquirer(
            data_dir=self.data_dir,
            cryptocompare=self.cryptocompare,
            coingecko=self.coingecko,
        )
        # Tracks how many trades we have found per location. Used for free-user limiting
        self.actions_per_location: Dict[str, Dict[Location, int]] = {
            'trade': defaultdict(int),
            'asset_movement': defaultdict(int),
        }

        self.lock.release()
        self.shutdown_event = gevent.event.Event()

    def reset_after_failed_account_creation_or_login(self) -> None:
        """If the account creation or login failed make sure that the Rotki instance is clear

        Tricky cases arise when we log in again after either failed premium
        credentials or a user's refusal to sync premium databases.
        """
        self.cryptocompare.db = None

    def unlock_user(
            self,
            user: str,
            password: str,
            create_new: bool,
            sync_approval: Literal['yes', 'no', 'unknown'],
            premium_credentials: Optional[PremiumCredentials],
            initial_settings: Optional[ModifiableDBSettings] = None,
    ) -> None:
        """Unlocks an existing user or creates a new one if `create_new` is True

        May raise:
        - AuthenticationError if the password can't unlock the database.
        - PremiumAuthenticationError if premium_credentials are given and are invalid
        or can't authenticate with the server
        - DBUpgradeError if the rotki DB version is newer than the software or
        there is a DB upgrade and there is an error.
        - SystemPermissionError if the directory or DB file can not be accessed
        """
        log.info(
            'Unlocking user',
            user=user,
            create_new=create_new,
            sync_approval=sync_approval,
            initial_settings=initial_settings,
        )

        # unlock or create the DB
        self.password = password
        self.user_directory = self.data.unlock(user, password, create_new, initial_settings)
        self.data_importer = DataImporter(db=self.data.db)
        self.last_data_upload_ts = self.data.db.get_last_data_upload_ts()
        self.premium_sync_manager = PremiumSyncManager(data=self.data, password=password)
        # set the DB in the external services instances that need it
        self.cryptocompare.set_database(self.data.db)

        # Anything that was set above here has to be cleaned in case of failure in the next step
        # by reset_after_failed_account_creation_or_login()
        try:
            self.premium = self.premium_sync_manager.try_premium_at_start(
                given_premium_credentials=premium_credentials,
                username=user,
                create_new=create_new,
                sync_approval=sync_approval,
            )
        except PremiumAuthenticationError:
            # Reraise it only if this is during the creation of a new account where
            # the premium credentials were given by the user
            if create_new:
                raise
            self.msg_aggregator.add_error(
                'Tried to synchronize the database from remote but the local password '
                'does not match the one the remote DB has. Please change the password '
                'to be the same as the password of the account you want to sync from ',
            )
            # else let's just continue. The user signed in successfully, but
            # invalid/unauthenticatable premium credentials remain in their DB

        settings = self.get_settings()
        self.greenlet_manager.spawn_and_track(
            after_seconds=None,
            task_name='submit_usage_analytics',
            method=maybe_submit_usage_analytics,
            should_submit=settings.submit_usage_analytics,
        )
        self.etherscan = Etherscan(database=self.data.db, msg_aggregator=self.msg_aggregator)
        self.beaconchain = BeaconChain(database=self.data.db, msg_aggregator=self.msg_aggregator)
        historical_data_start = settings.historical_data_start
        eth_rpc_endpoint = settings.eth_rpc_endpoint
        # Initialize the price historian singleton
        PriceHistorian(
            data_directory=self.data_dir,
            history_date_start=historical_data_start,
            cryptocompare=self.cryptocompare,
        )
        self.accountant = Accountant(
            db=self.data.db,
            user_directory=self.user_directory,
            msg_aggregator=self.msg_aggregator,
            create_csv=True,
        )

        # Initialize the rotkehlchen logger
        LoggingSettings(anonymized_logs=settings.anonymized_logs)
        exchange_credentials = self.data.db.get_exchange_credentials()
        self.exchange_manager.initialize_exchanges(
            exchange_credentials=exchange_credentials,
            database=self.data.db,
        )

        # Initialize blockchain querying modules
        ethereum_manager = EthereumManager(
            ethrpc_endpoint=eth_rpc_endpoint,
            etherscan=self.etherscan,
            database=self.data.db,
            msg_aggregator=self.msg_aggregator,
            greenlet_manager=self.greenlet_manager,
            connect_at_start=ETHEREUM_NODES_TO_CONNECT_AT_START,
        )
        Inquirer().inject_ethereum(ethereum_manager)
        self.chain_manager = ChainManager(
            blockchain_accounts=self.data.db.get_blockchain_accounts(),
            ethereum_manager=ethereum_manager,
            msg_aggregator=self.msg_aggregator,
            database=self.data.db,
            greenlet_manager=self.greenlet_manager,
            premium=self.premium,
            eth_modules=settings.active_modules,
            data_directory=self.data_dir,
            beaconchain=self.beaconchain,
        )
        self.trades_historian = TradesHistorian(
            user_directory=self.user_directory,
            db=self.data.db,
            msg_aggregator=self.msg_aggregator,
            exchange_manager=self.exchange_manager,
            chain_manager=self.chain_manager,
        )
        self.user_is_logged_in = True
        log.debug('User unlocking complete')

    def logout(self) -> None:
        if not self.user_is_logged_in:
            return

        user = self.data.username
        log.info(
            'Logging out user',
            user=user,
        )
        self.greenlet_manager.clear()
        del self.chain_manager
        self.exchange_manager.delete_all_exchanges()

        # Reset rotkehlchen logger to default
        LoggingSettings(anonymized_logs=DEFAULT_ANONYMIZED_LOGS)

        del self.accountant
        del self.trades_historian
        del self.data_importer

        if self.premium is not None:
            del self.premium
        self.data.logout()
        self.password = ''
        self.cryptocompare.unset_database()

        # Make sure no messages leak to other user sessions
        self.msg_aggregator.consume_errors()
        self.msg_aggregator.consume_warnings()

        self.user_is_logged_in = False
        log.info(
            'User successfully logged out',
            user=user,
        )

    def set_premium_credentials(self, credentials: PremiumCredentials) -> None:
        """
        Sets the premium credentials for Rotki

        Raises PremiumAuthenticationError if the given key is rejected by the Rotkehlchen server
        """
        log.info('Setting new premium credentials')
        if self.premium is not None:
            self.premium.set_credentials(credentials)
        else:
            self.premium = premium_create_and_verify(credentials)

        self.data.db.set_rotkehlchen_premium(credentials)

    def delete_premium_credentials(self) -> Tuple[bool, str]:
        """Deletes the premium credentials for Rotki"""
        msg = ''

        success = self.data.db.del_rotkehlchen_premium()
        if success is False:
            msg = 'The database was unable to delete the Premium keys for the logged-in user'
        self.deactivate_premium_status()
        return success, msg

    def deactivate_premium_status(self) -> None:
        """Deactivate premium in the current session"""
        self.premium = None
        self.premium_sync_manager.premium = None
        self.chain_manager.deactivate_premium_status()

    def start(self) -> gevent.Greenlet:
        return gevent.spawn(self.main_loop)

    def main_loop(self) -> None:
        """Rotki main loop that fires often and manages many different tasks

        Each task remembers the last time it ran successfully and knows how often it
        should run, so each task manages itself.
        """
        # super hacky -- organize better when recurring tasks are implemented
        # https://github.com/rotki/rotki/issues/1106
        xpub_derivation_scheduled = False
        while self.shutdown_event.wait(MAIN_LOOP_SECS_DELAY) is not True:
            if self.user_is_logged_in:
                log.debug('Main loop start')
                self.premium_sync_manager.maybe_upload_data_to_server()
                if not xpub_derivation_scheduled:
                    # 1 minute into the app's startup, try to derive new xpub addresses
                    self.greenlet_manager.spawn_and_track(
                        after_seconds=60.0,
                        task_name='Derive new xpub addresses',
                        method=XpubManager(self.chain_manager).check_for_new_xpub_addresses,
                    )
                    xpub_derivation_scheduled = True
                log.debug('Main loop end')

    def get_blockchain_account_data(
            self,
            blockchain: SupportedBlockchain,
    ) -> Union[List[BlockchainAccountData], Dict[str, Any]]:
        account_data = self.data.db.get_blockchain_account_data(blockchain)
        if blockchain != SupportedBlockchain.BITCOIN:
            return account_data

        xpub_data = self.data.db.get_bitcoin_xpub_data()
        addresses_to_account_data = {x.address: x for x in account_data}
        address_to_xpub_mappings = self.data.db.get_addresses_to_xpub_mapping(
            list(addresses_to_account_data.keys()),  # type: ignore
        )

        xpub_mappings: Dict['XpubData', List[BlockchainAccountData]] = {}
        for address, xpub_entry in address_to_xpub_mappings.items():
            if xpub_entry not in xpub_mappings:
                xpub_mappings[xpub_entry] = []
            xpub_mappings[xpub_entry].append(addresses_to_account_data[address])

        data: Dict[str, Any] = {'standalone': [], 'xpubs': []}
        # Add xpub data
        for xpub_entry in xpub_data:
            data_entry = xpub_entry.serialize()
            addresses = xpub_mappings.get(xpub_entry, None)
            data_entry['addresses'] = addresses if addresses and len(addresses) != 0 else None
            data['xpubs'].append(data_entry)
        # Add standalone addresses
        for account in account_data:
            if account.address not in address_to_xpub_mappings:
                data['standalone'].append(account)

        return data

    def add_blockchain_accounts(
            self,
            blockchain: SupportedBlockchain,
            account_data: List[BlockchainAccountData],
    ) -> BlockchainBalancesUpdate:
        """Adds new blockchain accounts

        Adds the accounts to the blockchain instance and queries them to get the
        updated balances. Also adds them to the DB

        May raise:
        - EthSyncError from modify_blockchain_account
        - InputError if the given accounts list is empty.
        - TagConstraintError if any of the given account data contain unknown tags.
        - RemoteError if an external service such as Etherscan is queried and
          there is a problem with its query.
        """
        self.data.db.ensure_tags_exist(
            given_data=account_data,
            action='adding',
            data_type='blockchain accounts',
        )
        address_type = blockchain.get_address_type()
        updated_balances = self.chain_manager.add_blockchain_accounts(
            blockchain=blockchain,
            accounts=[address_type(entry.address) for entry in account_data],
        )
        self.data.db.add_blockchain_accounts(
            blockchain=blockchain,
            account_data=account_data,
        )

        return updated_balances

    def edit_blockchain_accounts(
            self,
            blockchain: SupportedBlockchain,
            account_data: List[BlockchainAccountData],
    ) -> None:
        """Edits blockchain accounts

        Edits blockchain account data for the given accounts

        May raise:
        - InputError if the given accounts list is empty or if
        any of the accounts to edit do not exist.
        - TagConstraintError if any of the given account data contain unknown tags.
        """
        # First check for validity of account data addresses
        if len(account_data) == 0:
            raise InputError('Empty list of blockchain account data to edit was given')
        accounts = [x.address for x in account_data]
        unknown_accounts = set(accounts).difference(self.chain_manager.accounts.get(blockchain))
        if len(unknown_accounts) != 0:
            raise InputError(
                f'Tried to edit unknown {blockchain.value} '
                f'accounts {",".join(unknown_accounts)}',
            )

        self.data.db.ensure_tags_exist(
            given_data=account_data,
            action='editing',
            data_type='blockchain accounts',
        )

        # Finally edit the accounts
        self.data.db.edit_blockchain_accounts(
            blockchain=blockchain,
            account_data=account_data,
        )

        return None

    def remove_blockchain_accounts(
            self,
            blockchain: SupportedBlockchain,
            accounts: ListOfBlockchainAddresses,
    ) -> BlockchainBalancesUpdate:
        """Removes blockchain accounts

        Removes the accounts from the blockchain instance and queries them to get
        the updated balances. Also removes them from the DB

        May raise:
        - RemoteError if an external service such as Etherscan is queried and
          there is a problem with its query.
        - InputError if a non-existing account was given to remove
        """
        balances_update = self.chain_manager.remove_blockchain_accounts(
            blockchain=blockchain,
            accounts=accounts,
        )
        self.data.db.remove_blockchain_accounts(blockchain, accounts)
        return balances_update

    def process_history(
            self,
            start_ts: Timestamp,
            end_ts: Timestamp,
    ) -> Tuple[Dict[str, Any], str]:
        (
            error_or_empty,
            history,
            loan_history,
            asset_movements,
            eth_transactions,
            defi_events,
        ) = self.trades_historian.get_history(
            start_ts=start_ts,
            end_ts=end_ts,
            has_premium=self.premium is not None,
        )
        result = self.accountant.process_history(
            start_ts=start_ts,
            end_ts=end_ts,
            trade_history=history,
            loan_history=loan_history,
            asset_movements=asset_movements,
            eth_transactions=eth_transactions,
            defi_events=defi_events,
        )
        return result, error_or_empty

    @overload
    def _apply_actions_limit(
            self,
            location: Location,
            action_type: Literal['trade'],
            location_actions: TRADES_LIST,
            all_actions: TRADES_LIST,
    ) -> TRADES_LIST:
        ...

    @overload
    def _apply_actions_limit(
            self,
            location: Location,
            action_type: Literal['asset_movement'],
            location_actions: List[AssetMovement],
            all_actions: List[AssetMovement],
    ) -> List[AssetMovement]:
        ...

    def _apply_actions_limit(
            self,
            location: Location,
            action_type: Literal['trade', 'asset_movement'],
            location_actions: Union[TRADES_LIST, List[AssetMovement]],
            all_actions: Union[TRADES_LIST, List[AssetMovement]],
    ) -> Union[TRADES_LIST, List[AssetMovement]]:
        """Take as many actions from location actions and add them to all actions as the limit permits

        Returns the modified (or not) all_actions
        """
        # If we are already at or above the limit return current actions disregarding this location
        actions_mapping = self.actions_per_location[action_type]
        current_num_actions = sum(x for _, x in actions_mapping.items())
        limit = LIMITS_MAPPING[action_type]
        if current_num_actions >= limit:
            return all_actions

        # Find out how many more actions we can return, and depending on that get
        # the number of actions from the location actions and add them to the total
        remaining_num_actions = limit - current_num_actions
        if remaining_num_actions < 0:
            remaining_num_actions = 0

        num_actions_to_take = min(len(location_actions), remaining_num_actions)

        actions_mapping[location] = num_actions_to_take
        all_actions.extend(location_actions[0:num_actions_to_take])  # type: ignore
        return all_actions

    def query_trades(
            self,
            from_ts: Timestamp,
            to_ts: Timestamp,
            location: Optional[Location],
    ) -> TRADES_LIST:
        """Queries trades for the given location and time range.
        If no location is given then all external, all exchange and DEX trades are queried.

        DEX trades are queried only if the user has premium.
        If the user does not have premium then a trade limit is applied.

        May raise:
        - RemoteError: If there are problems connecting to any of the remote exchanges
        """
        trades: TRADES_LIST
        if location is not None:
            trades = self.query_location_trades(from_ts, to_ts, location)
        else:
            trades = self.query_location_trades(from_ts, to_ts, Location.EXTERNAL)
            # crypto.com is not an API key supported exchange but user can import from CSV
            trades.extend(self.query_location_trades(from_ts, to_ts, Location.CRYPTOCOM))
            for name, exchange in self.exchange_manager.connected_exchanges.items():
                exchange_trades = exchange.query_trade_history(start_ts=from_ts, end_ts=to_ts)
                if self.premium is None:
                    trades = self._apply_actions_limit(
                        location=deserialize_location(name),
                        action_type='trade',
                        location_actions=exchange_trades,
                        all_actions=trades,
                    )
                else:
                    trades.extend(exchange_trades)

            # for all trades we also need uniswap trades
            if self.premium is not None:
                uniswap = self.chain_manager.uniswap
                if uniswap is not None:
                    trades.extend(
                        uniswap.get_trades(
                            addresses=self.chain_manager.queried_addresses_for_module('uniswap'),
                            from_timestamp=from_ts,
                            to_timestamp=to_ts,
                        ),
                    )

        # return trades with most recent first
        trades.sort(key=lambda x: x.timestamp, reverse=True)
        return trades

    def query_location_trades(
            self,
            from_ts: Timestamp,
            to_ts: Timestamp,
            location: Location,
    ) -> TRADES_LIST:
        # clear the trades queried for this location
        self.actions_per_location['trade'][location] = 0

        location_trades: TRADES_LIST
        if location in (Location.EXTERNAL, Location.CRYPTOCOM):
            location_trades = self.data.db.get_trades(  # type: ignore  # list invariance
                from_ts=from_ts,
                to_ts=to_ts,
                location=location,
            )
        elif location == Location.UNISWAP:
            if self.premium is not None:
                uniswap = self.chain_manager.uniswap
                if uniswap is not None:
                    location_trades = uniswap.get_trades(  # type: ignore  # list invariance
                        addresses=self.chain_manager.queried_addresses_for_module('uniswap'),
                        from_timestamp=from_ts,
                        to_timestamp=to_ts,
                    )
        else:
            # should only be an exchange
            exchange = self.exchange_manager.get(str(location))
            if not exchange:
                logger.warning(
                    f'Tried to query trades from {location} which is either not an '
                    f'exchange or not an exchange the user has connected to',
                )
                return []

            location_trades = exchange.query_trade_history(start_ts=from_ts, end_ts=to_ts)

        trades: TRADES_LIST = []
        if self.premium is None:
            trades = self._apply_actions_limit(
                location=location,
                action_type='trade',
                location_actions=location_trades,
                all_actions=trades,
            )
        else:
            trades = location_trades

        return trades

    def query_balances(
            self,
            requested_save_data: bool = False,
            timestamp: Optional[Timestamp] = None,
            ignore_cache: bool = False,
    ) -> Dict[str, Any]:
        """Query all balances rotkehlchen can see.

        If requested_save_data is True then the data are always saved in the DB,
        if it is False then data are saved if self.data.should_save_balances()
        is True.
        If timestamp is None then the current timestamp is used.
        If a timestamp is given then that is the time that the balances are going
        to be saved in the DB
        If ignore_cache is True then all underlying calls that have a cache ignore it

        Returns a dictionary with the queried balances.
        """
        log.info('query_balances called', requested_save_data=requested_save_data)

        balances = {}
        problem_free = True
        for _, exchange in self.exchange_manager.connected_exchanges.items():
            exchange_balances, _ = exchange.query_balances(ignore_cache=ignore_cache)
            # If we got an error, disregard that exchange but make sure we don't save data
            if not isinstance(exchange_balances, dict):
                problem_free = False
            else:
                balances[exchange.name] = exchange_balances

        try:
            blockchain_result = self.chain_manager.query_balances(
                blockchain=None,
                force_token_detection=ignore_cache,
                ignore_cache=ignore_cache,
            )
            serialized_chain_result = blockchain_result.totals.to_dict()
            balances['blockchain'] = serialized_chain_result['assets']
        except (RemoteError, EthSyncError) as e:
            problem_free = False
            log.error(f'Querying blockchain balances failed due to: {str(e)}')

        balances = account_for_manually_tracked_balances(db=self.data.db, balances=balances)

        combined = combine_stat_dicts([v for k, v in balances.items()])
        total_usd_per_location = [(k, dict_get_sumof(v, 'usd_value')) for k, v in balances.items()]
        liabilities = serialized_chain_result['liabilities']  # atm liabilities only on chain

        # calculate net usd value
        net_usd = ZERO
        for _, v in combined.items():
            net_usd += FVal(v['usd_value'])
        # subtract liabilities
        liabilities_total_usd = sum(x['usd_value'] for _, x in liabilities.items())
        net_usd -= liabilities_total_usd

        stats: Dict[str, Any] = {
            'location': {
            },
            'net_usd': net_usd,
        }
        for entry in total_usd_per_location:
            name = entry[0]
            total = entry[1]
            if name == 'blockchain':  # blockchain is the only location with liabilities atm
                total -= liabilities_total_usd

            if net_usd != ZERO:
                percentage = (total / net_usd).to_percentage()
            else:
                percentage = '0%'
            stats['location'][name] = {
                'usd_value': total,
                'percentage_of_net_value': percentage,
            }

        for k, v in combined.items():
            if net_usd != ZERO:
                percentage = (v['usd_value'] / net_usd).to_percentage()
            else:
                percentage = '0%'
            combined[k]['percentage_of_net_value'] = percentage

        for k, v in liabilities.items():
            if net_usd != ZERO:
                percentage = (v['usd_value'] / net_usd).to_percentage()
            else:
                percentage = '0%'
            liabilities[k]['percentage_of_net_value'] = percentage

        balance_sheet = {
            'assets': combined,
            'liabilities': liabilities,
        }

        result_dict = merge_dicts(balance_sheet, stats)

        allowed_to_save = requested_save_data or self.data.should_save_balances()

        if problem_free and allowed_to_save:
            if not timestamp:
                timestamp = Timestamp(int(time.time()))
            self.data.db.save_balances_data(data=result_dict, timestamp=timestamp)
            log.debug('query_balances data saved')
        else:
            log.debug(
                'query_balances data not saved',
                allowed_to_save=allowed_to_save,
                problem_free=problem_free,
            )

        return result_dict

    def _query_exchange_asset_movements(
            self,
            from_ts: Timestamp,
            to_ts: Timestamp,
            all_movements: List[AssetMovement],
            exchange: Union[ExchangeInterface, Location],
    ) -> List[AssetMovement]:
        if isinstance(exchange, ExchangeInterface):
            location = deserialize_location(exchange.name)
            # clear the asset movements queried for this exchange
            self.actions_per_location['asset_movement'][location] = 0
            location_movements = exchange.query_deposits_withdrawals(
                start_ts=from_ts,
                end_ts=to_ts,
            )
        else:
            assert isinstance(exchange, Location), 'only a location should make it here'
            assert exchange == Location.CRYPTOCOM, 'only cryptocom should make it here'
            location = exchange
            # cryptocom has no exchange integration but we may have DB entries
            self.actions_per_location['asset_movement'][location] = 0
            location_movements = self.data.db.get_asset_movements(
                from_ts=from_ts,
                to_ts=to_ts,
                location=str(location),
            )

        movements: List[AssetMovement] = []
        if self.premium is None:
            movements = self._apply_actions_limit(
                location=location,
                action_type='asset_movement',
                location_actions=location_movements,
                all_actions=all_movements,
            )
        else:
            all_movements.extend(location_movements)
            movements = all_movements

        return movements

    def query_asset_movements(
            self,
            from_ts: Timestamp,
            to_ts: Timestamp,
            location: Optional[Location],
    ) -> List[AssetMovement]:
        """Queries AssetMovements for the given location and time range.

        If no location is given then all exchange asset movements are queried.
        If the user does not have premium then a limit is applied.
        May raise:
        - RemoteError: If there are problems connecting to any of the remote exchanges
        """
        movements: List[AssetMovement] = []
        if location is not None:
            if location == Location.CRYPTOCOM:
                movements = self._query_exchange_asset_movements(
                    from_ts=from_ts,
                    to_ts=to_ts,
                    all_movements=movements,
                    exchange=Location.CRYPTOCOM,
                )
            else:
                exchange = self.exchange_manager.get(str(location))
                if not exchange:
                    logger.warning(
                        f'Tried to query deposits/withdrawals from {location} which is either '
                        f'not an exchange or not an exchange the user has connected to',
                    )
                    return []
                movements = self._query_exchange_asset_movements(
                    from_ts=from_ts,
                    to_ts=to_ts,
                    all_movements=movements,
                    exchange=exchange,
                )
        else:
            # cryptocom has no exchange integration but we may have DB entries due to csv import
            movements = self._query_exchange_asset_movements(
                from_ts=from_ts,
                to_ts=to_ts,
                all_movements=movements,
                exchange=Location.CRYPTOCOM,
            )
            for _, exchange in self.exchange_manager.connected_exchanges.items():
                self._query_exchange_asset_movements(
                    from_ts=from_ts,
                    to_ts=to_ts,
                    all_movements=movements,
                    exchange=exchange,
                )

        # return movements with most recent first
        movements.sort(key=lambda x: x.timestamp, reverse=True)
        return movements

    def set_settings(self, settings: ModifiableDBSettings) -> Tuple[bool, str]:
        """Tries to set new settings. Returns True in success or False with message if error"""
        with self.lock:
            if settings.eth_rpc_endpoint is not None:
                result, msg = self.chain_manager.set_eth_rpc_endpoint(settings.eth_rpc_endpoint)
                if not result:
                    return False, msg

            if settings.kraken_account_type is not None:
                kraken = self.exchange_manager.get('kraken')
                if kraken:
                    kraken.set_account_type(settings.kraken_account_type)  # type: ignore

            self.data.db.set_settings(settings)
            return True, ''

    def get_settings(self) -> DBSettings:
        """Returns the db settings with a check whether premium is active or not"""
        db_settings = self.data.db.get_settings(have_premium=self.premium is not None)
        return db_settings

    def setup_exchange(
            self,
            name: str,
            api_key: ApiKey,
            api_secret: ApiSecret,
            passphrase: Optional[str] = None,
    ) -> Tuple[bool, str]:
        """
        Setup a new exchange with an api key and an api secret and optionally a passphrase

        The api keys are validated as part of the setup.
        """
        is_success, msg = self.exchange_manager.setup_exchange(
            name=name,
            api_key=api_key,
            api_secret=api_secret,
            database=self.data.db,
            passphrase=passphrase,
        )

        if is_success:
            # Success, save the result in the DB
            self.data.db.add_exchange(name, api_key, api_secret, passphrase=passphrase)
        return is_success, msg

    def remove_exchange(self, name: str) -> Tuple[bool, str]:
        if not self.exchange_manager.has_exchange(name):
            return False, f'Exchange {name} is not registered'

        self.exchange_manager.delete_exchange(name)
        # Success, remove it also from the DB
        self.data.db.remove_exchange(name)
        self.data.db.delete_used_query_range_for_exchange(name)
        return True, ''

    def query_periodic_data(self) -> Dict[str, Union[bool, Timestamp]]:
        """Query for frequently changing data"""
        result: Dict[str, Union[bool, Timestamp]] = {}

        if self.user_is_logged_in:
            result['last_balance_save'] = self.data.db.get_last_balance_save_time()
            result['eth_node_connection'] = self.chain_manager.ethereum.web3_mapping.get(NodeName.OWN, None) is not None  # noqa: E501
            result['history_process_start_ts'] = self.accountant.started_processing_timestamp
            result['history_process_current_ts'] = self.accountant.currently_processing_timestamp
            result['last_data_upload_ts'] = Timestamp(self.premium_sync_manager.last_data_upload_ts)  # noqa: E501
        return result

    def shutdown(self) -> None:
        self.logout()
        self.shutdown_event.set()
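
Worth isolating from the class above is the free-tier limiting done by _apply_actions_limit: a per-session counter of actions already returned per location, capped by a global per-type limit. A stripped-down sketch of the same arithmetic (the limit value is hypothetical):

from collections import defaultdict
from typing import Any, Dict, List

FREE_TRADES_LIMIT = 250  # hypothetical stand-in for LIMITS_MAPPING['trade']

def apply_actions_limit(
    actions_mapping: Dict[str, int],
    location: str,
    location_actions: List[Any],
    all_actions: List[Any],
) -> List[Any]:
    current = sum(actions_mapping.values())
    if current >= FREE_TRADES_LIMIT:
        return all_actions  # cap already reached; this location contributes nothing
    take = min(len(location_actions), FREE_TRADES_LIMIT - current)
    actions_mapping[location] = take
    all_actions.extend(location_actions[:take])
    return all_actions

counts: Dict[str, int] = defaultdict(int)
trades = apply_actions_limit(counts, 'kraken', list(range(300)), [])
trades = apply_actions_limit(counts, 'binance', list(range(100)), trades)
assert len(trades) == 250  # the cap leaves no room for the second exchange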
Example #8
def history_event_from_kraken(
    events: List[Dict[str, Any]],
    name: str,
    msg_aggregator: MessagesAggregator,
) -> Tuple[List[HistoryBaseEntry], bool]:
    """
    This function gets raw data from kraken and creates a list of related history events
    to be used in the app. It returns a list of events and a boolean in the case that an unknown
    type is found.
    """
    group_events = []
    found_unknown_event = False
    current_fee_index = len(events)
    for idx, raw_event in enumerate(events):
        try:
            timestamp = TimestampMS((deserialize_fval(
                value=raw_event['time'],
                name='time',
                location='kraken ledger processing',
            ) * 1000).to_int(exact=False))
            identifier = raw_event['refid']
            event_type = kraken_ledger_entry_type_to_ours(raw_event['type'])
            asset = asset_from_kraken(raw_event['asset'])
            event_subtype = HistoryEventSubType.NONE
            notes = None
            raw_amount = deserialize_asset_amount(raw_event['amount'])
            # If we don't know how to handle an event yet, or we find an unsupported
            # event type, store it as unknown; if in the future we need some
            # information from it we can take action to process it then
            if event_type == HistoryEventType.TRANSFER:
                if raw_event['subtype'] == 'spottostaking':
                    event_type = HistoryEventType.STAKING
                    event_subtype = HistoryEventSubType.DEPOSIT_ASSET
                elif raw_event['subtype'] == 'stakingfromspot':
                    event_type = HistoryEventType.STAKING
                    event_subtype = HistoryEventSubType.RECEIVE_WRAPPED
                elif raw_event['subtype'] == 'stakingtospot':
                    event_type = HistoryEventType.STAKING
                    event_subtype = HistoryEventSubType.REMOVE_ASSET
                elif raw_event['subtype'] == 'spotfromstaking':
                    event_type = HistoryEventType.STAKING
                    event_subtype = HistoryEventSubType.RETURN_WRAPPED
            elif event_type == HistoryEventType.ADJUSTMENT:
                if raw_amount < ZERO:
                    event_subtype = HistoryEventSubType.SPEND
                else:
                    event_subtype = HistoryEventSubType.RECEIVE
            elif event_type == HistoryEventType.STAKING:
                event_subtype = HistoryEventSubType.REWARD
            elif event_type == HistoryEventType.INFORMATIONAL:
                found_unknown_event = True
                notes = raw_event['type']
                log.warning(
                    f'Encountered kraken historic event type we do not process. {raw_event}',
                )
            fee_amount = deserialize_asset_amount(raw_event['fee'])

            # Make sure not to generate an event for KFEE that is not of type FEE
            if asset != A_KFEE:
                group_events.append(
                    HistoryBaseEntry(
                        event_identifier=identifier,
                        sequence_index=idx,
                        timestamp=timestamp,
                        location=Location.KRAKEN,
                        location_label=name,
                        asset=asset,
                        balance=Balance(
                            amount=raw_amount,
                            usd_value=ZERO,
                        ),
                        notes=notes,
                        event_type=event_type,
                        event_subtype=event_subtype,
                    ))
            if fee_amount != ZERO:
                group_events.append(
                    HistoryBaseEntry(
                        event_identifier=identifier,
                        sequence_index=current_fee_index,
                        timestamp=timestamp,
                        location=Location.KRAKEN,
                        location_label=name,
                        asset=asset,
                        balance=Balance(
                            amount=fee_amount,
                            usd_value=ZERO,
                        ),
                        notes=notes,
                        event_type=event_type,
                        event_subtype=HistoryEventSubType.FEE,
                    ))
                # Increase the fee index to avoid duplicates in the case of having both a
                # normal fee and a KFEE
                current_fee_index += 1
        except (DeserializationError, KeyError, UnknownAsset) as e:
            msg = str(e)
            if isinstance(e, KeyError):
                msg = f'KeyError: {msg}'
            msg_aggregator.add_error(
                f'Failed to read ledger event from kraken {raw_event} due to {msg}',
            )
            return [], False
    return group_events, found_unknown_event
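
A hedged sketch of the raw kraken ledger shape this function consumes, based only on the keys accessed above; the values are invented:

sample_events = [{
    'time': '1640995200.1234',  # seconds; scaled to TimestampMS by the * 1000 above
    'refid': 'TSID1234-ABCDE',  # shared event_identifier for the whole group
    'type': 'staking',          # mapped via kraken_ledger_entry_type_to_ours
    'subtype': '',              # only consulted for transfer events
    'asset': 'XETH',            # mapped via asset_from_kraken
    'amount': '0.0001',
    'fee': '0.0000001',         # a nonzero fee yields an extra FEE sub-event
}]
# events, has_unknown = history_event_from_kraken(sample_events, 'kraken1', msg_aggregator)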