Example #1
0
def price_historian(
    data_dir,
    inquirer,  # pylint: disable=unused-argument
    should_mock_price_queries,
    mocked_price_queries,
    cryptocompare,
    session_coingecko,
    default_mock_price_value,
    historical_price_oracles_order,
    dont_mock_price_for,
):
    """Fixture returning a freshly initialized PriceHistorian.

    PriceHistorian is a singleton, so the cached instance is wiped first to
    guarantee every fixture invocation starts from scratch instead of reusing
    state left behind by a previous test.
    """
    PriceHistorian._PriceHistorian__instance = None
    price_historian = PriceHistorian(
        data_directory=data_dir,
        cryptocompare=cryptocompare,
        coingecko=session_coingecko,
    )
    price_historian.set_oracles_order(historical_price_oracles_order)
    # Optionally replace real historical price queries with mocked ones
    maybe_mock_historical_price_queries(
        historian=price_historian,
        should_mock_price_queries=should_mock_price_queries,
        mocked_price_queries=mocked_price_queries,
        default_mock_value=default_mock_price_value,
        dont_mock_price_for=dont_mock_price_for,
    )
    return price_historian
Example #2
0
def quote(asset: Tuple[str], amount: float, base_asset_: Optional[str], timestamp: Optional[str]):
    """Print the value of `amount` of each given asset in the base currency.

    If `timestamp` is given, the historical price at that time is used;
    otherwise the current price is queried via the inquirer. The conversion
    goes through USD: (asset -> USD) / (base -> USD).
    """
    buchfink_db = BuchfinkDB()
    buchfink_db.perform_assets_updates()
    # Fall back to the configured main currency when no base asset was given
    base_asset = buchfink_db.get_asset_by_symbol(base_asset_) \
            if base_asset_ \
            else buchfink_db.get_main_currency()
    # NOTE(review): base price is always the *current* USD price, even when a
    # historical timestamp is requested for the assets — confirm intended.
    base_in_usd = FVal(buchfink_db.inquirer.find_usd_price(base_asset))
    a_usd = buchfink_db.get_asset_by_symbol('USD')

    ds_timestamp = deserialize_timestamp(timestamp) if timestamp else None
    historian = PriceHistorian()

    for symbol in asset:
        asset_ = buchfink_db.get_asset_by_symbol(symbol)
        if ds_timestamp:
            # Historical price at the requested timestamp
            asset_usd = historian.query_historical_price(
                    from_asset=asset_,
                    to_asset=a_usd,
                    timestamp=ds_timestamp
            )
        else:
            # Current (spot) price
            asset_usd = FVal(buchfink_db.inquirer.find_usd_price(asset_))
        click.echo('{} {} = {} {}'.format(
                click.style(f'{amount}', fg='white'),
                click.style(asset_.symbol, fg='green'),
                click.style(f'{FVal(amount) * asset_usd / base_in_usd}', fg='white'),
                click.style(base_asset.symbol, fg='green')
        ))
Example #3
0
    def query_missing_prices_of_base_entries(
        self,
        entries_missing_prices: List[Tuple[str, FVal, Asset, Timestamp]],
    ) -> None:
        """Queries missing prices for HistoryBaseEntry in database updating
        the price if it is found. Otherwise we add the id to the ignore list
        for this session.
        """
        price_historian = PriceHistorian()
        value_updates = []
        for identifier, amount, asset, timestamp in entries_missing_prices:
            try:
                price = price_historian.query_historical_price(
                    from_asset=asset,
                    to_asset=A_USD,
                    timestamp=timestamp,
                )
            except (NoPriceForGivenTimestamp, RemoteError) as e:
                # No price could be found -- remember the id so we don't retry
                # it again during this session
                log.error(
                    f'Failed to find price for {asset} at {timestamp} in base '
                    f'entry {identifier}. {str(e)}.', )
                self.base_entries_ignore_set.add(identifier)
                continue

            value_updates.append((str(amount * price), identifier))

        # Persist all found usd values in a single batched statement
        cursor = self.database.conn.cursor()
        cursor.executemany(
            'UPDATE history_events SET usd_value=? WHERE rowid=?',
            value_updates,
        )
        self.database.update_last_write()
Example #4
0
    def set_settings(self, settings: ModifiableDBSettings) -> Tuple[bool, str]:
        """Tries to set new settings. Returns True in success or False with message if error"""
        # RPC endpoints are validated by the chain manager; abort on failure
        eth_endpoint = settings.eth_rpc_endpoint
        if eth_endpoint is not None:
            success, msg = self.chain_manager.set_eth_rpc_endpoint(eth_endpoint)
            if not success:
                return False, msg

        ksm_endpoint = settings.ksm_rpc_endpoint
        if ksm_endpoint is not None:
            success, msg = self.chain_manager.set_ksm_rpc_endpoint(ksm_endpoint)
            if not success:
                return False, msg

        kraken_account_type = settings.kraken_account_type
        if kraken_account_type is not None:
            # Only applies when a kraken exchange instance exists
            kraken = self.exchange_manager.get('kraken')
            if kraken:
                kraken.set_account_type(kraken_account_type)  # type: ignore

        if settings.btc_derivation_gap_limit is not None:
            self.chain_manager.btc_derivation_gap_limit = settings.btc_derivation_gap_limit

        # Propagate oracle preferences to the price query singletons
        if settings.current_price_oracles is not None:
            Inquirer().set_oracles_order(settings.current_price_oracles)

        if settings.historical_price_oracles is not None:
            PriceHistorian().set_oracles_order(settings.historical_price_oracles)

        if settings.active_modules is not None:
            self.chain_manager.process_new_modules_list(settings.active_modules)

        # Finally persist the settings in the DB
        self.data.db.set_settings(settings)
        return True, ''
Example #5
0
 def query_historical_price(from_asset: Asset, to_asset: Asset,
                            timestamp: Timestamp):
     """Thin wrapper delegating to the PriceHistorian singleton.

     Returns the historical price of `from_asset` in `to_asset` terms at the
     given `timestamp`.
     """
     return PriceHistorian().query_historical_price(
         from_asset=from_asset,
         to_asset=to_asset,
         timestamp=timestamp,
     )
Example #6
0
    def _update_events_value(
        self,
        staking_events: ADXStakingEvents,
    ) -> None:
        """Fill in value/pool data of unbond and unbond-request events from
        their originating bond, then set the USD value of every event via
        historical price queries.
        """
        # Update amounts for unbonds and unbond requests
        bond_id_bond_map: Dict[str, Optional[Bond]] = {
            bond.bond_id: bond
            for bond in staking_events.bonds
        }
        for event in (
                staking_events.unbonds + staking_events.
                unbond_requests  # type: ignore # mypy bug concatenating lists
        ):
            has_bond = True
            bond = bond_id_bond_map.get(event.bond_id, None)
            if bond:
                event.value = Balance(amount=bond.value.amount)
                event.pool_id = bond.pool_id
            elif event.bond_id not in bond_id_bond_map:
                # First time seeing this bond id: fall back to the DB. The id
                # is pre-cached as None so a miss is not re-queried later.
                bond_id_bond_map[event.bond_id] = None
                db_bonds = cast(
                    List[Bond],
                    self.database.get_adex_events(
                        bond_id=event.bond_id,
                        event_type=AdexEventType.BOND,
                    ))
                if db_bonds:
                    db_bond = db_bonds[0]
                    bond_id_bond_map[event.bond_id] = db_bond
                    event.value = Balance(amount=db_bond.value.amount)
                    event.pool_id = db_bond.pool_id
                else:
                    has_bond = False
            else:
                # Cached miss (None) from a previous DB lookup
                has_bond = False

            if has_bond is False:
                log.warning(
                    'Failed to update an AdEx event data. Unable to find its related bond event',
                    event=event,
                )

        # Update usd_value for all events
        for event in staking_events.get_all(
        ):  # type: ignore # event can have all types
            # Channel withdrawals carry their own token; everything else is ADX
            token = event.token if isinstance(event,
                                              ChannelWithdraw) else A_ADX
            usd_price = PriceHistorian().query_historical_price(
                from_asset=token,
                to_asset=A_USD,
                timestamp=event.timestamp,
            )
            event.value.usd_value = event.value.amount * usd_price
Example #7
0
def fixture_fake_price_historian(historical_price_oracles_order):
    """Build a PriceHistorian whose external services are all MagicMocks.

    NB: custom fixture for quick unit testing. Do not export.
    """
    # PriceHistorian is a singleton: wipe the cached instance so every fixture
    # invocation gets a brand new object rather than leftover test state.
    PriceHistorian._PriceHistorian__instance = None
    fake_historian = PriceHistorian(
        data_directory=MagicMock(spec=Path),
        cryptocompare=MagicMock(spec=Cryptocompare),
        coingecko=MagicMock(spec=Coingecko),
    )
    fake_historian.set_oracles_order(historical_price_oracles_order)
    return fake_historian
Example #8
0
    def process_gitcoin(
            self,
            from_ts: Optional[Timestamp],
            to_ts: Optional[Timestamp],
            grant_id: Optional[int],
    ) -> Tuple[Asset, Dict[int, GitcoinReport]]:
        """Processes gitcoin transactions in the given time range and creates a report

        Returns the profit currency together with a mapping of grant id to its
        report. Entries missing a rate/rate asset are skipped with an error
        log; entries whose historical price query fails are skipped with a
        user-visible message.
        """
        actions = self.db_ledger.get_gitcoin_grant_events(
            grant_id=grant_id,
            from_ts=from_ts,
            to_ts=to_ts,
        )
        profit_currency = self.db.get_main_currency()
        reports: DefaultDict[int, GitcoinReport] = defaultdict(GitcoinReport)

        for entry in actions:
            balance = Balance(amount=entry.amount)
            if entry.rate_asset is None or entry.rate is None:
                log.error(
                    f'Found gitcoin ledger action for {entry.amount} {entry.asset} '
                    f'without a rate asset. Should not happen. Entry was '
                    f'possibly edited by hand. Skipping.',
                )
                continue

            report = reports[entry.extra_data.grant_id]  # type: ignore
            rate = entry.rate
            if entry.rate_asset != profit_currency:
                # The stored rate is in rate_asset terms; convert it to the
                # profit currency via the historical cross price.
                try:
                    profit_currency_in_rate_asset = PriceHistorian().query_historical_price(
                        from_asset=profit_currency,
                        to_asset=entry.rate_asset,
                        timestamp=entry.timestamp,
                    )
                except NoPriceForGivenTimestamp as e:
                    self.db.msg_aggregator.add_error(
                        f'{str(e)} when processing gitcoin entry. Skipping entry.',
                    )
                    continue
                rate = entry.rate / profit_currency_in_rate_asset  # type: ignore  # checked above

            value_in_profit_currency = entry.amount * rate
            balance.usd_value = value_in_profit_currency
            report.per_asset[entry.asset] += balance
            report.total += value_in_profit_currency

        return self.db.get_main_currency(), reports
Example #9
0
    def get_fee_in_profit_currency(self, trade: Trade) -> Fee:
        """Get the profit_currency rate of the fee of the given trade

        May raise:
        - PriceQueryUnsupportedAsset if from/to asset is missing from all price oracles
        - NoPriceForGivenTimestamp if we can't find a price for the asset in the given
        timestamp from the price oracle
        - RemoteError if there is a problem reaching the price oracle server
        or with reading the response returned by the server
        """
        fee = trade.fee
        fee_currency = trade.fee_currency
        # Trades without a fee contribute nothing
        if fee_currency is None or fee is None:
            return Fee(ZERO)

        rate = PriceHistorian().query_historical_price(
            from_asset=fee_currency,
            to_asset=self.profit_currency,
            timestamp=trade.timestamp,
        )
        return Fee(rate * fee)
Example #10
0
    def get_rate_in_profit_currency(self, asset: Asset,
                                    timestamp: Timestamp) -> FVal:
        """Get the profit_currency price of asset in the given timestamp

        May raise:
        - PriceQueryUnsupportedAsset if from/to asset is missing from price oracles
        - NoPriceForGivenTimestamp if we can't find a price for the asset in the given
        timestamp from the price oracle
        - RemoteError if there is a problem reaching the price oracle server
        or with reading the response returned by the server
        """
        # The profit currency trivially trades 1:1 against itself
        if asset == self.profit_currency:
            return FVal(1)

        return PriceHistorian().query_historical_price(
            from_asset=asset,
            to_asset=self.profit_currency,
            timestamp=timestamp,
        )
Example #11
0
    def get_staking_history(
        self,
        addresses: List[ChecksumEthAddress],
        from_timestamp: Timestamp,
        to_timestamp: Timestamp,
    ) -> Dict[ChecksumEthAddress, List[LiquityEvent]]:
        """Query the Liquity subgraph for LQTY staking events of the addresses
        in the given time range and return them per (checksummed) owner.

        Remote failures degrade to an empty result; individual malformed
        entries are skipped with a warning.
        """
        try:
            staked = self._get_raw_history(addresses, 'stake')
        except RemoteError as e:
            log.error(
                f'Failed to query stake graph events for liquity. {str(e)}')
            staked = {}

        result: Dict[ChecksumEthAddress,
                     List[LiquityEvent]] = defaultdict(list)
        for stake in staked.get('lqtyStakes', []):
            owner = to_checksum_address(stake['id'])
            for change in stake['changes']:
                try:
                    timestamp = change['transaction']['timestamp']
                    if timestamp < from_timestamp:
                        continue
                    # NOTE(review): `break` assumes changes are ordered by
                    # ascending timestamp — TODO confirm against the subgraph
                    if timestamp > to_timestamp:
                        break
                    operation_stake = LiquityStakeEventType.deserialize(
                        change['stakeOperation'])
                    # Historical USD prices used for all balances of this change
                    lqty_price = PriceHistorian().query_historical_price(
                        from_asset=A_LQTY,
                        to_asset=A_USD,
                        timestamp=timestamp,
                    )
                    lusd_price = PriceHistorian().query_historical_price(
                        from_asset=A_LUSD,
                        to_asset=A_USD,
                        timestamp=timestamp,
                    )
                    stake_after = deserialize_optional_to_fval(
                        value=change['stakedAmountAfter'],
                        name='stakedAmountAfter',
                        location='liquity',
                    )
                    stake_change = deserialize_optional_to_fval(
                        value=change['stakedAmountChange'],
                        name='stakedAmountChange',
                        location='liquity',
                    )
                    issuance_gain = deserialize_optional_to_fval(
                        value=change['issuanceGain'],
                        name='issuanceGain',
                        location='liquity',
                    )
                    redemption_gain = deserialize_optional_to_fval(
                        value=change['redemptionGain'],
                        name='redemptionGain',
                        location='liquity',
                    )
                    stake_event = LiquityStakeEvent(
                        kind='stake',
                        tx=change['transaction']['id'],
                        address=owner,
                        timestamp=timestamp,
                        stake_after=AssetBalance(
                            asset=A_LQTY,
                            balance=Balance(
                                amount=stake_after,
                                usd_value=lqty_price * stake_after,
                            ),
                        ),
                        stake_change=AssetBalance(
                            asset=A_LQTY,
                            balance=Balance(
                                amount=stake_change,
                                usd_value=lqty_price * stake_change,
                            ),
                        ),
                        issuance_gain=AssetBalance(
                            asset=A_LUSD,
                            balance=Balance(
                                amount=issuance_gain,
                                usd_value=lusd_price * issuance_gain,
                            ),
                        ),
                        redemption_gain=AssetBalance(
                            asset=A_LUSD,
                            balance=Balance(
                                amount=redemption_gain,
                                usd_value=lusd_price * redemption_gain,
                            ),
                        ),
                        stake_operation=operation_stake,
                        sequence_number=str(
                            change['transaction']['sequenceNumber']),
                    )
                    result[owner].append(stake_event)
                except (DeserializationError, KeyError) as e:
                    # Skip entries with missing/bad remote data and warn the user
                    msg = str(e)
                    log.debug(f'Failed to deserialize Liquity entry: {change}')
                    if isinstance(e, KeyError):
                        msg = f'Missing key entry for {msg}.'
                    self.msg_aggregator.add_warning(
                        f'Ignoring Liquity Stake event in Liquity. '
                        f'Failed to decode remote information. {msg}.', )
                    continue
        return result
Example #12
0
    def get_trove_history(
        self,
        addresses: List[ChecksumEthAddress],
        from_timestamp: Timestamp,
        to_timestamp: Timestamp,
    ) -> Dict[ChecksumEthAddress, List[LiquityEvent]]:
        """Query the Liquity subgraph for trove events of the given addresses
        (including their DS proxies) in the time range, returned per owner.

        Events made through a proxy are attributed back to the owning account.
        Remote failures degrade to an empty result; malformed entries are
        skipped with a warning.
        """
        # Also query the DS proxies of the accounts, mapping proxies back to owners
        addresses_to_query = list(addresses)
        proxied_addresses = self._get_accounts_having_proxy()
        proxies_to_address = {v: k for k, v in proxied_addresses.items()}
        addresses_to_query += proxied_addresses.values()

        try:
            query = self._get_raw_history(addresses_to_query, 'trove')
        except RemoteError as e:
            log.error(
                f'Failed to query trove graph events for liquity. {str(e)}')
            query = {}

        result: Dict[ChecksumEthAddress,
                     List[LiquityEvent]] = defaultdict(list)
        for trove in query.get('troves', []):
            owner = to_checksum_address(trove['owner']['id'])
            if owner in proxies_to_address:
                # Attribute proxy-made events to the proxy's owner
                owner = proxies_to_address[owner]
            for change in trove['changes']:
                try:
                    timestamp = change['transaction']['timestamp']
                    if timestamp < from_timestamp:
                        continue
                    # NOTE(review): `break` assumes changes are ordered by
                    # ascending timestamp — TODO confirm against the subgraph
                    if timestamp > to_timestamp:
                        break
                    operation = TroveOperation.deserialize(
                        change['troveOperation'])
                    collateral_change = deserialize_optional_to_fval(
                        value=change['collateralChange'],
                        name='collateralChange',
                        location='liquity',
                    )
                    debt_change = deserialize_optional_to_fval(
                        value=change['debtChange'],
                        name='debtChange',
                        location='liquity',
                    )
                    # Historical USD prices used for all balances of this change
                    lusd_price = PriceHistorian().query_historical_price(
                        from_asset=A_LUSD,
                        to_asset=A_USD,
                        timestamp=timestamp,
                    )
                    eth_price = PriceHistorian().query_historical_price(
                        from_asset=A_ETH,
                        to_asset=A_USD,
                        timestamp=timestamp,
                    )
                    debt_after_amount = deserialize_optional_to_fval(
                        value=change['debtAfter'],
                        name='debtAfter',
                        location='liquity',
                    )
                    collateral_after_amount = deserialize_optional_to_fval(
                        value=change['collateralAfter'],
                        name='collateralAfter',
                        location='liquity',
                    )
                    event = LiquityTroveEvent(
                        kind='trove',
                        tx=change['transaction']['id'],
                        address=owner,
                        timestamp=timestamp,
                        debt_after=AssetBalance(
                            asset=A_LUSD,
                            balance=Balance(
                                amount=debt_after_amount,
                                usd_value=lusd_price * debt_after_amount,
                            ),
                        ),
                        collateral_after=AssetBalance(
                            asset=A_ETH,
                            balance=Balance(
                                amount=collateral_after_amount,
                                usd_value=eth_price * collateral_after_amount,
                            ),
                        ),
                        debt_delta=AssetBalance(
                            asset=A_LUSD,
                            balance=Balance(
                                amount=debt_change,
                                usd_value=lusd_price * debt_change,
                            ),
                        ),
                        collateral_delta=AssetBalance(
                            asset=A_ETH,
                            balance=Balance(
                                amount=collateral_change,
                                usd_value=eth_price * collateral_change,
                            ),
                        ),
                        trove_operation=operation,
                        sequence_number=str(change['sequenceNumber']),
                    )
                    result[owner].append(event)
                except (DeserializationError, KeyError) as e:
                    # Skip entries with missing/bad remote data and warn the user
                    log.debug(
                        f'Failed to deserialize Liquity trove event: {change}')
                    msg = str(e)
                    if isinstance(e, KeyError):
                        msg = f'Missing key entry for {msg}.'
                    self.msg_aggregator.add_warning(
                        f'Ignoring Liquity Trove event in Liquity. '
                        f'Failed to decode remote information. {msg}.', )
                    continue

        return result
Example #13
0
def initialize_mock_rotkehlchen_instance(
    rotki,
    start_with_logged_in_user,
    start_with_valid_premium,
    db_password,
    rotki_premium_credentials,
    username,
    blockchain_accounts,
    include_etherscan_key,
    include_cryptocompare_key,
    should_mock_price_queries,
    mocked_price_queries,
    ethereum_modules,
    db_settings,
    ignored_assets,
    tags,
    manually_tracked_balances,
    default_mock_price_value,
    ethereum_manager_connect_at_start,
    kusama_manager_connect_at_start,
    eth_rpc_endpoint,
    ksm_rpc_endpoint,
    aave_use_graph,
    max_tasks_num,
):
    if not start_with_logged_in_user:
        return

    # Mock the initial get settings to include the specified ethereum modules
    def mock_get_settings() -> DBSettings:
        settings = DBSettings(
            active_modules=ethereum_modules,
            eth_rpc_endpoint=eth_rpc_endpoint,
            ksm_rpc_endpoint=ksm_rpc_endpoint,
        )
        return settings

    settings_patch = patch.object(rotki,
                                  'get_settings',
                                  side_effect=mock_get_settings)

    # Do not connect to the usual nodes at start by default. Do not want to spam
    # them during our tests. It's configurable per test, with the default being nothing
    eth_rpcconnect_patch = patch(
        'rotkehlchen.rotkehlchen.ETHEREUM_NODES_TO_CONNECT_AT_START',
        new=ethereum_manager_connect_at_start,
    )
    ksm_rpcconnect_patch = patch(
        'rotkehlchen.rotkehlchen.KUSAMA_NODES_TO_CONNECT_AT_START',
        new=kusama_manager_connect_at_start,
    )
    ksm_connect_on_startup_patch = patch.object(
        rotki,
        '_connect_ksm_manager_on_startup',
        return_value=bool(blockchain_accounts.ksm),
    )
    # patch the constants to make sure that the periodic query for icons
    # does not run during tests
    size_patch = patch('rotkehlchen.rotkehlchen.ICONS_BATCH_SIZE', new=0)
    sleep_patch = patch('rotkehlchen.rotkehlchen.ICONS_QUERY_SLEEP',
                        new=999999)
    with settings_patch, eth_rpcconnect_patch, ksm_rpcconnect_patch, ksm_connect_on_startup_patch, size_patch, sleep_patch:  # noqa: E501
        rotki.unlock_user(
            user=username,
            password=db_password,
            create_new=True,
            sync_approval='no',
            premium_credentials=None,
        )
    # configure when task manager should run for tests
    rotki.task_manager.max_tasks_num = max_tasks_num

    if start_with_valid_premium:
        rotki.premium = Premium(rotki_premium_credentials)
        rotki.premium_sync_manager.premium = rotki.premium

    # After unlocking when all objects are created we need to also include
    # customized fixtures that may have been set by the tests
    rotki.chain_manager.accounts = blockchain_accounts
    add_settings_to_test_db(rotki.data.db, db_settings, ignored_assets)
    maybe_include_etherscan_key(rotki.data.db, include_etherscan_key)
    maybe_include_cryptocompare_key(rotki.data.db, include_cryptocompare_key)
    add_blockchain_accounts_to_db(rotki.data.db, blockchain_accounts)
    add_tags_to_test_db(rotki.data.db, tags)
    add_manually_tracked_balances_to_test_db(rotki.data.db,
                                             manually_tracked_balances)
    maybe_mock_historical_price_queries(
        historian=PriceHistorian(),
        should_mock_price_queries=should_mock_price_queries,
        mocked_price_queries=mocked_price_queries,
        default_mock_value=default_mock_price_value,
    )
    wait_until_all_nodes_connected(
        ethereum_manager_connect_at_start=ethereum_manager_connect_at_start,
        ethereum=rotki.chain_manager.ethereum,
    )
    wait_until_all_substrate_nodes_connected(
        substrate_manager_connect_at_start=kusama_manager_connect_at_start,
        substrate_manager=rotki.chain_manager.kusama,
    )

    aave = rotki.chain_manager.get_module('aave')
    if aave:
        aave.use_graph = aave_use_graph
Example #14
0
    def unlock_user(
        self,
        user: str,
        password: str,
        create_new: bool,
        sync_approval: Literal['yes', 'no', 'unknown'],
        premium_credentials: Optional[PremiumCredentials],
        initial_settings: Optional[ModifiableDBSettings] = None,
    ) -> None:
        """Unlocks an existing user or creates a new one if `create_new` is True

        May raise:
        - PremiumAuthenticationError if the password can't unlock the database.
        - AuthenticationError if premium_credentials are given and are invalid
        or can't authenticate with the server
        - DBUpgradeError if the rotki DB version is newer than the software or
        there is a DB upgrade and there is an error.
        - SystemPermissionError if the directory or DB file can not be accessed
        """
        log.info(
            'Unlocking user',
            user=user,
            create_new=create_new,
            sync_approval=sync_approval,
            initial_settings=initial_settings,
        )

        # unlock or create the DB
        self.password = password
        self.user_directory = self.data.unlock(user, password, create_new,
                                               initial_settings)
        self.data_importer = DataImporter(db=self.data.db)
        self.last_data_upload_ts = self.data.db.get_last_data_upload_ts()
        self.premium_sync_manager = PremiumSyncManager(data=self.data,
                                                       password=password)
        # set the DB in the external services instances that need it
        self.cryptocompare.set_database(self.data.db)

        # Anything that was set above here has to be cleaned in case of failure in the next step
        # by reset_after_failed_account_creation_or_login()
        try:
            self.premium = self.premium_sync_manager.try_premium_at_start(
                given_premium_credentials=premium_credentials,
                username=user,
                create_new=create_new,
                sync_approval=sync_approval,
            )
        except PremiumAuthenticationError:
            # Reraise it only if this is during the creation of a new account where
            # the premium credentials were given by the user
            if create_new:
                raise
            self.msg_aggregator.add_warning(
                'Could not authenticate the Rotki premium API keys found in the DB.'
                ' Has your subscription expired?', )
            # else let's just continue. User signed in succesfully, but he just
            # has unauthenticable/invalid premium credentials remaining in his DB

        settings = self.get_settings()
        # Fire-and-forget analytics submission; failures are not errors
        self.greenlet_manager.spawn_and_track(
            after_seconds=None,
            task_name='submit_usage_analytics',
            exception_is_error=False,
            method=maybe_submit_usage_analytics,
            should_submit=settings.submit_usage_analytics,
        )
        self.etherscan = Etherscan(database=self.data.db,
                                   msg_aggregator=self.msg_aggregator)
        self.beaconchain = BeaconChain(database=self.data.db,
                                       msg_aggregator=self.msg_aggregator)
        eth_rpc_endpoint = settings.eth_rpc_endpoint
        # Initialize the price historian singleton
        PriceHistorian(
            data_directory=self.data_dir,
            cryptocompare=self.cryptocompare,
            coingecko=self.coingecko,
        )
        PriceHistorian().set_oracles_order(settings.historical_price_oracles)

        self.accountant = Accountant(
            db=self.data.db,
            user_directory=self.user_directory,
            msg_aggregator=self.msg_aggregator,
            create_csv=True,
            premium=self.premium,
        )

        # Initialize the rotkehlchen logger
        LoggingSettings(anonymized_logs=settings.anonymized_logs)
        exchange_credentials = self.data.db.get_exchange_credentials()
        self.exchange_manager.initialize_exchanges(
            exchange_credentials=exchange_credentials,
            database=self.data.db,
        )

        # Initialize blockchain querying modules
        ethereum_manager = EthereumManager(
            ethrpc_endpoint=eth_rpc_endpoint,
            etherscan=self.etherscan,
            database=self.data.db,
            msg_aggregator=self.msg_aggregator,
            greenlet_manager=self.greenlet_manager,
            connect_at_start=ETHEREUM_NODES_TO_CONNECT_AT_START,
        )
        kusama_manager = SubstrateManager(
            chain=SubstrateChain.KUSAMA,
            msg_aggregator=self.msg_aggregator,
            greenlet_manager=self.greenlet_manager,
            connect_at_start=KUSAMA_NODES_TO_CONNECT_AT_START,
            connect_on_startup=self._connect_ksm_manager_on_startup(),
            own_rpc_endpoint=settings.ksm_rpc_endpoint,
        )

        # Configure the current-price Inquirer singleton
        Inquirer().inject_ethereum(ethereum_manager)
        Inquirer().set_oracles_order(settings.current_price_oracles)

        self.chain_manager = ChainManager(
            blockchain_accounts=self.data.db.get_blockchain_accounts(),
            ethereum_manager=ethereum_manager,
            kusama_manager=kusama_manager,
            msg_aggregator=self.msg_aggregator,
            database=self.data.db,
            greenlet_manager=self.greenlet_manager,
            premium=self.premium,
            eth_modules=settings.active_modules,
            data_directory=self.data_dir,
            beaconchain=self.beaconchain,
            btc_derivation_gap_limit=settings.btc_derivation_gap_limit,
        )
        self.events_historian = EventsHistorian(
            user_directory=self.user_directory,
            db=self.data.db,
            msg_aggregator=self.msg_aggregator,
            exchange_manager=self.exchange_manager,
            chain_manager=self.chain_manager,
        )
        self.task_manager = TaskManager(
            max_tasks_num=DEFAULT_MAX_TASKS_NUM,
            greenlet_manager=self.greenlet_manager,
            api_task_greenlets=self.api_task_greenlets,
            database=self.data.db,
            cryptocompare=self.cryptocompare,
            premium_sync_manager=self.premium_sync_manager,
            chain_manager=self.chain_manager,
            exchange_manager=self.exchange_manager,
        )
        # Background task: prefetch asset icons in batches
        self.greenlet_manager.spawn_and_track(
            after_seconds=5,
            task_name='periodically_query_icons_until_all_cached',
            exception_is_error=False,
            method=self.icon_manager.periodically_query_icons_until_all_cached,
            batch_size=ICONS_BATCH_SIZE,
            sleep_time_secs=ICONS_QUERY_SLEEP,
        )
        self.user_is_logged_in = True
        log.debug('User unlocking complete')
Example #15
0
def initialize_mock_rotkehlchen_instance(
    rotki,
    start_with_logged_in_user,
    start_with_valid_premium,
    db_password,
    rotki_premium_credentials,
    username,
    blockchain_accounts,
    include_etherscan_key,
    include_cryptocompare_key,
    should_mock_price_queries,
    mocked_price_queries,
    ethereum_modules,
    db_settings,
    ignored_assets,
    tags,
    manually_tracked_balances,
    default_mock_price_value,
    ethereum_manager_connect_at_start,
    kusama_manager_connect_at_start,
    eth_rpc_endpoint,
    ksm_rpc_endpoint,
    aave_use_graph,
    max_tasks_num,
    legacy_messages_via_websockets,
    data_migration_version,
    use_custom_database,
    user_data_dir,
    perform_migrations_at_unlock,
    perform_upgrades_at_unlock,
):
    """Unlock a test user on the given rotki instance and apply test fixtures.

    Performs the unlock under a set of patches (mocked settings, no real node
    connections, disabled periodic icon querying and — optionally — skipped DB
    migrations/upgrades), then configures the unlocked instance with the
    test-provided accounts, settings, keys, tags, balances and price mocks.
    Does nothing when ``start_with_logged_in_user`` is falsy.
    """
    if not start_with_logged_in_user:
        return

    # Report the ethereum modules / rpc endpoints requested by the test
    # instead of whatever the DB would return during unlock.
    def mock_get_settings() -> DBSettings:
        return DBSettings(
            active_modules=ethereum_modules,
            eth_rpc_endpoint=eth_rpc_endpoint,
            ksm_rpc_endpoint=ksm_rpc_endpoint,
        )

    # Patches that must be active only while unlocking. Do not connect to the
    # usual nodes at start by default (configurable per test, default nothing)
    # so we don't spam them during tests, and neutralize the periodic icon
    # query by making its batch empty and its sleep effectively infinite.
    unlock_patches = [
        patch.object(rotki, 'get_settings', side_effect=mock_get_settings),
        patch(
            'rotkehlchen.rotkehlchen.ETHEREUM_NODES_TO_CONNECT_AT_START',
            new=ethereum_manager_connect_at_start,
        ),
        patch(
            'rotkehlchen.rotkehlchen.KUSAMA_NODES_TO_CONNECT_AT_START',
            new=kusama_manager_connect_at_start,
        ),
        patch.object(
            rotki,
            '_connect_ksm_manager_on_startup',
            return_value=bool(blockchain_accounts.ksm),
        ),
        patch('rotkehlchen.rotkehlchen.ICONS_BATCH_SIZE', new=0),
        patch('rotkehlchen.rotkehlchen.ICONS_QUERY_SLEEP', new=999999),
    ]
    if perform_migrations_at_unlock is False:
        unlock_patches.append(patch.object(
            DataMigrationManager,
            'maybe_migrate_data',
            side_effect=lambda *args: None,
        ))
    if perform_upgrades_at_unlock is False:
        unlock_patches.append(patch.object(
            DBUpgradeManager,
            'run_upgrades',
            side_effect=lambda *args: None,
        ))

    # When a prepared database is supplied, copy it in place and open it
    # instead of creating a fresh one.
    create_new = True
    if use_custom_database is not None:
        _use_prepared_db(user_data_dir, use_custom_database)
        create_new = False

    with ExitStack() as stack:
        for unlock_patch in unlock_patches:
            stack.enter_context(unlock_patch)
        rotki.unlock_user(
            user=username,
            password=db_password,
            create_new=create_new,
            sync_approval='no',
            premium_credentials=None,
        )

    # configure when task manager should run for tests
    rotki.task_manager.max_tasks_num = max_tasks_num

    if start_with_valid_premium:
        premium = Premium(rotki_premium_credentials)
        rotki.premium = premium
        rotki.premium_sync_manager.premium = premium
        rotki.chain_manager.premium = premium
        # Add premium to all the modules
        for module_name in AVAILABLE_MODULES_MAP:
            module = rotki.chain_manager.get_module(module_name)
            if module is not None:
                module.premium = premium

    if legacy_messages_via_websockets is False:
        rotki.msg_aggregator.rotki_notifier = None

    # After unlocking, when all objects are created, also include the
    # customized fixtures that may have been set by the tests.
    rotki.chain_manager.accounts = blockchain_accounts
    add_settings_to_test_db(
        rotki.data.db, db_settings, ignored_assets, data_migration_version,
    )
    maybe_include_etherscan_key(rotki.data.db, include_etherscan_key)
    maybe_include_cryptocompare_key(rotki.data.db, include_cryptocompare_key)
    add_blockchain_accounts_to_db(rotki.data.db, blockchain_accounts)
    add_tags_to_test_db(rotki.data.db, tags)
    add_manually_tracked_balances_to_test_db(
        rotki.data.db, manually_tracked_balances,
    )
    maybe_mock_historical_price_queries(
        historian=PriceHistorian(),
        should_mock_price_queries=should_mock_price_queries,
        mocked_price_queries=mocked_price_queries,
        default_mock_value=default_mock_price_value,
    )
    wait_until_all_nodes_connected(
        ethereum_manager_connect_at_start=ethereum_manager_connect_at_start,
        ethereum=rotki.chain_manager.ethereum,
    )
    wait_until_all_substrate_nodes_connected(
        substrate_manager_connect_at_start=kusama_manager_connect_at_start,
        substrate_manager=rotki.chain_manager.kusama,
    )

    aave = rotki.chain_manager.get_module('aave')
    if aave:
        aave.use_graph = aave_use_graph
Beispiel #16
0
    def __init__(self, data_directory='.'):
        # pylint: disable=super-init-not-called
        self.data_directory = Path(data_directory)
        with open(self.data_directory / 'buchfink.yaml', 'r') as cfg:
            yaml_config = yaml.load(cfg, Loader=yaml.SafeLoader)
        self.config = config_schema(yaml_config)
        self.accounts = accounts_from_config(self.config)  # type: List[Account]
        self._active_eth_address = None  # type: Optional[ChecksumEthAddress]

        self.reports_directory = self.data_directory / "reports"
        self.trades_directory = self.data_directory / "trades"
        self.cache_directory = self.data_directory / "cache"
        self.balances_directory = self.data_directory / "balances"
        self.annotations_directory = self.data_directory / "annotations"
        self.user_data_dir = self.data_directory / "user"

        self.reports_directory.mkdir(exist_ok=True)
        self.trades_directory.mkdir(exist_ok=True)
        self.balances_directory.mkdir(exist_ok=True)
        self.cache_directory.mkdir(exist_ok=True)
        (self.cache_directory / 'cryptocompare').mkdir(exist_ok=True)
        (self.cache_directory / 'history').mkdir(exist_ok=True)
        (self.cache_directory / 'inquirer').mkdir(exist_ok=True)
        (self.cache_directory / 'coingecko').mkdir(exist_ok=True)

        self.last_write_ts: Optional[Timestamp] = None

        self._amm_swaps = []  # type: List[AMMSwap]
        self._eth_tx = []  # type: List[EthereumTransaction]
        self._eth_receipts_store = pickledb.load(self.cache_directory / 'receipts.db', False)
        self.cryptocompare = Cryptocompare(self.cache_directory / 'cryptocompare', self)
        self.coingecko = Coingecko()
        self.historian = PriceHistorian(
                self.cache_directory / 'history',
                self.cryptocompare,
                self.coingecko
            )
        self.inquirer = Inquirer(self.cache_directory / 'inquirer',
                self.cryptocompare,
                self.coingecko
            )
        self.msg_aggregator = MessagesAggregator()
        self.greenlet_manager = GreenletManager(msg_aggregator=self.msg_aggregator)

        # Initialize blockchain querying modules
        self.etherscan = Etherscan(database=self, msg_aggregator=self.msg_aggregator)
        GlobalDBHandler._GlobalDBHandler__instance = None
        self.globaldb = GlobalDBHandler(self.cache_directory)
        self.asset_resolver = AssetResolver()
        self.assets_updater = AssetsUpdater(self.msg_aggregator)
        self.ethereum_manager = EthereumManager(
            database=self,
            ethrpc_endpoint=self.get_eth_rpc_endpoint(),
            etherscan=self.etherscan,
            msg_aggregator=self.msg_aggregator,
            greenlet_manager=self.greenlet_manager,
            connect_at_start=[]
        )
        self.inquirer.inject_ethereum(self.ethereum_manager)
        self.inquirer.set_oracles_order(self.get_settings().current_price_oracles)
        self.historian.set_oracles_order(self.get_settings().historical_price_oracles)
        self.beaconchain = BeaconChain(database=self, msg_aggregator=self.msg_aggregator)