Example #1
    def query_missing_prices_of_base_entries(
        self,
        entries_missing_prices: List[Tuple[str, FVal, Asset, Timestamp]],
    ) -> None:
        """Queries missing prices for HistoryBaseEntry in database updating
        the price if it is found. Otherwise we add the id to the ignore list
        for this session.
        """
        inquirer = PriceHistorian()
        updates = []
        for identifier, amount, asset, timestamp in entries_missing_prices:
            try:
                price = inquirer.query_historical_price(
                    from_asset=asset,
                    to_asset=A_USD,
                    timestamp=timestamp,
                )
            except (NoPriceForGivenTimestamp, RemoteError) as e:
                log.error(
                    f'Failed to find price for {asset} at {timestamp} in base '
                    f'entry {identifier}. {str(e)}.',
                )
                self.base_entries_ignore_set.add(identifier)
                continue

            usd_value = amount * price
            updates.append((str(usd_value), identifier))

        query = 'UPDATE history_events SET usd_value=? WHERE rowid=?'
        cursor = self.database.conn.cursor()
        cursor.executemany(query, updates)
        self.database.update_last_write()
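
A minimal usage sketch of the method above; the `processor` object owning it is a stand-in, and the import paths are assumptions rather than something shown in the snippet:

# Hypothetical caller (import paths and `processor` are assumptions):
from rotkehlchen.constants.assets import A_ETH
from rotkehlchen.fval import FVal
from rotkehlchen.typing import Timestamp

entries = [
    # (identifier, amount, asset, timestamp), matching the method signature
    ('42', FVal('0.5'), A_ETH, Timestamp(1625000000)),
]
processor.query_missing_prices_of_base_entries(entries_missing_prices=entries)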
Example #2
def quote(asset: Tuple[str, ...], amount: float, base_asset_: Optional[str], timestamp: Optional[str]):
    buchfink_db = BuchfinkDB()
    buchfink_db.perform_assets_updates()
    base_asset = (
        buchfink_db.get_asset_by_symbol(base_asset_)
        if base_asset_
        else buchfink_db.get_main_currency()
    )
    base_in_usd = FVal(buchfink_db.inquirer.find_usd_price(base_asset))
    a_usd = buchfink_db.get_asset_by_symbol('USD')

    ds_timestamp = deserialize_timestamp(timestamp) if timestamp else None
    historian = PriceHistorian()

    for symbol in asset:
        asset_ = buchfink_db.get_asset_by_symbol(symbol)
        if ds_timestamp:
            asset_usd = historian.query_historical_price(
                    from_asset=asset_,
                    to_asset=a_usd,
                    timestamp=ds_timestamp
            )
        else:
            asset_usd = FVal(buchfink_db.inquirer.find_usd_price(asset_))
        click.echo('{} {} = {} {}'.format(
                click.style(f'{amount}', fg='white'),
                click.style(asset_.symbol, fg='green'),
                click.style(f'{FVal(amount) * asset_usd / base_in_usd}', fg='white'),
                click.style(base_asset.symbol, fg='green')
        ))
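
The conversion in the loop prices both the quoted asset and the base asset in USD and takes the ratio; a standalone worked example with illustrative numbers:

# Standalone illustration of `FVal(amount) * asset_usd / base_in_usd`:
amount = 2            # quoting 2 units of the asset
asset_usd = 3000.0    # asset price in USD (historical or current)
base_in_usd = 1.2     # base currency (e.g. EUR) price in USD
print(amount * asset_usd / base_in_usd)  # 5000.0, shown as "2 ASSET = 5000 EUR"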
Example #3
def price_historian(
    data_dir,
    inquirer,  # pylint: disable=unused-argument
    should_mock_price_queries,
    mocked_price_queries,
    cryptocompare,
    session_coingecko,
    default_mock_price_value,
    historical_price_oracles_order,
    dont_mock_price_for,
):
    # Since this is a singleton and we want it initialized every time the
    # fixture is called, make sure its instance always starts from scratch
    PriceHistorian._PriceHistorian__instance = None
    historian = PriceHistorian(
        data_directory=data_dir,
        cryptocompare=cryptocompare,
        coingecko=session_coingecko,
    )
    historian.set_oracles_order(historical_price_oracles_order)
    maybe_mock_historical_price_queries(
        historian=historian,
        should_mock_price_queries=should_mock_price_queries,
        mocked_price_queries=mocked_price_queries,
        default_mock_value=default_mock_price_value,
        dont_mock_price_for=dont_mock_price_for,
    )

    return historian
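
The `PriceHistorian._PriceHistorian__instance = None` line clears the name-mangled singleton slot so the next constructor call builds a fresh object; the same trick appears in Example #4 below. The pattern in isolation (a standalone generic class, not rotki code):

class Singleton:
    __instance = None  # name-mangled to _Singleton__instance

    def __new__(cls):
        if cls.__instance is None:
            cls.__instance = super().__new__(cls)
        return cls.__instance

a = Singleton()
Singleton._Singleton__instance = None  # same reset trick as in the fixture
b = Singleton()
assert a is not b  # every fixture call starts from scratch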
Example #4
def fixture_fake_price_historian(historical_price_oracles_order):
    # NB: custom fixture for quick unit testing. Do not export.
    # Since this is a singleton and we want it initialized every time the
    # fixture is called, make sure its instance always starts from scratch
    PriceHistorian._PriceHistorian__instance = None
    price_historian = PriceHistorian(
        data_directory=MagicMock(spec=Path),
        cryptocompare=MagicMock(spec=Cryptocompare),
        coingecko=MagicMock(spec=Coingecko),
    )
    price_historian.set_oracles_order(historical_price_oracles_order)
    return price_historian
Example #5
    def set_settings(self, settings: ModifiableDBSettings) -> Tuple[bool, str]:
        """Tries to set new settings. Returns True in success or False with message if error"""
        if settings.eth_rpc_endpoint is not None:
            result, msg = self.chain_manager.set_eth_rpc_endpoint(
                settings.eth_rpc_endpoint)
            if not result:
                return False, msg

        if settings.ksm_rpc_endpoint is not None:
            result, msg = self.chain_manager.set_ksm_rpc_endpoint(
                settings.ksm_rpc_endpoint)
            if not result:
                return False, msg

        if settings.kraken_account_type is not None:
            kraken = self.exchange_manager.get('kraken')
            if kraken:
                kraken.set_account_type(
                    settings.kraken_account_type)  # type: ignore

        if settings.btc_derivation_gap_limit is not None:
            self.chain_manager.btc_derivation_gap_limit = settings.btc_derivation_gap_limit

        if settings.current_price_oracles is not None:
            Inquirer().set_oracles_order(settings.current_price_oracles)

        if settings.historical_price_oracles is not None:
            PriceHistorian().set_oracles_order(
                settings.historical_price_oracles)
        if settings.active_modules is not None:
            self.chain_manager.process_new_modules_list(
                settings.active_modules)

        self.data.db.set_settings(settings)
        return True, ''
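
A hypothetical call of the method; `rotki` stands in for the object that owns it, and only a field handled in the body above is set (assuming `ModifiableDBSettings` accepts keyword defaults):

# Hypothetical usage (`rotki` is a stand-in for the owning object):
success, msg = rotki.set_settings(ModifiableDBSettings(
    btc_derivation_gap_limit=20,
))
if not success:
    print(f'Failed to update settings: {msg}')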
Example #6
def query_historical_price(from_asset: Asset, to_asset: Asset,
                           timestamp: Timestamp):
    return PriceHistorian().query_historical_price(
        from_asset=from_asset,
        to_asset=to_asset,
        timestamp=timestamp,
    )
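
A hedged usage sketch of the wrapper; the asset constants and `Timestamp` are assumed imports from rotki, not part of the snippet above:

# Hypothetical call (A_BTC, A_USD and Timestamp imports assumed):
price = query_historical_price(
    from_asset=A_BTC,
    to_asset=A_USD,
    timestamp=Timestamp(1609459200),  # 2021-01-01 00:00:00 UTC
)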
Example #7
    def _update_events_value(
        self,
        staking_events: ADXStakingEvents,
    ) -> None:
        # Update amounts for unbonds and unbond requests
        bond_id_bond_map: Dict[str, Optional[Bond]] = {
            bond.bond_id: bond
            for bond in staking_events.bonds
        }
        for event in (
                staking_events.unbonds +
                staking_events.unbond_requests  # type: ignore # mypy bug concatenating lists
        ):
            has_bond = True
            bond = bond_id_bond_map.get(event.bond_id, None)
            if bond:
                event.value = Balance(amount=bond.value.amount)
                event.pool_id = bond.pool_id
            elif event.bond_id not in bond_id_bond_map:
                bond_id_bond_map[event.bond_id] = None
                db_bonds = cast(
                    List[Bond],
                    self.database.get_adex_events(
                        bond_id=event.bond_id,
                        event_type=AdexEventType.BOND,
                    ))
                if db_bonds:
                    db_bond = db_bonds[0]
                    bond_id_bond_map[event.bond_id] = db_bond
                    event.value = Balance(amount=db_bond.value.amount)
                    event.pool_id = db_bond.pool_id
                else:
                    has_bond = False
            else:
                has_bond = False

            if has_bond is False:
                log.warning(
                    'Failed to update AdEx event data: unable to find its related bond event',
                    event=event,
                )

        # Update usd_value for all events
        for event in staking_events.get_all():  # type: ignore # event can have all types
            token = event.token if isinstance(event, ChannelWithdraw) else A_ADX
            usd_price = PriceHistorian().query_historical_price(
                from_asset=token,
                to_asset=A_USD,
                timestamp=event.timestamp,
            )
            event.value.usd_value = event.value.amount * usd_price
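
The `bond_id_bond_map` logic above is a negative cache: a database miss is recorded as None so the expensive `get_adex_events` lookup runs at most once per bond_id. The pattern in isolation (a standalone sketch, not rotki code):

cache = {}
calls = []

def fetch(key):          # stands in for self.database.get_adex_events(...)
    calls.append(key)
    return []            # simulate a bond missing from the DB

def lookup(key):
    if key not in cache:
        rows = fetch(key)
        cache[key] = rows[0] if rows else None
    return cache[key]

lookup('bond-1')
lookup('bond-1')
assert calls == ['bond-1']  # only one expensive lookup despite two calls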
Example #8
    def process_gitcoin(
            self,
            from_ts: Optional[Timestamp],
            to_ts: Optional[Timestamp],
            grant_id: Optional[int],
    ) -> Tuple[Asset, Dict[int, GitcoinReport]]:
        """Processess gitcoin transactions in the given time range and creates a report"""
        actions = self.db_ledger.get_gitcoin_grant_events(
            grant_id=grant_id,
            from_ts=from_ts,
            to_ts=to_ts,
        )
        profit_currency = self.db.get_main_currency()
        reports: DefaultDict[int, GitcoinReport] = defaultdict(GitcoinReport)

        for entry in actions:
            balance = Balance(amount=entry.amount)
            if entry.rate_asset is None or entry.rate is None:
                log.error(
                    f'Found gitcoin ledger action for {entry.amount} {entry.asset} '
                    f'without a rate asset. Should not happen. Entry was '
                    f'possibly edited by hand. Skipping.',
                )
                continue

            report = reports[entry.extra_data.grant_id]  # type: ignore
            rate = entry.rate
            if entry.rate_asset != profit_currency:
                try:
                    profit_currency_in_rate_asset = PriceHistorian().query_historical_price(
                        from_asset=profit_currency,
                        to_asset=entry.rate_asset,
                        timestamp=entry.timestamp,
                    )
                except NoPriceForGivenTimestamp as e:
                    self.db.msg_aggregator.add_error(
                        f'{str(e)} when processing gitcoin entry. Skipping entry.',
                    )
                    continue
                rate = entry.rate / profit_currency_in_rate_asset  # type: ignore  # checked above

            value_in_profit_currency = entry.amount * rate
            balance.usd_value = value_in_profit_currency
            report.per_asset[entry.asset] += balance
            report.total += value_in_profit_currency

        return profit_currency, reports
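
A worked example of the rate conversion above, with illustrative numbers: `entry.rate` prices the donated asset in `rate_asset`, and dividing by the profit currency's price in that same `rate_asset` re-expresses the rate in the profit currency:

# Plain floats for brevity (the code above uses FVal); numbers illustrative:
rate_in_dai = 2.0     # entry.rate: 1 donated token = 2 DAI (the rate_asset)
eur_in_dai = 1.25     # query_historical_price(EUR -> DAI, entry.timestamp)
rate_in_eur = rate_in_dai / eur_in_dai
print(rate_in_eur)    # 1.6 EUR per donated token (EUR = profit currency)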
Example #9
    def get_main_currency_price(self,
                                timestamp: Timestamp) -> Tuple[Asset, Price]:
        """Gets the main currency and its equivalent price at a particular timestamp."""
        main_currency = self.db.get_main_currency()
        main_currency_price = None
        try:
            main_currency_price = PriceHistorian.query_historical_price(
                from_asset=A_USD,
                to_asset=main_currency,
                timestamp=timestamp,
            )
        except NoPriceForGivenTimestamp:
            main_currency_price = Price(ONE)
            self.msg_aggregator.add_error(
                f'Could not find price for timestamp {timestamp}. Using USD for export. '
                f'Please add a manual price from USD to your main currency {main_currency}',
            )
        return main_currency, main_currency_price
Example #10
    def get_fee_in_profit_currency(self, trade: Trade) -> Fee:
        """Get the profit_currency rate of the fee of the given trade

        May raise:
        - PriceQueryUnsupportedAsset if from/to asset is missing from all price oracles
        - NoPriceForGivenTimestamp if we can't find a price for the asset in the given
        timestamp from the price oracle
        - RemoteError if there is a problem reaching the price oracle server
        or with reading the response returned by the server
        """
        if trade.fee_currency is None or trade.fee is None:
            return Fee(ZERO)

        fee_rate = PriceHistorian().query_historical_price(
            from_asset=trade.fee_currency,
            to_asset=self.profit_currency,
            timestamp=trade.timestamp,
        )
        return Fee(fee_rate * trade.fee)
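
A worked example of the conversion, with illustrative numbers only:

# Fee of 0.001 BTC on a trade, profit currency EUR, BTC at 30000 EUR:
fee_rate = 30000.0     # query_historical_price(BTC -> EUR, trade.timestamp)
fee = 0.001            # trade.fee, denominated in trade.fee_currency (BTC)
print(fee_rate * fee)  # 30.0, returned as the Fee in the profit currency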
Example #11
File: events.py  Project: step21/rotki
    def get_rate_in_profit_currency(self, asset: Asset,
                                    timestamp: Timestamp) -> FVal:
        """Get the profit_currency price of asset in the given timestamp

        May raise:
        - PriceQueryUnsupportedAsset if from/to asset is missing from price oracles
        - NoPriceForGivenTimestamp if we can't find a price for the asset in the given
        timestamp from the price oracle
        - RemoteError if there is a problem reaching the price oracle server
        or with reading the response returned by the server
        """
        if asset == self.profit_currency:
            rate = FVal(1)
        else:
            rate = PriceHistorian().query_historical_price(
                from_asset=asset,
                to_asset=self.profit_currency,
                timestamp=timestamp,
            )
        return rate
Example #12
class BuchfinkDB(DBHandler):
    """
    This class is not very thought out and might need a refactor. Currently it
    does three things, namely:
    1) preparing classes from Rotki to be used by higher-level functions
    2) function as a Rotki DBHandler and provide data to Rotki classes
    3) load and parse Buchfink config
    """

    def __init__(self, data_directory='.'):
        # pylint: disable=super-init-not-called
        self.data_directory = Path(data_directory)
        with open(self.data_directory / 'buchfink.yaml', 'r') as cfg:
            yaml_config = yaml.load(cfg, Loader=yaml.SafeLoader)
        self.config = config_schema(yaml_config)
        self.accounts = accounts_from_config(self.config)  # type: List[Account]
        self._active_eth_address = None  # type: Optional[ChecksumEthAddress]

        self.reports_directory = self.data_directory / "reports"
        self.trades_directory = self.data_directory / "trades"
        self.cache_directory = self.data_directory / "cache"
        self.balances_directory = self.data_directory / "balances"
        self.annotations_directory = self.data_directory / "annotations"
        self.user_data_dir = self.data_directory / "user"

        self.reports_directory.mkdir(exist_ok=True)
        self.trades_directory.mkdir(exist_ok=True)
        self.balances_directory.mkdir(exist_ok=True)
        self.cache_directory.mkdir(exist_ok=True)
        (self.cache_directory / 'cryptocompare').mkdir(exist_ok=True)
        (self.cache_directory / 'history').mkdir(exist_ok=True)
        (self.cache_directory / 'inquirer').mkdir(exist_ok=True)
        (self.cache_directory / 'coingecko').mkdir(exist_ok=True)

        self.last_write_ts: Optional[Timestamp] = None

        self._amm_swaps = []  # type: List[AMMSwap]
        self._eth_tx = []  # type: List[EthereumTransaction]
        self._eth_receipts_store = pickledb.load(self.cache_directory / 'receipts.db', False)
        self.cryptocompare = Cryptocompare(self.cache_directory / 'cryptocompare', self)
        self.coingecko = Coingecko()
        self.historian = PriceHistorian(
                self.cache_directory / 'history',
                self.cryptocompare,
                self.coingecko
            )
        self.inquirer = Inquirer(self.cache_directory / 'inquirer',
                self.cryptocompare,
                self.coingecko
            )
        self.msg_aggregator = MessagesAggregator()
        self.greenlet_manager = GreenletManager(msg_aggregator=self.msg_aggregator)

        # Initialize blockchain querying modules
        self.etherscan = Etherscan(database=self, msg_aggregator=self.msg_aggregator)
        GlobalDBHandler._GlobalDBHandler__instance = None
        self.globaldb = GlobalDBHandler(self.cache_directory)
        self.asset_resolver = AssetResolver()
        self.assets_updater = AssetsUpdater(self.msg_aggregator)
        self.ethereum_manager = EthereumManager(
            database=self,
            ethrpc_endpoint=self.get_eth_rpc_endpoint(),
            etherscan=self.etherscan,
            msg_aggregator=self.msg_aggregator,
            greenlet_manager=self.greenlet_manager,
            connect_at_start=[]
        )
        self.inquirer.inject_ethereum(self.ethereum_manager)
        self.inquirer.set_oracles_order(self.get_settings().current_price_oracles)
        self.historian.set_oracles_order(self.get_settings().historical_price_oracles)
        self.beaconchain = BeaconChain(database=self, msg_aggregator=self.msg_aggregator)

    def __del__(self):
        pass

    def get_asset_by_symbol(self, symbol: str) -> Asset:
        # TODO: this indirection function could incorporate a custom mapping from yaml config
        return deserialize_asset(symbol)

    def get_main_currency(self):
        return self.get_settings().main_currency

    def get_eth_rpc_endpoint(self):
        return self.config['settings'].get('eth_rpc_endpoint', None)

    def get_all_accounts(self) -> List[Account]:
        return self.accounts

    def get_all_reports(self) -> Iterable[ReportConfig]:
        for report_info in self.config['reports']:
            yield ReportConfig(
                name=str(report_info['name']),
                title=report_info.get('title'),
                template=report_info.get('template'),
                from_dt=datetime.fromisoformat(str(report_info['from'])),
                to_dt=datetime.fromisoformat(str(report_info['to']))
            )

    def get_settings(self, have_premium: bool = False) -> DBSettings:
        clean_settings = dict(self.config['settings'])
        if 'external_services' in clean_settings:
            del clean_settings['external_services']

        return db_settings_from_dict(clean_settings, self.msg_aggregator)

    def get_ignored_assets(self):
        return []

    def get_external_service_credentials(
            self,
            service_name: ExternalService,
    ) -> Optional[ExternalServiceApiCredentials]:
        """If existing it returns the external service credentials for the given service"""
        short_name = service_name.name.lower()
        api_key = self.config['settings'].get('external_services', {}).get(short_name)
        if not api_key:
            return None
        return ExternalServiceApiCredentials(service=service_name, api_key=api_key)

    def get_accountant(self) -> Accountant:
        return Accountant(self, None, self.msg_aggregator, True, premium=None)

    def get_blockchain_accounts(self) -> BlockchainAccounts:
        if self._active_eth_address:
            return BlockchainAccounts(eth=[self._active_eth_address], btc=[], ksm=[])
        return BlockchainAccounts(eth=[], btc=[], ksm=[])

    def get_trades_from_file(self, trades_file) -> List[Trade]:
        def safe_deserialize_trade(trade):
            try:
                return deserialize_trade(trade)
            except UnknownAsset:
                logger.warning('Ignoring trade with unknown asset: %s', trade)
                return None

        with open(trades_file, 'r') as trades_fd:
            exchange = yaml.load(trades_fd, Loader=yaml.SafeLoader)

        trades = [
            safe_deserialize_trade(trade)
            for trade in exchange.get('trades', [])
        ]
        trades += [
            safe_deserialize_trade(trade)
            for trade in exchange.get('actions', [])
            if 'buy' in trade or 'sell' in trade
        ]
        return [trade for trade in trades if trade is not None]

    def get_local_trades_for_account(self, account_name: Union[str, Account]) -> List[Trade]:
        if isinstance(account_name, str):
            account = [a for a in self.accounts if a.name == account_name][0]  # type: Account
        else:
            account = account_name

        if account.account_type == 'file':
            trades_file = os.path.join(self.data_directory, account.config['file'])
            return self.get_trades_from_file(trades_file)

        trades_file = os.path.join(self.data_directory, 'trades', account.name + '.yaml')

        if os.path.exists(trades_file):
            return self.get_trades_from_file(trades_file)

        return []

    def get_actions_from_file(self, actions_file):
        def safe_deserialize_ledger_action(action):
            if 'buy' in action or 'sell' in action:
                return None
            try:
                return deserialize_ledger_action(action)
            except UnknownAsset:
                logger.warning('Ignoring ledger action with unknown asset: %s', action)
                return None
        with open(actions_file, 'r') as actions_fd:
            exchange = yaml.load(actions_fd, Loader=yaml.SafeLoader)
        return [ser_action
                for ser_action in [
                    safe_deserialize_ledger_action(action)
                    for action in exchange.get('actions', [])
                ]
                if ser_action is not None]

    def get_local_ledger_actions_for_account(self, account_name: Union[str, Account]) \
            -> List[LedgerAction]:
        if isinstance(account_name, str):
            account = [a for a in self.accounts if a.name == account_name][0]  # type: Account
        else:
            account = account_name

        if account.account_type == 'file':
            actions_file = self.data_directory / account.config['file']
            if actions_file.exists():
                return self.get_actions_from_file(actions_file)

        else:
            actions_file = self.data_directory / f'actions/{account.name}.yaml'
            if actions_file.exists():
                return self.get_actions_from_file(actions_file)

        return []

    def get_chain_manager(self, account: Account) -> ChainManager:
        if account.account_type == "ethereum":
            accounts = BlockchainAccounts(eth=[account.address], btc=[], ksm=[])
        elif account.account_type == "bitcoin":
            accounts = BlockchainAccounts(eth=[], btc=[account.address], ksm=[])
        else:
            raise ValueError('Unable to create chain manager for account')

        # Eventually we should allow premium credentials in the config file
        premium = False

        eth_modules = self.get_settings().active_modules
        if not premium:
            eth_modules = [mod for mod in eth_modules if mod not in PREMIUM_ONLY_ETH_MODULES]

        logger.debug('Creating ChainManager with modules: %s', eth_modules)

        manager = ChainManager(
            database=self,
            blockchain_accounts=accounts,
            beaconchain=self.beaconchain,
            data_directory=self.data_directory,
            ethereum_manager=self.ethereum_manager,
            kusama_manager=None,
            msg_aggregator=self.msg_aggregator,
            btc_derivation_gap_limit=self.get_settings().btc_derivation_gap_limit,
            greenlet_manager=self.greenlet_manager,
            premium=premium,
            eth_modules=eth_modules
        )
        # Monkey-patch function that uses singleton
        manager.queried_addresses_for_module = lambda self, module=None: [account]
        return manager

    def get_exchange(self, account: str) -> ExchangeInterface:

        account_info = [a for a in self.config['accounts'] if a['name'] == account][0]
        exchange_opts = dict(
            name=account_info['name'],
            api_key=str(account_info['api_key']),
            secret=str(account_info['secret']).encode(),
            database=self,
            msg_aggregator=self.msg_aggregator
        )

        if account_info['exchange'] == 'kraken':
            exchange = Kraken(**exchange_opts)
        elif account_info['exchange'] == 'binance':
            exchange = Binance(**exchange_opts)
        elif account_info['exchange'] == 'coinbase':
            exchange = Coinbase(**exchange_opts)
        elif account_info['exchange'] == 'coinbasepro':
            exchange = Coinbasepro(**exchange_opts, passphrase=str(account_info['passphrase']))
        elif account_info['exchange'] == 'gemini':
            exchange = Gemini(**exchange_opts)
        elif account_info['exchange'] == 'bitmex':
            exchange = Bitmex(**exchange_opts)
        elif account_info['exchange'] == 'bittrex':
            exchange = Bittrex(**exchange_opts)
        elif account_info['exchange'] == 'poloniex':
            exchange = Poloniex(**exchange_opts)
        elif account_info['exchange'] == 'bitcoinde':
            exchange = Bitcoinde(**exchange_opts)
        elif account_info['exchange'] == 'iconomi':
            exchange = Iconomi(**exchange_opts)
        else:
            raise ValueError("Unknown exchange: " + account_info['exchange'])

        return exchange

    def get_tokens_for_address_if_time(self, address, current_time):
        return None

    def save_tokens_for_address(self, address, tokens):
        pass

    def query_balances(self, account) -> BalanceSheet:
        if account.account_type == "exchange":
            exchange = self.get_exchange(account.name)

            api_key_is_valid, error = exchange.validate_api_key()

            if not api_key_is_valid:
                raise RuntimeError(error)

            balances, error = exchange.query_balances()

            if not error:
                logger.info(
                        'Fetched balances for %d assets from %s',
                        len(balances.keys()),
                        account.name
                    )
                return BalanceSheet(assets=balances, liabilities={})

            raise RuntimeError(error)

        if account.account_type == "ethereum":
            manager = self.get_chain_manager(account)

            # This is a little hack because query_balances sometimes hooks back
            # into our get_blockchain_accounts() without providing context (for
            # example from the makerdao module).
            self._active_eth_address = account.address
            manager.query_balances(
                blockchain=SupportedBlockchain.ETHEREUM,
                force_token_detection=True
            )
            self._active_eth_address = None

            return reduce(operator.add, manager.balances.eth.values())

        if account.account_type == "bitcoin":
            manager = self.get_chain_manager(account)
            manager.query_balances()
            btc = Asset('BTC')

            return BalanceSheet(assets={
                btc: reduce(operator.add, manager.balances.btc.values())
            }, liabilities={})

        if account.account_type == "file":
            return self.get_balances_from_file(account.config['file'])

        return BalanceSheet(assets={}, liabilities={})

    def fetch_balances(self, account):
        query_sheet = self.query_balances(account)
        path = self.annotations_directory / (account.name + '.yaml')
        if path.exists():
            query_sheet += self.get_balances_from_file(path)
        self.write_balances(account, query_sheet)

    def get_balances(self, account) -> BalanceSheet:
        path = self.balances_directory / (account.name + '.yaml')
        if path.exists():
            return self.get_balances_from_file(path)
        return BalanceSheet(assets={}, liabilities={})

    def get_balances_from_file(self, path) -> BalanceSheet:
        with open(path, 'r') as balances_fd:
            account = yaml.load(balances_fd, Loader=yaml.SafeLoader)
        assets = {}  # type: Dict[Asset, Balance]
        liabilities = {}  # type: Dict[Asset, Balance]

        if 'balances' in account:
            logger.warning('Found deprecated key "balances", please use "assets" instead.')
            for balance in account['balances']:
                balance, asset = deserialize_balance(balance, self)
                if asset in assets:
                    assets[asset] += balance
                else:
                    assets[asset] = balance

        if 'assets' in account:
            for balance in account['assets']:
                balance, asset = deserialize_balance(balance, self)
                if asset in assets:
                    assets[asset] += balance
                else:
                    assets[asset] = balance

        if 'liabilities' in account:
            for balance in account['liabilities']:
                balance, asset = deserialize_balance(balance, self)
                if asset in liabilities:
                    liabilities[asset] += balance
                else:
                    liabilities[asset] = balance

        return BalanceSheet(assets=assets, liabilities=liabilities)

    def write_balances(self, account: Account, balances: BalanceSheet):
        path = self.balances_directory / (account.name + '.yaml')

        with path.open('w') as balances_file:
            yaml.dump(serialize_balances(balances), stream=balances_file)

    def get_amm_swaps(
            self,
            from_ts: Optional[Timestamp] = None,
            to_ts: Optional[Timestamp] = None,
            location: Optional[Location] = None,
            address: Optional[ChecksumEthAddress] = None,
    ) -> List[AMMSwap]:
        return self._amm_swaps

    def add_amm_swaps(self, swaps: List[AMMSwap]) -> None:
        self._amm_swaps = []
        self._amm_swaps.extend(swaps)

    def update_used_query_range(self, name: str, start_ts: Timestamp, end_ts: Timestamp) -> None:
        pass

    def update_used_block_query_range(self, name: str, from_block: int, to_block: int) -> None:
        pass

    def get_used_query_range(self, name: str) -> Optional[Tuple[Timestamp, Timestamp]]:
        return None

    def get_ethereum_transaction_receipt(self, tx_hash: str, manager: ChainManager):
        receipt = self._eth_receipts_store.get(tx_hash)
        if receipt:
            return receipt

        receipt = manager.ethereum.get_transaction_receipt(tx_hash)
        self._eth_receipts_store.set(tx_hash, receipt)
        self._eth_receipts_store.dump()
        return receipt

    def get_ignored_action_ids(
            self,
            action_type: Optional[ActionType],
            ) -> Dict[ActionType, List[str]]:
        return {}

    def add_ethereum_transactions(
            self,
            ethereum_transactions: List[EthereumTransaction],
            from_etherscan: bool,
    ) -> None:
        self._eth_tx = []
        self._eth_tx.extend(ethereum_transactions)

    def get_ethereum_transactions(
            self,
            from_ts: Optional[Timestamp] = None,
            to_ts: Optional[Timestamp] = None,
            address: Optional[ChecksumEthAddress] = None,
    ) -> List[EthereumTransaction]:
        return self._eth_tx

    def perform_assets_updates(self):
        self.assets_updater.perform_update(None, 'remote')
        for token_data in self.config.get('tokens', []):
            eth_token = deserialize_ethereum_token(token_data)
            identifier = '_ceth_' + eth_token.address

            try:
                self.get_asset_by_symbol(identifier)
                logger.debug('Asset already exists: %s', eth_token)
            except UnknownAsset:
                self.globaldb.add_asset(identifier, AssetType.ETHEREUM_TOKEN, eth_token)
                try:
                    self.get_asset_by_symbol(identifier)
                except UnknownAsset as exc:
                    raise ValueError('Unable to add asset: ' + str(eth_token)) from exc

            self.asset_resolver.clean_memory_cache()
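
A hypothetical instantiation of the class; it assumes a parseable `buchfink.yaml` in the given directory, since `__init__` reads it immediately:

# Hypothetical usage; requires ./buchfink.yaml to exist and validate:
db = BuchfinkDB(data_directory='.')
print(db.get_main_currency())
for account in db.get_all_accounts():
    print(account.name)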
Example #13
    def process_trades(
        db: DBHandler,
        timestamp: Timestamp,
        data: List[BinanceCsvRow],
    ) -> List[Trade]:
        """Processes multiple rows data and stores it into rotki's trades
        Each row has format: {'Operation': ..., 'Change': ..., 'Coin': ...}
        Change is amount, Coin is asset
        If amount is negative then this asset is sold, otherwise it's bought
        """
        # Because we can get mixed data (e.g. multiple Buys or Sells at a single timestamp) we
        # need to group it somehow. We do it by pairing the highest bought with the highest
        # sold value. We query the USD equivalent of each amount because different Sells / Buys
        # may use different assets.

        # Querying a price for the first time can take long, so we would like to avoid it.
        # Therefore we check whether all Buys / Sells use the same asset.
        # If so, we can group by the original amounts.

        # Checking assets
        same_assets = True
        assets: Dict[str, Optional[Asset]] = defaultdict(lambda: None)
        for row in data:
            if row['Operation'] == 'Fee':
                cur_operation = 'Fee'
            elif row['Change'] < 0:
                cur_operation = 'Sold'
            else:
                cur_operation = 'Bought'
            assets[cur_operation] = assets[cur_operation] or row['Coin']
            if assets[cur_operation] != row['Coin']:
                same_assets = False
                break

        # Querying usd value if needed
        if same_assets is False:
            for row in data:
                try:
                    price = PriceHistorian.query_historical_price(
                        from_asset=row['Coin'],
                        to_asset=A_USD,
                        timestamp=timestamp,
                    )
                except NoPriceForGivenTimestamp:
                    # If we can't find price we can't group, so we quit the method
                    log.warning(f'Couldn\'t find price of {row["Coin"]} on {timestamp}')
                    return []
                row['usd_value'] = row['Change'] * price

        # Group rows depending on whether they are fee or not and then sort them by amount
        rows_grouped_by_fee: Dict[bool, List[BinanceCsvRow]] = defaultdict(list)
        for row in data:
            is_fee = row['Operation'] == 'Fee'
            rows_grouped_by_fee[is_fee].append(row)

        for rows_group in rows_grouped_by_fee.values():
            rows_group.sort(key=lambda x: x['Change'] if same_assets else x['usd_value'], reverse=True)  # noqa: E501

        # Group rows by combining the highest sold with the highest bought and the highest fee
        # The fee is used only if one was provided (checked via "True in rows_grouped_by_fee")
        grouped_trade_rows = []
        while len(rows_grouped_by_fee[False]) > 0:
            cur_batch = [rows_grouped_by_fee[False].pop(), rows_grouped_by_fee[False].pop(0)]
            if True in rows_grouped_by_fee:
                cur_batch.append(rows_grouped_by_fee[True].pop())
            grouped_trade_rows.append(cur_batch)

        # Creating trades structures based on grouped rows data
        raw_trades: List[Trade] = []
        for trade_rows in grouped_trade_rows:
            to_asset: Optional[Asset] = None
            to_amount: Optional[AssetAmount] = None
            from_asset: Optional[Asset] = None
            from_amount: Optional[AssetAmount] = None
            fee_asset: Optional[Asset] = None
            fee_amount: Optional[Fee] = None
            trade_type: Optional[TradeType] = None

            for row in trade_rows:
                cur_asset = row['Coin']
                amount = row['Change']
                if row['Operation'] == 'Fee':
                    fee_asset = cur_asset
                    fee_amount = Fee(amount)
                else:
                    trade_type = TradeType.SELL if row['Operation'] == 'Sell' else TradeType.BUY  # noqa:  E501
                    if amount < 0:
                        from_asset = cur_asset
                        from_amount = AssetAmount(-amount)
                    else:
                        to_asset = cur_asset
                        to_amount = amount

            # Validate that we have received proper assets and amounts.
            # There may be no fee, so we don't validate it
            if (
                to_asset is None or from_asset is None or trade_type is None or
                to_amount is None or to_amount == ZERO or
                from_amount is None or from_amount == ZERO
            ):
                log.warning(
                    f'Skipped binance rows {data} because '
                    f'it didn\'t have enough data',
                )
                db.msg_aggregator.add_warning('Skipped some rows because couldn\'t find amounts or it was zero')  # noqa: E501
                continue

            rate = to_amount / from_amount
            trade = Trade(
                timestamp=timestamp,
                location=Location.BINANCE,
                trade_type=trade_type,
                base_asset=to_asset,
                quote_asset=from_asset,
                amount=to_amount,
                rate=Price(rate),
                fee_currency=fee_asset,
                fee=fee_amount,
                link='',
                notes='Imported from binance CSV file. Binance operation: Buy / Sell',
            )
            raw_trades.append(trade)

        # Sometimes we can get absolutely identical trades (including the timestamp) but the
        # database allows us to add only one of them, so we combine them into a single trade.
        # First step: group trades
        grouped_trades: Dict[TradeID, List[Trade]] = defaultdict(list)
        for trade in raw_trades:
            grouped_trades[trade.identifier].append(trade)

        # Second step: combine them
        unique_trades = []
        for trades_group in grouped_trades.values():
            result_trade = trades_group[0]
            for trade in trades_group[1:]:
                result_trade.amount = AssetAmount(result_trade.amount + trade.amount)  # noqa: E501
                if result_trade.fee is not None and trade.fee is not None:
                    result_trade.fee = Fee(result_trade.fee + trade.fee)
            unique_trades.append(result_trade)

        return unique_trades
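
The pairing strategy in isolation: after the descending sort, `pop()` takes the last element (the largest sell, i.e. the most negative change) and `pop(0)` the first (the largest buy). A standalone sketch with plain numbers:

rows = [5, 3, -2, -7]      # positive = bought, negative = sold
rows.sort(reverse=True)    # [5, 3, -2, -7]
batches = []
while rows:
    batches.append([rows.pop(), rows.pop(0)])  # (largest sell, largest buy)
print(batches)             # [[-7, 5], [-2, 3]]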
Example #14
    def get_staking_history(
        self,
        addresses: List[ChecksumEthAddress],
        from_timestamp: Timestamp,
        to_timestamp: Timestamp,
    ) -> Dict[ChecksumEthAddress, List[LiquityEvent]]:
        try:
            staked = self._get_raw_history(addresses, 'stake')
        except RemoteError as e:
            log.error(
                f'Failed to query stake graph events for liquity. {str(e)}')
            staked = {}

        result: Dict[ChecksumEthAddress, List[LiquityEvent]] = defaultdict(list)
        for stake in staked.get('lqtyStakes', []):
            owner = to_checksum_address(stake['id'])
            for change in stake['changes']:
                try:
                    timestamp = change['transaction']['timestamp']
                    if timestamp < from_timestamp:
                        continue
                    if timestamp > to_timestamp:
                        break
                    operation_stake = LiquityStakeEventType.deserialize(
                        change['stakeOperation'])
                    lqty_price = PriceHistorian().query_historical_price(
                        from_asset=A_LQTY,
                        to_asset=A_USD,
                        timestamp=timestamp,
                    )
                    lusd_price = PriceHistorian().query_historical_price(
                        from_asset=A_LUSD,
                        to_asset=A_USD,
                        timestamp=timestamp,
                    )
                    stake_after = deserialize_optional_to_fval(
                        value=change['stakedAmountAfter'],
                        name='stakedAmountAfter',
                        location='liquity',
                    )
                    stake_change = deserialize_optional_to_fval(
                        value=change['stakedAmountChange'],
                        name='stakedAmountChange',
                        location='liquity',
                    )
                    issuance_gain = deserialize_optional_to_fval(
                        value=change['issuanceGain'],
                        name='issuanceGain',
                        location='liquity',
                    )
                    redemption_gain = deserialize_optional_to_fval(
                        value=change['redemptionGain'],
                        name='redemptionGain',
                        location='liquity',
                    )
                    stake_event = LiquityStakeEvent(
                        kind='stake',
                        tx=change['transaction']['id'],
                        address=owner,
                        timestamp=timestamp,
                        stake_after=AssetBalance(
                            asset=A_LQTY,
                            balance=Balance(
                                amount=stake_after,
                                usd_value=lqty_price * stake_after,
                            ),
                        ),
                        stake_change=AssetBalance(
                            asset=A_LQTY,
                            balance=Balance(
                                amount=stake_change,
                                usd_value=lqty_price * stake_change,
                            ),
                        ),
                        issuance_gain=AssetBalance(
                            asset=A_LUSD,
                            balance=Balance(
                                amount=issuance_gain,
                                usd_value=lusd_price * issuance_gain,
                            ),
                        ),
                        redemption_gain=AssetBalance(
                            asset=A_LUSD,
                            balance=Balance(
                                amount=redemption_gain,
                                usd_value=lusd_price * redemption_gain,
                            ),
                        ),
                        stake_operation=operation_stake,
                        sequence_number=str(
                            change['transaction']['sequenceNumber']),
                    )
                    result[owner].append(stake_event)
                except (DeserializationError, KeyError) as e:
                    msg = str(e)
                    log.debug(f'Failed to deserialize Liquity entry: {change}')
                    if isinstance(e, KeyError):
                        msg = f'Missing key entry for {msg}.'
                    self.msg_aggregator.add_warning(
                        f'Ignoring Liquity Stake event in Liquity. '
                        f'Failed to decode remote information. {msg}.', )
                    continue
        return result
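
The timestamp filter above relies on each stake's changes being ordered by timestamp: `continue` skips events before the window and `break` stops at the first event past it (Example #15 below uses the same pattern). In isolation:

timestamps = [10, 20, 30, 40]    # assumed sorted ascending
from_ts, to_ts = 15, 35
selected = []
for ts in timestamps:
    if ts < from_ts:
        continue
    if ts > to_ts:
        break                    # nothing later can be in range
    selected.append(ts)
assert selected == [20, 30]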
Example #15
    def get_trove_history(
        self,
        addresses: List[ChecksumEthAddress],
        from_timestamp: Timestamp,
        to_timestamp: Timestamp,
    ) -> Dict[ChecksumEthAddress, List[LiquityEvent]]:
        addresses_to_query = list(addresses)
        proxied_addresses = self._get_accounts_having_proxy()
        proxies_to_address = {v: k for k, v in proxied_addresses.items()}
        addresses_to_query += proxied_addresses.values()

        try:
            query = self._get_raw_history(addresses_to_query, 'trove')
        except RemoteError as e:
            log.error(
                f'Failed to query trove graph events for liquity. {str(e)}')
            query = {}

        result: Dict[ChecksumEthAddress, List[LiquityEvent]] = defaultdict(list)
        for trove in query.get('troves', []):
            owner = to_checksum_address(trove['owner']['id'])
            if owner in proxies_to_address:
                owner = proxies_to_address[owner]
            for change in trove['changes']:
                try:
                    timestamp = change['transaction']['timestamp']
                    if timestamp < from_timestamp:
                        continue
                    if timestamp > to_timestamp:
                        break
                    operation = TroveOperation.deserialize(
                        change['troveOperation'])
                    collateral_change = deserialize_optional_to_fval(
                        value=change['collateralChange'],
                        name='collateralChange',
                        location='liquity',
                    )
                    debt_change = deserialize_optional_to_fval(
                        value=change['debtChange'],
                        name='debtChange',
                        location='liquity',
                    )
                    lusd_price = PriceHistorian().query_historical_price(
                        from_asset=A_LUSD,
                        to_asset=A_USD,
                        timestamp=timestamp,
                    )
                    eth_price = PriceHistorian().query_historical_price(
                        from_asset=A_ETH,
                        to_asset=A_USD,
                        timestamp=timestamp,
                    )
                    debt_after_amount = deserialize_optional_to_fval(
                        value=change['debtAfter'],
                        name='debtAfter',
                        location='liquity',
                    )
                    collateral_after_amount = deserialize_optional_to_fval(
                        value=change['collateralAfter'],
                        name='collateralAfter',
                        location='liquity',
                    )
                    event = LiquityTroveEvent(
                        kind='trove',
                        tx=change['transaction']['id'],
                        address=owner,
                        timestamp=timestamp,
                        debt_after=AssetBalance(
                            asset=A_LUSD,
                            balance=Balance(
                                amount=debt_after_amount,
                                usd_value=lusd_price * debt_after_amount,
                            ),
                        ),
                        collateral_after=AssetBalance(
                            asset=A_ETH,
                            balance=Balance(
                                amount=collateral_after_amount,
                                usd_value=eth_price * collateral_after_amount,
                            ),
                        ),
                        debt_delta=AssetBalance(
                            asset=A_LUSD,
                            balance=Balance(
                                amount=debt_change,
                                usd_value=lusd_price * debt_change,
                            ),
                        ),
                        collateral_delta=AssetBalance(
                            asset=A_ETH,
                            balance=Balance(
                                amount=collateral_change,
                                usd_value=eth_price * collateral_change,
                            ),
                        ),
                        trove_operation=operation,
                        sequence_number=str(change['sequenceNumber']),
                    )
                    result[owner].append(event)
                except (DeserializationError, KeyError) as e:
                    log.debug(
                        f'Failed to deserialize Liquity trove event: {change}')
                    msg = str(e)
                    if isinstance(e, KeyError):
                        msg = f'Missing key entry for {msg}.'
                    self.msg_aggregator.add_warning(
                        f'Ignoring Liquity Trove event in Liquity. '
                        f'Failed to decode remote information. {msg}.', )
                    continue

        return result
Example #16
def initialize_mock_rotkehlchen_instance(
    rotki,
    start_with_logged_in_user,
    start_with_valid_premium,
    db_password,
    rotki_premium_credentials,
    username,
    blockchain_accounts,
    include_etherscan_key,
    include_cryptocompare_key,
    should_mock_price_queries,
    mocked_price_queries,
    ethereum_modules,
    db_settings,
    ignored_assets,
    tags,
    manually_tracked_balances,
    default_mock_price_value,
    ethereum_manager_connect_at_start,
    kusama_manager_connect_at_start,
    eth_rpc_endpoint,
    ksm_rpc_endpoint,
    aave_use_graph,
    max_tasks_num,
):
    if not start_with_logged_in_user:
        return

    # Mock the initial get settings to include the specified ethereum modules
    def mock_get_settings() -> DBSettings:
        settings = DBSettings(
            active_modules=ethereum_modules,
            eth_rpc_endpoint=eth_rpc_endpoint,
            ksm_rpc_endpoint=ksm_rpc_endpoint,
        )
        return settings

    settings_patch = patch.object(rotki,
                                  'get_settings',
                                  side_effect=mock_get_settings)

    # Do not connect to the usual nodes at start by default. We do not want to
    # spam them during our tests. It's configurable per test, with the default
    # being no connections.
    eth_rpcconnect_patch = patch(
        'rotkehlchen.rotkehlchen.ETHEREUM_NODES_TO_CONNECT_AT_START',
        new=ethereum_manager_connect_at_start,
    )
    ksm_rpcconnect_patch = patch(
        'rotkehlchen.rotkehlchen.KUSAMA_NODES_TO_CONNECT_AT_START',
        new=kusama_manager_connect_at_start,
    )
    ksm_connect_on_startup_patch = patch.object(
        rotki,
        '_connect_ksm_manager_on_startup',
        return_value=bool(blockchain_accounts.ksm),
    )
    # patch the constants to make sure that the periodic query for icons
    # does not run during tests
    size_patch = patch('rotkehlchen.rotkehlchen.ICONS_BATCH_SIZE', new=0)
    sleep_patch = patch('rotkehlchen.rotkehlchen.ICONS_QUERY_SLEEP',
                        new=999999)
    with settings_patch, eth_rpcconnect_patch, ksm_rpcconnect_patch, ksm_connect_on_startup_patch, size_patch, sleep_patch:  # noqa: E501
        rotki.unlock_user(
            user=username,
            password=db_password,
            create_new=True,
            sync_approval='no',
            premium_credentials=None,
        )
    # configure when task manager should run for tests
    rotki.task_manager.max_tasks_num = max_tasks_num

    if start_with_valid_premium:
        rotki.premium = Premium(rotki_premium_credentials)
        rotki.premium_sync_manager.premium = rotki.premium

    # After unlocking when all objects are created we need to also include
    # customized fixtures that may have been set by the tests
    rotki.chain_manager.accounts = blockchain_accounts
    add_settings_to_test_db(rotki.data.db, db_settings, ignored_assets)
    maybe_include_etherscan_key(rotki.data.db, include_etherscan_key)
    maybe_include_cryptocompare_key(rotki.data.db, include_cryptocompare_key)
    add_blockchain_accounts_to_db(rotki.data.db, blockchain_accounts)
    add_tags_to_test_db(rotki.data.db, tags)
    add_manually_tracked_balances_to_test_db(rotki.data.db,
                                             manually_tracked_balances)
    maybe_mock_historical_price_queries(
        historian=PriceHistorian(),
        should_mock_price_queries=should_mock_price_queries,
        mocked_price_queries=mocked_price_queries,
        default_mock_value=default_mock_price_value,
    )
    wait_until_all_nodes_connected(
        ethereum_manager_connect_at_start=ethereum_manager_connect_at_start,
        ethereum=rotki.chain_manager.ethereum,
    )
    wait_until_all_substrate_nodes_connected(
        substrate_manager_connect_at_start=kusama_manager_connect_at_start,
        substrate_manager=rotki.chain_manager.kusama,
    )

    aave = rotki.chain_manager.get_module('aave')
    if aave:
        aave.use_graph = aave_use_graph
Example #17
    def unlock_user(
        self,
        user: str,
        password: str,
        create_new: bool,
        sync_approval: Literal['yes', 'no', 'unknown'],
        premium_credentials: Optional[PremiumCredentials],
        initial_settings: Optional[ModifiableDBSettings] = None,
    ) -> None:
        """Unlocks an existing user or creates a new one if `create_new` is True

        May raise:
        - AuthenticationError if the password can't unlock the database.
        - PremiumAuthenticationError if premium_credentials are given and are invalid
        or can't authenticate with the server
        - DBUpgradeError if the rotki DB version is newer than the software or
        there is a DB upgrade and there is an error.
        - SystemPermissionError if the directory or DB file can not be accessed
        """
        log.info(
            'Unlocking user',
            user=user,
            create_new=create_new,
            sync_approval=sync_approval,
            initial_settings=initial_settings,
        )

        # unlock or create the DB
        self.password = password
        self.user_directory = self.data.unlock(user, password, create_new,
                                               initial_settings)
        self.data_importer = DataImporter(db=self.data.db)
        self.last_data_upload_ts = self.data.db.get_last_data_upload_ts()
        self.premium_sync_manager = PremiumSyncManager(data=self.data,
                                                       password=password)
        # set the DB in the external services instances that need it
        self.cryptocompare.set_database(self.data.db)

        # Anything that was set above here has to be cleaned in case of failure in the next step
        # by reset_after_failed_account_creation_or_login()
        try:
            self.premium = self.premium_sync_manager.try_premium_at_start(
                given_premium_credentials=premium_credentials,
                username=user,
                create_new=create_new,
                sync_approval=sync_approval,
            )
        except PremiumAuthenticationError:
            # Reraise it only if this is during the creation of a new account where
            # the premium credentials were given by the user
            if create_new:
                raise
            self.msg_aggregator.add_warning(
                'Could not authenticate the Rotki premium API keys found in the DB.'
                ' Has your subscription expired?', )
            # else let's just continue. The user signed in successfully, but just
            # has unauthenticatable/invalid premium credentials remaining in the DB

        settings = self.get_settings()
        self.greenlet_manager.spawn_and_track(
            after_seconds=None,
            task_name='submit_usage_analytics',
            exception_is_error=False,
            method=maybe_submit_usage_analytics,
            should_submit=settings.submit_usage_analytics,
        )
        self.etherscan = Etherscan(database=self.data.db,
                                   msg_aggregator=self.msg_aggregator)
        self.beaconchain = BeaconChain(database=self.data.db,
                                       msg_aggregator=self.msg_aggregator)
        eth_rpc_endpoint = settings.eth_rpc_endpoint
        # Initialize the price historian singleton
        PriceHistorian(
            data_directory=self.data_dir,
            cryptocompare=self.cryptocompare,
            coingecko=self.coingecko,
        )
        PriceHistorian().set_oracles_order(settings.historical_price_oracles)

        self.accountant = Accountant(
            db=self.data.db,
            user_directory=self.user_directory,
            msg_aggregator=self.msg_aggregator,
            create_csv=True,
            premium=self.premium,
        )

        # Initialize the rotkehlchen logger
        LoggingSettings(anonymized_logs=settings.anonymized_logs)
        exchange_credentials = self.data.db.get_exchange_credentials()
        self.exchange_manager.initialize_exchanges(
            exchange_credentials=exchange_credentials,
            database=self.data.db,
        )

        # Initialize blockchain querying modules
        ethereum_manager = EthereumManager(
            ethrpc_endpoint=eth_rpc_endpoint,
            etherscan=self.etherscan,
            database=self.data.db,
            msg_aggregator=self.msg_aggregator,
            greenlet_manager=self.greenlet_manager,
            connect_at_start=ETHEREUM_NODES_TO_CONNECT_AT_START,
        )
        kusama_manager = SubstrateManager(
            chain=SubstrateChain.KUSAMA,
            msg_aggregator=self.msg_aggregator,
            greenlet_manager=self.greenlet_manager,
            connect_at_start=KUSAMA_NODES_TO_CONNECT_AT_START,
            connect_on_startup=self._connect_ksm_manager_on_startup(),
            own_rpc_endpoint=settings.ksm_rpc_endpoint,
        )

        Inquirer().inject_ethereum(ethereum_manager)
        Inquirer().set_oracles_order(settings.current_price_oracles)

        self.chain_manager = ChainManager(
            blockchain_accounts=self.data.db.get_blockchain_accounts(),
            ethereum_manager=ethereum_manager,
            kusama_manager=kusama_manager,
            msg_aggregator=self.msg_aggregator,
            database=self.data.db,
            greenlet_manager=self.greenlet_manager,
            premium=self.premium,
            eth_modules=settings.active_modules,
            data_directory=self.data_dir,
            beaconchain=self.beaconchain,
            btc_derivation_gap_limit=settings.btc_derivation_gap_limit,
        )
        self.events_historian = EventsHistorian(
            user_directory=self.user_directory,
            db=self.data.db,
            msg_aggregator=self.msg_aggregator,
            exchange_manager=self.exchange_manager,
            chain_manager=self.chain_manager,
        )
        self.task_manager = TaskManager(
            max_tasks_num=DEFAULT_MAX_TASKS_NUM,
            greenlet_manager=self.greenlet_manager,
            api_task_greenlets=self.api_task_greenlets,
            database=self.data.db,
            cryptocompare=self.cryptocompare,
            premium_sync_manager=self.premium_sync_manager,
            chain_manager=self.chain_manager,
            exchange_manager=self.exchange_manager,
        )
        self.greenlet_manager.spawn_and_track(
            after_seconds=5,
            task_name='periodically_query_icons_until_all_cached',
            exception_is_error=False,
            method=self.icon_manager.periodically_query_icons_until_all_cached,
            batch_size=ICONS_BATCH_SIZE,
            sleep_time_secs=ICONS_QUERY_SLEEP,
        )
        self.user_is_logged_in = True
        log.debug('User unlocking complete')
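
The unlock flow above leans on PriceHistorian being a singleton: the first call
constructs it with its dependencies, and every later no-argument call returns
that same configured instance. A minimal sketch of the pattern, assuming the
rotkehlchen-style import paths and asset constants below (both are
illustrative, not the exact module layout):

# Sketch only; the import paths and A_BTC/A_USD are assumptions.
from rotkehlchen.history.price import PriceHistorian
from rotkehlchen.constants.assets import A_BTC, A_USD

def setup_price_historian(data_dir, cryptocompare, coingecko, settings):
    # The first call wires in the dependencies and caches the instance.
    PriceHistorian(
        data_directory=data_dir,
        cryptocompare=cryptocompare,
        coingecko=coingecko,
    )
    # Later no-arg calls return that same instance, so configuration and
    # queries need no explicit reference passed around.
    PriceHistorian().set_oracles_order(settings.historical_price_oracles)

def query_btc_usd(timestamp):
    # Any module can reach the shared historian the same way.
    return PriceHistorian().query_historical_price(
        from_asset=A_BTC,
        to_asset=A_USD,
        timestamp=timestamp,
    )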
Example #18
0
    def __init__(self, data_directory='.'):
        # pylint: disable=super-init-not-called
        self.data_directory = Path(data_directory)
        with open(self.data_directory / 'buchfink.yaml', 'r') as cfg:
            yaml_config = yaml.load(cfg, Loader=yaml.SafeLoader)
        self.config = config_schema(yaml_config)
        self.accounts = accounts_from_config(self.config)  # type: List[Account]
        self._active_eth_address = None  # type: Optional[ChecksumEthAddress]

        self.reports_directory = self.data_directory / "reports"
        self.trades_directory = self.data_directory / "trades"
        self.cache_directory = self.data_directory / "cache"
        self.balances_directory = self.data_directory / "balances"
        self.annotations_directory = self.data_directory / "annotations"
        self.user_data_dir = self.data_directory / "user"

        self.reports_directory.mkdir(exist_ok=True)
        self.trades_directory.mkdir(exist_ok=True)
        self.balances_directory.mkdir(exist_ok=True)
        self.cache_directory.mkdir(exist_ok=True)
        (self.cache_directory / 'cryptocompare').mkdir(exist_ok=True)
        (self.cache_directory / 'history').mkdir(exist_ok=True)
        (self.cache_directory / 'inquirer').mkdir(exist_ok=True)
        (self.cache_directory / 'coingecko').mkdir(exist_ok=True)

        self.last_write_ts: Optional[Timestamp] = None

        self._amm_swaps = []  # type: List[AMMSwap]
        self._eth_tx = []  # type: List[EthereumTransaction]
        self._eth_receipts_store = pickledb.load(self.cache_directory / 'receipts.db', False)
        self.cryptocompare = Cryptocompare(self.cache_directory / 'cryptocompare', self)
        self.coingecko = Coingecko()
        self.historian = PriceHistorian(
            self.cache_directory / 'history',
            self.cryptocompare,
            self.coingecko,
        )
        self.inquirer = Inquirer(
            self.cache_directory / 'inquirer',
            self.cryptocompare,
            self.coingecko,
        )
        self.msg_aggregator = MessagesAggregator()
        self.greenlet_manager = GreenletManager(msg_aggregator=self.msg_aggregator)

        # Initialize blockchain querying modules
        self.etherscan = Etherscan(database=self, msg_aggregator=self.msg_aggregator)
        GlobalDBHandler._GlobalDBHandler__instance = None
        self.globaldb = GlobalDBHandler(self.cache_directory)
        self.asset_resolver = AssetResolver()
        self.assets_updater = AssetsUpdater(self.msg_aggregator)
        self.ethereum_manager = EthereumManager(
            database=self,
            ethrpc_endpoint=self.get_eth_rpc_endpoint(),
            etherscan=self.etherscan,
            msg_aggregator=self.msg_aggregator,
            greenlet_manager=self.greenlet_manager,
            connect_at_start=[]
        )
        self.inquirer.inject_ethereum(self.ethereum_manager)
        self.inquirer.set_oracles_order(self.get_settings().current_price_oracles)
        self.historian.set_oracles_order(self.get_settings().historical_price_oracles)
        self.beaconchain = BeaconChain(database=self, msg_aggregator=self.msg_aggregator)
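
Note that BuchfinkDB passes itself as the database argument to Etherscan and
EthereumManager, so a single object acts as config loader, cache owner, and
database handle. A short usage sketch, under the assumption that the class is
importable as below (the import path and the raw timestamp are hypothetical;
get_asset_by_symbol, inquirer.find_usd_price and
historian.query_historical_price are the methods shown elsewhere in this
document):

from buchfink.db import BuchfinkDB  # hypothetical import path

db = BuchfinkDB('.')  # reads ./buchfink.yaml and creates the cache directories
btc = db.get_asset_by_symbol('BTC')
# Spot price via the Inquirer wired up in __init__:
spot = db.inquirer.find_usd_price(btc)
# Historical price via the PriceHistorian configured in __init__:
historical = db.historian.query_historical_price(
    from_asset=btc,
    to_asset=db.get_asset_by_symbol('USD'),
    timestamp=1609459200,  # 2021-01-01 UTC; stands in for a real Timestamp
)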
Example #19
0
def initialize_mock_rotkehlchen_instance(
    rotki,
    start_with_logged_in_user,
    start_with_valid_premium,
    db_password,
    rotki_premium_credentials,
    username,
    blockchain_accounts,
    include_etherscan_key,
    include_cryptocompare_key,
    should_mock_price_queries,
    mocked_price_queries,
    ethereum_modules,
    db_settings,
    ignored_assets,
    tags,
    manually_tracked_balances,
    default_mock_price_value,
    ethereum_manager_connect_at_start,
    kusama_manager_connect_at_start,
    eth_rpc_endpoint,
    ksm_rpc_endpoint,
    aave_use_graph,
    max_tasks_num,
    legacy_messages_via_websockets,
    data_migration_version,
    use_custom_database,
    user_data_dir,
    perform_migrations_at_unlock,
    perform_upgrades_at_unlock,
):
    if not start_with_logged_in_user:
        return

    # Mock the initial get settings to include the specified ethereum modules
    def mock_get_settings() -> DBSettings:
        settings = DBSettings(
            active_modules=ethereum_modules,
            eth_rpc_endpoint=eth_rpc_endpoint,
            ksm_rpc_endpoint=ksm_rpc_endpoint,
        )
        return settings

    settings_patch = patch.object(rotki,
                                  'get_settings',
                                  side_effect=mock_get_settings)

    # Do not connect to the usual nodes at start by default, since we do not
    # want to spam them during tests. This is configurable per test, with the
    # default being no connections.
    eth_rpcconnect_patch = patch(
        'rotkehlchen.rotkehlchen.ETHEREUM_NODES_TO_CONNECT_AT_START',
        new=ethereum_manager_connect_at_start,
    )
    ksm_rpcconnect_patch = patch(
        'rotkehlchen.rotkehlchen.KUSAMA_NODES_TO_CONNECT_AT_START',
        new=kusama_manager_connect_at_start,
    )
    ksm_connect_on_startup_patch = patch.object(
        rotki,
        '_connect_ksm_manager_on_startup',
        return_value=bool(blockchain_accounts.ksm),
    )
    # patch the constants to make sure that the periodic query for icons
    # does not run during tests
    size_patch = patch('rotkehlchen.rotkehlchen.ICONS_BATCH_SIZE', new=0)
    sleep_patch = patch('rotkehlchen.rotkehlchen.ICONS_QUERY_SLEEP',
                        new=999999)

    create_new = True
    if use_custom_database is not None:
        _use_prepared_db(user_data_dir, use_custom_database)
        create_new = False

    with ExitStack() as stack:
        stack.enter_context(settings_patch)
        stack.enter_context(eth_rpcconnect_patch)
        stack.enter_context(ksm_rpcconnect_patch)
        stack.enter_context(ksm_connect_on_startup_patch)
        stack.enter_context(size_patch)
        stack.enter_context(sleep_patch)

        if perform_migrations_at_unlock is False:
            migrations_patch = patch.object(
                DataMigrationManager,
                'maybe_migrate_data',
                side_effect=lambda *args: None,
            )
            stack.enter_context(migrations_patch)

        if perform_upgrades_at_unlock is False:
            upgrades_patch = patch.object(
                DBUpgradeManager,
                'run_upgrades',
                side_effect=lambda *args: None,
            )
            stack.enter_context(upgrades_patch)

        rotki.unlock_user(
            user=username,
            password=db_password,
            create_new=create_new,
            sync_approval='no',
            premium_credentials=None,
        )
    # Configure how many tasks the task manager may schedule during tests
    rotki.task_manager.max_tasks_num = max_tasks_num

    if start_with_valid_premium:
        rotki.premium = Premium(rotki_premium_credentials)
        rotki.premium_sync_manager.premium = rotki.premium
        rotki.chain_manager.premium = rotki.premium
        # Add premium to all the modules
        for module_name in AVAILABLE_MODULES_MAP:
            module = rotki.chain_manager.get_module(module_name)
            if module is not None:
                module.premium = rotki.premium

    if legacy_messages_via_websockets is False:
        rotki.msg_aggregator.rotki_notifier = None

    # After unlocking, once all objects are created, also apply any customized
    # fixtures that the tests may have set
    rotki.chain_manager.accounts = blockchain_accounts
    add_settings_to_test_db(rotki.data.db, db_settings, ignored_assets,
                            data_migration_version)
    maybe_include_etherscan_key(rotki.data.db, include_etherscan_key)
    maybe_include_cryptocompare_key(rotki.data.db, include_cryptocompare_key)
    add_blockchain_accounts_to_db(rotki.data.db, blockchain_accounts)
    add_tags_to_test_db(rotki.data.db, tags)
    add_manually_tracked_balances_to_test_db(rotki.data.db,
                                             manually_tracked_balances)
    maybe_mock_historical_price_queries(
        historian=PriceHistorian(),
        should_mock_price_queries=should_mock_price_queries,
        mocked_price_queries=mocked_price_queries,
        default_mock_value=default_mock_price_value,
    )
    wait_until_all_nodes_connected(
        ethereum_manager_connect_at_start=ethereum_manager_connect_at_start,
        ethereum=rotki.chain_manager.ethereum,
    )
    wait_until_all_substrate_nodes_connected(
        substrate_manager_connect_at_start=kusama_manager_connect_at_start,
        substrate_manager=rotki.chain_manager.kusama,
    )

    aave = rotki.chain_manager.get_module('aave')
    if aave:
        aave.use_graph = aave_use_graph
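
The fixture above collects a variable number of patches in a
contextlib.ExitStack so they all stay active while unlock_user runs and unwind
together afterwards. A stripped-down sketch of that conditional-patching
pattern (rotki_like and its attributes are hypothetical stand-ins):

from contextlib import ExitStack
from unittest.mock import patch

def unlock_with_patches(rotki_like, skip_upgrades: bool):
    with ExitStack() as stack:
        # Unconditional patch: always entered.
        stack.enter_context(
            patch.object(rotki_like, 'get_settings', return_value={}),
        )
        if skip_upgrades:
            # Conditional patch: ExitStack tracks however many contexts were
            # entered and exits them in reverse order at the end of the block.
            stack.enter_context(
                patch.object(rotki_like, 'run_upgrades', side_effect=lambda *args: None),
            )
        rotki_like.unlock_user()  # runs with every entered patch active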