def _maybe_prime_globaldb(assets_dir: Path) -> None:
    """Maybe prime globaldb with assets from all_assets.json of the given directory

    Priming is skipped when the DB already holds data from the same (or a newer)
    version of the shipped json file.

    May raise:
    - OSError if a file can't be found
    - KeyError if a key is missing from any of the expected dicts (should not happen)
    """
    with open(assets_dir / 'all_assets.meta', 'r') as f:
        meta_data = json.load(f)  # idiomatic: parse straight from the file object

    last_version = GlobalDBHandler().get_setting_value('last_assets_json_version', 0)
    if last_version >= meta_data['version']:
        # DB already primed with this (or a newer) version -- just drop the json cache
        AssetResolver().clean_asset_json_cache()
        return

    with open(assets_dir / 'all_assets.json', 'r') as f:
        assets = json.load(f)

    ethereum_tokens = []
    other_assets = []
    for asset_id, entry in assets.items():
        if asset_id == 'XD':
            continue  # https://github.com/rotki/rotki/issues/2503
        entry['identifier'] = asset_id
        asset_type = asset_type_mapping.get(entry['type'])
        # Explicit None check: never rely on enum truthiness for a lookup miss
        if asset_type is None:
            log.error(
                f'During priming GlobalDB skipping asset with id {asset_id} '
                f'due to unknown type {entry["type"]}',
            )
            continue
        entry['type'] = asset_type
        if asset_type == AssetType.ETHEREUM_TOKEN:
            ethereum_tokens.append(entry)
        else:
            other_assets.append(entry)

    GlobalDBHandler().add_all_assets_from_json(
        ethereum_tokens=ethereum_tokens,
        other_assets=other_assets,
    )
    # in the end set the last version primed
    GlobalDBHandler().add_setting_value('last_assets_json_version', meta_data['version'])
    # also delete the all assets cache once priming is done
    AssetResolver().clean_asset_json_cache()
def create_globaldb(data_directory, ) -> GlobalDBHandler:
    """Create a fresh GlobalDBHandler rooted at the given data directory.

    GlobalDBHandler is a singleton; its cached instance is wiped first so that
    every call to this fixture starts from a completely clean slate.
    """
    GlobalDBHandler._GlobalDBHandler__instance = None  # type: ignore
    return GlobalDBHandler(data_dir=data_directory)
def __post_init__(
        self,
        form_with_incomplete_data: bool = False,
        direct_field_initialization: bool = False,
) -> None:
    """Resolve and fill in the ethereum token's fields after dataclass init.

    Prefixes the identifier with ETHEREUM_DIRECTIVE, lets the parent class run
    its own post-init, then pulls address/decimals/protocol and the underlying
    tokens from the asset resolver / global DB.

    Raises DeserializationError if the resolved asset has no ethereum address.
    """
    # Caller fills the fields itself -- skip all resolution
    if direct_field_initialization:
        return

    # Frozen dataclass (presumably) -- object.__setattr__ is the only way to
    # mutate fields here. TODO confirm the class is declared frozen.
    object.__setattr__(self, 'identifier', ETHEREUM_DIRECTIVE + self.identifier)
    super().__post_init__(form_with_incomplete_data)
    # TODO: figure out a way to move this out. Moved in here due to cyclic imports
    from rotkehlchen.assets.resolver import AssetResolver  # isort:skip  # noqa: E501  # pylint: disable=import-outside-toplevel
    from rotkehlchen.globaldb import GlobalDBHandler  # isort:skip  # noqa: E501  # pylint: disable=import-outside-toplevel

    data = AssetResolver().get_asset_data(self.identifier)  # pylint: disable=no-member
    if not data.ethereum_address:
        raise DeserializationError(
            'Tried to initialize a non Ethereum asset as Ethereum Token',
        )

    # Copy the resolved on-chain metadata onto this instance
    object.__setattr__(self, 'ethereum_address', data.ethereum_address)
    object.__setattr__(self, 'decimals', data.decimals)
    object.__setattr__(self, 'protocol', data.protocol)

    underlying_tokens = GlobalDBHandler().fetch_underlying_tokens(data.ethereum_address)
    object.__setattr__(self, 'underlying_tokens', underlying_tokens)
def create_globaldb(data_directory, ) -> GlobalDBHandler:
    """Return a GlobalDBHandler rebuilt from scratch under data_directory.

    The handler is a singleton, so the stored instance is reset before
    construction to guarantee a fresh database every time the fixture runs.
    Custom ethereum token insertion happens after resolver initialization so
    that the DB can first be primed with all assets.json.
    """
    GlobalDBHandler._GlobalDBHandler__instance = None  # type: ignore
    fresh_handler = GlobalDBHandler(data_dir=data_directory)
    return fresh_handler
def __init__(self, args: argparse.Namespace) -> None: """Initialize the Rotkehlchen object This runs during backend initialization so it should be as light as possible. May Raise: - SystemPermissionError if the given data directory's permissions are not correct. """ # Can also be None after unlock if premium credentials did not # authenticate or premium server temporarily offline self.premium: Optional[Premium] = None self.user_is_logged_in: bool = False configure_logging(args) self.sleep_secs = args.sleep_secs if args.data_dir is None: self.data_dir = default_data_directory() else: self.data_dir = Path(args.data_dir) self.data_dir.mkdir(parents=True, exist_ok=True) if not os.access(self.data_dir, os.W_OK | os.R_OK): raise SystemPermissionError( f'The given data directory {self.data_dir} is not readable or writable', ) self.main_loop_spawned = False self.args = args self.api_task_greenlets: List[gevent.Greenlet] = [] self.msg_aggregator = MessagesAggregator() self.greenlet_manager = GreenletManager( msg_aggregator=self.msg_aggregator) self.exchange_manager = ExchangeManager( msg_aggregator=self.msg_aggregator) # Initialize the GlobalDBHandler singleton. Has to be initialized BEFORE asset resolver GlobalDBHandler(data_dir=self.data_dir) self.data = DataHandler(self.data_dir, self.msg_aggregator) self.cryptocompare = Cryptocompare(data_directory=self.data_dir, database=None) self.coingecko = Coingecko() self.icon_manager = IconManager(data_dir=self.data_dir, coingecko=self.coingecko) self.assets_updater = AssetsUpdater(self.msg_aggregator) # Initialize the Inquirer singleton Inquirer( data_dir=self.data_dir, cryptocompare=self.cryptocompare, coingecko=self.coingecko, ) # Keeps how many trades we have found per location. Used for free user limiting self.actions_per_location: Dict[str, Dict[Location, int]] = { 'trade': defaultdict(int), 'asset_movement': defaultdict(int), } self.task_manager: Optional[TaskManager] = None self.shutdown_event = gevent.event.Event()
def get_asset_data(
        asset_identifier: str,
        form_with_incomplete_data: bool = False,
) -> AssetData:
    """Get all asset data for a valid asset identifier

    Resolution order: in-memory cache, then the packaged all_assets.json
    (only while the global DB has not yet been primed or is unusable),
    then the global DB itself.

    Raises UnknownAsset if no data can be found
    """
    instance = AssetResolver()
    # attempt read from memory cache -- always lower
    cached_data = instance.assets_cache.get(asset_identifier.lower(), None)
    if cached_data is not None:
        return cached_data

    check_json = False
    try:
        dbinstance = GlobalDBHandler()
        # Version 0 means the DB has never been primed from all_assets.json
        if dbinstance.get_setting_value('last_assets_json_version', 0) == 0:
            check_json = True
    except ModuleInitializationFailure:
        # Global DB cannot be constructed -- json fallback is the only option
        check_json = True

    if check_json:
        # still need to resolve out of the in memory all_assets.json
        if instance.all_assets is None:
            raise AssertionError(
                'We need to check all_assets.json and cached data has been deleted',
            )
        result = instance.all_assets.get(asset_identifier, None)
        if result is None:
            raise UnknownAsset(asset_identifier)
        # NOTE: the json path intentionally does not populate assets_cache;
        # that cache is reserved for DB-backed data -- TODO confirm
        return result

    # At this point we can use the global DB
    asset_data = dbinstance.get_asset_data(asset_identifier, form_with_incomplete_data)
    if asset_data is None:
        raise UnknownAsset(asset_identifier)

    # save in the memory cache -- always lower
    instance.assets_cache[asset_identifier.lower()] = asset_data
    return asset_data
def get_asset_data(
        asset_identifier: str,
        form_with_incomplete_data: bool = False,
) -> AssetData:
    """Get all asset data for a valid asset identifier

    Raises UnknownAsset if no data can be found
    """
    resolver = AssetResolver()
    cache_key = asset_identifier.lower()

    # Fast path: memory cache, keyed by the lowercased identifier
    hit = resolver.assets_cache.get(cache_key)
    if hit is not None:
        return hit

    # Cache miss -- consult the global DB
    db_data = GlobalDBHandler().get_asset_data(asset_identifier, form_with_incomplete_data)
    if db_data is None:
        raise UnknownAsset(asset_identifier)

    # Remember the answer for subsequent lookups (always lowercased key)
    resolver.assets_cache[cache_key] = db_data
    return db_data
def __init__(self, args: argparse.Namespace) -> None:
    """Initialize the Rotkehlchen object

    May Raise:
    - SystemPermissionError if the given data directory's permissions
    are not correct.
    """
    # NOTE(review): the lock is held for the entire constructor; if any step
    # below raises it is never released -- confirm that is acceptable
    self.lock = Semaphore()
    self.lock.acquire()

    # Can also be None after unlock if premium credentials did not
    # authenticate or premium server temporarily offline
    self.premium: Optional[Premium] = None
    self.user_is_logged_in: bool = False

    configure_logging(args)

    self.sleep_secs = args.sleep_secs
    if args.data_dir is None:
        self.data_dir = default_data_directory()
    else:
        self.data_dir = Path(args.data_dir)

    if not os.access(self.data_dir, os.W_OK | os.R_OK):
        raise SystemPermissionError(
            f'The given data directory {self.data_dir} is not readable or writable',
        )
    self.main_loop_spawned = False
    self.args = args
    self.api_task_greenlets: List[gevent.Greenlet] = []
    self.msg_aggregator = MessagesAggregator()
    self.greenlet_manager = GreenletManager(msg_aggregator=self.msg_aggregator)
    self.exchange_manager = ExchangeManager(msg_aggregator=self.msg_aggregator)
    # Initialize the GlobalDBHandler singleton. Has to be initialized BEFORE asset resolver
    GlobalDBHandler(data_dir=self.data_dir)
    self.data = DataHandler(self.data_dir, self.msg_aggregator)
    self.cryptocompare = Cryptocompare(data_directory=self.data_dir, database=None)
    self.coingecko = Coingecko(data_directory=self.data_dir)
    self.icon_manager = IconManager(data_dir=self.data_dir, coingecko=self.coingecko)
    self.assets_updater = AssetsUpdater(self.msg_aggregator)
    # Background greenlet: warm the icon cache in batches until everything is cached
    self.greenlet_manager.spawn_and_track(
        after_seconds=None,
        task_name='periodically_query_icons_until_all_cached',
        exception_is_error=False,
        method=self.icon_manager.periodically_query_icons_until_all_cached,
        batch_size=ICONS_BATCH_SIZE,
        sleep_time_secs=ICONS_QUERY_SLEEP,
    )
    # Initialize the Inquirer singleton
    Inquirer(
        data_dir=self.data_dir,
        cryptocompare=self.cryptocompare,
        coingecko=self.coingecko,
    )
    # Keeps how many trades we have found per location. Used for free user limiting
    self.actions_per_location: Dict[str, Dict[Location, int]] = {
        'trade': defaultdict(int),
        'asset_movement': defaultdict(int),
    }
    self.lock.release()

    self.task_manager: Optional[TaskManager] = None
    self.shutdown_event = gevent.event.Event()
def __init__(self, data_directory='.'):  # pylint: disable=super-init-not-called
    """Load the buchfink.yaml config and wire up all rotki subsystems.

    Deliberately skips the parent DBHandler __init__ (see pylint disable);
    this object only mimics the DBHandler interface.
    """
    self.data_directory = Path(data_directory)
    with open(self.data_directory / 'buchfink.yaml', 'r') as cfg:
        yaml_config = yaml.load(cfg, Loader=yaml.SafeLoader)

    self.config = config_schema(yaml_config)
    self.accounts = accounts_from_config(self.config)  # type: List[Account]
    # Temporary context used by query_balances/get_blockchain_accounts
    self._active_eth_address = None  # type: Optional[ChecksumEthAddress]

    # Well-known subdirectories of the buchfink data directory
    self.reports_directory = self.data_directory / "reports"
    self.trades_directory = self.data_directory / "trades"
    self.cache_directory = self.data_directory / "cache"
    self.balances_directory = self.data_directory / "balances"
    self.annotations_directory = self.data_directory / "annotations"
    self.user_data_dir = self.data_directory / "user"

    self.reports_directory.mkdir(exist_ok=True)
    self.trades_directory.mkdir(exist_ok=True)
    self.balances_directory.mkdir(exist_ok=True)
    self.cache_directory.mkdir(exist_ok=True)
    (self.cache_directory / 'cryptocompare').mkdir(exist_ok=True)
    (self.cache_directory / 'history').mkdir(exist_ok=True)
    (self.cache_directory / 'inquirer').mkdir(exist_ok=True)
    (self.cache_directory / 'coingecko').mkdir(exist_ok=True)

    self.last_write_ts: Optional[Timestamp] = None

    # In-memory stores standing in for the real DB tables
    self._amm_swaps = []  # type: List[AMMSwap]
    self._eth_tx = []  # type: List[EthereumTransaction]
    # On-disk key/value cache of transaction receipts
    self._eth_receipts_store = pickledb.load(self.cache_directory / 'receipts.db', False)

    self.cryptocompare = Cryptocompare(self.cache_directory / 'cryptocompare', self)
    self.coingecko = Coingecko()
    self.historian = PriceHistorian(
        self.cache_directory / 'history',
        self.cryptocompare,
        self.coingecko
    )
    self.inquirer = Inquirer(
        self.cache_directory / 'inquirer',
        self.cryptocompare,
        self.coingecko
    )
    self.msg_aggregator = MessagesAggregator()
    self.greenlet_manager = GreenletManager(msg_aggregator=self.msg_aggregator)

    # Initialize blockchain querying modules
    self.etherscan = Etherscan(database=self, msg_aggregator=self.msg_aggregator)
    # Reset the singleton so we always get a handler bound to our cache dir
    GlobalDBHandler._GlobalDBHandler__instance = None
    self.globaldb = GlobalDBHandler(self.cache_directory)
    self.asset_resolver = AssetResolver()
    self.assets_updater = AssetsUpdater(self.msg_aggregator)
    self.ethereum_manager = EthereumManager(
        database=self,
        ethrpc_endpoint=self.get_eth_rpc_endpoint(),
        etherscan=self.etherscan,
        msg_aggregator=self.msg_aggregator,
        greenlet_manager=self.greenlet_manager,
        connect_at_start=[]
    )
    self.inquirer.inject_ethereum(self.ethereum_manager)
    self.inquirer.set_oracles_order(self.get_settings().current_price_oracles)
    self.historian.set_oracles_order(self.get_settings().historical_price_oracles)
    self.beaconchain = BeaconChain(database=self, msg_aggregator=self.msg_aggregator)
class BuchfinkDB(DBHandler):
    """
    This class is not very thought out and might need a refactor. Currently it
    does three things, namely:
    1) preparing classes from Rotki to be used by higher-level functions
    2) function as a Rotki DBHandler and provide data to Rotki classes
    3) load and parse Buchfink config
    """

    def __init__(self, data_directory='.'):  # pylint: disable=super-init-not-called
        """Load buchfink.yaml and wire up all rotki subsystems.

        Deliberately skips DBHandler.__init__ (see pylint disable); this class
        only mimics the DBHandler interface.
        """
        self.data_directory = Path(data_directory)
        with open(self.data_directory / 'buchfink.yaml', 'r') as cfg:
            yaml_config = yaml.load(cfg, Loader=yaml.SafeLoader)

        self.config = config_schema(yaml_config)
        self.accounts = accounts_from_config(self.config)  # type: List[Account]
        # Temporary context used by query_balances/get_blockchain_accounts
        self._active_eth_address = None  # type: Optional[ChecksumEthAddress]

        # Well-known subdirectories of the buchfink data directory
        self.reports_directory = self.data_directory / "reports"
        self.trades_directory = self.data_directory / "trades"
        self.cache_directory = self.data_directory / "cache"
        self.balances_directory = self.data_directory / "balances"
        self.annotations_directory = self.data_directory / "annotations"
        self.user_data_dir = self.data_directory / "user"

        self.reports_directory.mkdir(exist_ok=True)
        self.trades_directory.mkdir(exist_ok=True)
        self.balances_directory.mkdir(exist_ok=True)
        self.cache_directory.mkdir(exist_ok=True)
        (self.cache_directory / 'cryptocompare').mkdir(exist_ok=True)
        (self.cache_directory / 'history').mkdir(exist_ok=True)
        (self.cache_directory / 'inquirer').mkdir(exist_ok=True)
        (self.cache_directory / 'coingecko').mkdir(exist_ok=True)

        self.last_write_ts: Optional[Timestamp] = None

        # In-memory stores standing in for the real DB tables
        self._amm_swaps = []  # type: List[AMMSwap]
        self._eth_tx = []  # type: List[EthereumTransaction]
        # On-disk key/value cache of transaction receipts
        self._eth_receipts_store = pickledb.load(self.cache_directory / 'receipts.db', False)

        self.cryptocompare = Cryptocompare(self.cache_directory / 'cryptocompare', self)
        self.coingecko = Coingecko()
        self.historian = PriceHistorian(
            self.cache_directory / 'history',
            self.cryptocompare,
            self.coingecko
        )
        self.inquirer = Inquirer(
            self.cache_directory / 'inquirer',
            self.cryptocompare,
            self.coingecko
        )
        self.msg_aggregator = MessagesAggregator()
        self.greenlet_manager = GreenletManager(msg_aggregator=self.msg_aggregator)

        # Initialize blockchain querying modules
        self.etherscan = Etherscan(database=self, msg_aggregator=self.msg_aggregator)
        # Reset the singleton so we always get a handler bound to our cache dir
        GlobalDBHandler._GlobalDBHandler__instance = None
        self.globaldb = GlobalDBHandler(self.cache_directory)
        self.asset_resolver = AssetResolver()
        self.assets_updater = AssetsUpdater(self.msg_aggregator)
        self.ethereum_manager = EthereumManager(
            database=self,
            ethrpc_endpoint=self.get_eth_rpc_endpoint(),
            etherscan=self.etherscan,
            msg_aggregator=self.msg_aggregator,
            greenlet_manager=self.greenlet_manager,
            connect_at_start=[]
        )
        self.inquirer.inject_ethereum(self.ethereum_manager)
        self.inquirer.set_oracles_order(self.get_settings().current_price_oracles)
        self.historian.set_oracles_order(self.get_settings().historical_price_oracles)
        self.beaconchain = BeaconChain(database=self, msg_aggregator=self.msg_aggregator)

    def __del__(self):
        # Intentionally override DBHandler.__del__ with a no-op: there is no
        # real user DB connection to close here
        pass

    def get_asset_by_symbol(self, symbol: str) -> Asset:
        # TODO: this indirection function could incorporate a custom mapping from yaml config
        return deserialize_asset(symbol)

    def get_main_currency(self):
        """Return the main currency from the parsed settings."""
        return self.get_settings().main_currency

    def get_eth_rpc_endpoint(self):
        """Return the configured eth rpc endpoint, or None if unset."""
        return self.config['settings'].get('eth_rpc_endpoint', None)

    def get_all_accounts(self) -> List[Account]:
        return self.accounts

    def get_all_reports(self) -> Iterable[ReportConfig]:
        """Yield a ReportConfig for each report declared in the config."""
        for report_info in self.config['reports']:
            yield ReportConfig(
                name=str(report_info['name']),
                title=report_info.get('title'),
                template=report_info.get('template'),
                from_dt=datetime.fromisoformat(str(report_info['from'])),
                to_dt=datetime.fromisoformat(str(report_info['to']))
            )

    def get_settings(self, have_premium: bool = False) -> DBSettings:
        """Build DBSettings from the yaml 'settings' section.

        NOTE(review): have_premium appears unused; presumably kept for
        DBHandler interface compatibility -- confirm.
        """
        clean_settings = dict(self.config['settings'])
        # external_services is buchfink-specific and not a rotki DB setting
        if 'external_services' in clean_settings:
            del clean_settings['external_services']
        return db_settings_from_dict(clean_settings, self.msg_aggregator)

    def get_ignored_assets(self):
        # Buchfink does not support ignoring assets
        return []

    def get_external_service_credentials(
            self,
            service_name: ExternalService,
    ) -> Optional[ExternalServiceApiCredentials]:
        """If existing it returns the external service credentials for the given service"""
        short_name = service_name.name.lower()
        api_key = self.config['settings'].get('external_services', {}).get(short_name)
        if not api_key:
            return None

        return ExternalServiceApiCredentials(service=service_name, api_key=api_key)

    def get_accountant(self) -> Accountant:
        """Create a fresh Accountant bound to this DB (no premium)."""
        return Accountant(self, None, self.msg_aggregator, True, premium=None)

    def get_blockchain_accounts(self) -> BlockchainAccounts:
        # While an ethereum balance query is in flight, report only that
        # address (see query_balances for why)
        if self._active_eth_address:
            return BlockchainAccounts(eth=[self._active_eth_address], btc=[], ksm=[])
        return BlockchainAccounts(eth=[], btc=[], ksm=[])

    def get_trades_from_file(self, trades_file) -> List[Trade]:
        """Parse trades from a yaml file, skipping entries with unknown assets.

        Reads both the 'trades' list and any buy/sell entries in 'actions'.
        """
        def safe_deserialize_trade(trade):
            try:
                return deserialize_trade(trade)
            except UnknownAsset:
                logger.warning('Ignoring trade with unknown asset: %s', trade)
                return None

        exchange = yaml.load(open(trades_file, 'r'), Loader=yaml.SafeLoader)

        return [ser_trade for ser_trade in [
            safe_deserialize_trade(trade) for trade in exchange.get('trades', [])]
            if ser_trade is not None] \
            + [ser_trade for ser_trade in [
                safe_deserialize_trade(trade) for trade in exchange.get('actions', [])
                if 'buy' in trade or 'sell' in trade]
                if ser_trade is not None]

    def get_local_trades_for_account(self, account_name: Union[str, Account]) -> List[Trade]:
        """Return trades stored locally for the given account (by name or object)."""
        if isinstance(account_name, str):
            account = [a for a in self.accounts if a.name == account_name][0]  # type: Account
        else:
            account = account_name

        if account.account_type == 'file':
            trades_file = os.path.join(self.data_directory, account.config['file'])
            return self.get_trades_from_file(trades_file)

        trades_file = os.path.join(self.data_directory, 'trades', account.name + '.yaml')
        if os.path.exists(trades_file):
            return self.get_trades_from_file(trades_file)
        return []

    def get_actions_from_file(self, actions_file):
        """Parse ledger actions from a yaml file, skipping trades and unknown assets."""
        def safe_deserialize_ledger_action(action):
            # buy/sell entries are trades, not ledger actions
            if 'buy' in action or 'sell' in action:
                return None
            try:
                return deserialize_ledger_action(action)
            except UnknownAsset:
                logger.warning('Ignoring ledger action with unknown asset: %s', action)
                return None

        exchange = yaml.load(open(actions_file, 'r'), Loader=yaml.SafeLoader)
        return [ser_action for ser_action in [
            safe_deserialize_ledger_action(action) for action in exchange.get('actions', [])
            ] if ser_action is not None]

    def get_local_ledger_actions_for_account(self, account_name: Union[str, Account]) \
            -> List[Trade]:
        """Return ledger actions stored locally for the given account."""
        if isinstance(account_name, str):
            account = [a for a in self.accounts if a.name == account_name][0]  # type: Account
        else:
            account = account_name

        if account.account_type == 'file':
            actions_file = self.data_directory / account.config['file']
            if actions_file.exists():
                return self.get_actions_from_file(actions_file)
        else:
            actions_file = self.data_directory / f'actions/{account.name}.yaml'
            if actions_file.exists():
                return self.get_actions_from_file(actions_file)

        return []

    def get_chain_manager(self, account: Account) -> ChainManager:
        """Build a ChainManager scoped to a single ethereum or bitcoin account.

        Raises ValueError for any other account type.
        """
        if account.account_type == "ethereum":
            accounts = BlockchainAccounts(eth=[account.address], btc=[], ksm=[])
        elif account.account_type == "bitcoin":
            accounts = BlockchainAccounts(eth=[], btc=[account.address], ksm=[])
        else:
            raise ValueError('Unable to create chain manager for account')

        # Eventually we should allow premium credentials in config file
        premium = False

        eth_modules = self.get_settings().active_modules
        if not premium:
            eth_modules = [mod for mod in eth_modules if mod not in PREMIUM_ONLY_ETH_MODULES]

        logger.debug('Creating ChainManager with modules: %s', eth_modules)
        manager = ChainManager(
            database=self,
            blockchain_accounts=accounts,
            beaconchain=self.beaconchain,
            data_directory=self.data_directory,
            ethereum_manager=self.ethereum_manager,
            kusama_manager=None,
            msg_aggregator=self.msg_aggregator,
            btc_derivation_gap_limit=self.get_settings().btc_derivation_gap_limit,
            greenlet_manager=self.greenlet_manager,
            premium=premium,
            eth_modules=eth_modules
        )

        # Monkey-patch function that uses singleton
        manager.queried_addresses_for_module = lambda self, module = None: [account]

        return manager

    def get_exchange(self, account: str) -> ExchangeInterface:
        """Instantiate the exchange class named in the account's config.

        Raises ValueError for an unknown exchange name.
        """
        account_info = [a for a in self.config['accounts'] if a['name'] == account][0]

        exchange_opts = dict(
            name=account_info['name'],
            api_key=str(account_info['api_key']),
            secret=str(account_info['secret']).encode(),
            database=self,
            msg_aggregator=self.msg_aggregator
        )

        if account_info['exchange'] == 'kraken':
            exchange = Kraken(**exchange_opts)
        elif account_info['exchange'] == 'binance':
            exchange = Binance(**exchange_opts)
        elif account_info['exchange'] == 'coinbase':
            exchange = Coinbase(**exchange_opts)
        elif account_info['exchange'] == 'coinbasepro':
            exchange = Coinbasepro(**exchange_opts, passphrase=str(account_info['passphrase']))
        elif account_info['exchange'] == 'gemini':
            exchange = Gemini(**exchange_opts)
        elif account_info['exchange'] == 'bitmex':
            exchange = Bitmex(**exchange_opts)
        elif account_info['exchange'] == 'bittrex':
            exchange = Bittrex(**exchange_opts)
        elif account_info['exchange'] == 'poloniex':
            exchange = Poloniex(**exchange_opts)
        elif account_info['exchange'] == 'bitcoinde':
            exchange = Bitcoinde(**exchange_opts)
        elif account_info['exchange'] == 'iconomi':
            exchange = Iconomi(**exchange_opts)
        else:
            raise ValueError("Unknown exchange: " + account_info['exchange'])

        return exchange

    def get_tokens_for_address_if_time(self, address, current_time):
        # No token-detection cache in buchfink -- always force a fresh detection
        return None

    def save_tokens_for_address(self, address, tokens):
        # Intentional no-op: buchfink does not persist detected tokens
        pass

    def query_balances(self, account) -> BalanceSheet:
        """Query live balances for one account (exchange/ethereum/bitcoin/file).

        Raises RuntimeError when an exchange rejects the API key or the
        balance query itself errors.
        """
        if account.account_type == "exchange":
            exchange = self.get_exchange(account.name)

            api_key_is_valid, error = exchange.validate_api_key()
            if not api_key_is_valid:
                raise RuntimeError(error)

            balances, error = exchange.query_balances()
            if not error:
                logger.info(
                    'Fetched balances for %d assets from %s',
                    len(balances.keys()),
                    account.name
                )
                return BalanceSheet(assets=balances, liabilities={})
            raise RuntimeError(error)

        if account.account_type == "ethereum":
            manager = self.get_chain_manager(account)

            # This is a little hack because query_balances sometimes hooks back
            # into out get_blockchain_accounts() without providing context (for
            # example from makerdao module).
            self._active_eth_address = account.address
            manager.query_balances(
                blockchain=SupportedBlockchain.ETHEREUM,
                force_token_detection=True
            )
            self._active_eth_address = None

            return reduce(operator.add, manager.balances.eth.values())

        if account.account_type == "bitcoin":
            manager = self.get_chain_manager(account)
            manager.query_balances()
            btc = Asset('BTC')

            return BalanceSheet(assets={
                btc: reduce(operator.add, manager.balances.btc.values())
            }, liabilities={})

        if account.account_type == "file":
            return self.get_balances_from_file(account.config['file'])

        return BalanceSheet(assets={}, liabilities={})

    def fetch_balances(self, account):
        """Query balances, merge any manual annotations, and persist the result."""
        query_sheet = self.query_balances(account)
        path = self.annotations_directory / (account.name + '.yaml')
        if path.exists():
            query_sheet += self.get_balances_from_file(path)
        self.write_balances(account, query_sheet)

    def get_balances(self, account) -> BalanceSheet:
        """Return the last stored balances for the account (empty sheet if none)."""
        path = self.balances_directory / (account.name + '.yaml')
        if path.exists():
            return self.get_balances_from_file(path)
        return BalanceSheet(assets={}, liabilities={})

    def get_balances_from_file(self, path) -> BalanceSheet:
        """Parse a balance sheet from a yaml file.

        Accepts the deprecated top-level key 'balances' as well as 'assets'
        and 'liabilities'; repeated assets are summed.
        """
        account = yaml.load(open(path, 'r'), Loader=yaml.SafeLoader)

        assets = {}  # type: Dict[Asset, Balance]
        liabilities = {}  # type: Dict[Asset, Balance]

        if 'balances' in account:
            logger.warning('Found deprecated key "balances", please use "assets" instead.')
            for balance in account['balances']:
                balance, asset = deserialize_balance(balance, self)
                if asset in assets:
                    assets[asset] += balance
                else:
                    assets[asset] = balance

        if 'assets' in account:
            for balance in account['assets']:
                balance, asset = deserialize_balance(balance, self)
                if asset in assets:
                    assets[asset] += balance
                else:
                    assets[asset] = balance

        if 'liabilities' in account:
            for balance in account['liabilities']:
                balance, asset = deserialize_balance(balance, self)
                if asset in liabilities:
                    liabilities[asset] += balance
                else:
                    liabilities[asset] = balance

        return BalanceSheet(assets=assets, liabilities=liabilities)

    def write_balances(self, account: Account, balances: BalanceSheet):
        """Serialize and write the account's balance sheet to the balances dir."""
        path = self.balances_directory / (account.name + '.yaml')

        with path.open('w') as balances_file:
            yaml.dump(serialize_balances(balances), stream=balances_file)

    def get_amm_swaps(
            self,
            from_ts: Optional[Timestamp] = None,
            to_ts: Optional[Timestamp] = None,
            location: Optional[Location] = None,
            address: Optional[ChecksumEthAddress] = None,
    ) -> List[AMMSwap]:
        # NOTE(review): the filter parameters are ignored; all stored swaps
        # are returned -- confirm callers rely on that
        return self._amm_swaps

    def add_amm_swaps(self, swaps: List[AMMSwap]) -> None:
        # Replace, not append: the in-memory store holds only the latest batch
        self._amm_swaps = []
        self._amm_swaps.extend(swaps)

    def update_used_query_range(self, name: str, start_ts: Timestamp, end_ts: Timestamp) -> None:
        # Query ranges are not persisted in buchfink
        pass

    def update_used_block_query_range(self, name: str, from_block: int, to_block: int) -> None:
        # Block query ranges are not persisted in buchfink
        pass

    def get_used_query_range(self, name: str) -> Optional[Tuple[Timestamp, Timestamp]]:
        # No stored ranges -- callers always re-query from scratch
        return None

    def get_ethereum_transaction_receipt(self, tx_hash: str, manager: ChainManager):
        """Return the receipt for tx_hash, fetching and caching it on first use."""
        receipt = self._eth_receipts_store.get(tx_hash)
        if receipt:
            return receipt

        receipt = manager.ethereum.get_transaction_receipt(tx_hash)
        self._eth_receipts_store.set(tx_hash, receipt)
        self._eth_receipts_store.dump()
        return receipt

    def get_ignored_action_ids(
            self,
            action_type: Optional[ActionType],
    ) -> Dict[ActionType, List[str]]:
        # Buchfink does not support ignoring actions
        return {}

    def add_ethereum_transactions(
            self,
            ethereum_transactions: List[EthereumTransaction],
            from_etherscan: bool,
    ) -> None:
        # Replace, not append: the in-memory store holds only the latest batch
        self._eth_tx = []
        self._eth_tx.extend(ethereum_transactions)

    def get_ethereum_transactions(
            self,
            from_ts: Optional[Timestamp] = None,
            to_ts: Optional[Timestamp] = None,
            address: Optional[ChecksumEthAddress] = None,
    ) -> List[EthereumTransaction]:
        # NOTE(review): filter parameters are ignored here as well -- confirm
        return self._eth_tx

    def perform_assets_updates(self):
        """Pull remote asset updates, then register custom tokens from the config.

        Raises ValueError if a configured token still cannot be resolved after
        being added to the global DB.
        """
        self.assets_updater.perform_update(None, 'remote')

        for token_data in self.config.get('tokens', []):
            eth_token = deserialize_ethereum_token(token_data)
            identifier = '_ceth_' + eth_token.address

            try:
                self.get_asset_by_symbol(identifier)
                logger.debug('Asset already exists: %s', eth_token)
            except UnknownAsset:
                self.globaldb.add_asset(identifier, AssetType.ETHEREUM_TOKEN, eth_token)
                # Sanity-check that the insert actually made the asset resolvable
                try:
                    self.get_asset_by_symbol(identifier)
                except UnknownAsset as exc:
                    raise ValueError('Unable to add asset: ' + str(eth_token)) from exc

        self.asset_resolver.clean_memory_cache()