def __post_init__(self) -> None:
    """ Asset post initialization

    The only thing that is given to initialize an asset is a string.

    If a non string is given then it's probably a deserialization error or
    invalid data were given to us by the server if an API was queried.

    May raise:
    - DeserializationError: if the identifier is not a string
    - UnknownAsset: if the identifier does not resolve to a known asset
    """
    if not isinstance(self.identifier, str):
        raise DeserializationError(
            'Non-string identifier ended up in asset initialization')

    if not AssetResolver().is_identifier_canonical(self.identifier):
        raise UnknownAsset(self.identifier)
    data = AssetResolver().get_asset_data(self.identifier)
    # Ugly hack to set attributes of a frozen data class as post init
    # https://docs.python.org/3/library/dataclasses.html#frozen-instances
    object.__setattr__(self, 'name', data.name)
    object.__setattr__(self, 'symbol', data.symbol)
    object.__setattr__(self, 'active', data.active)
    object.__setattr__(self, 'asset_type', data.asset_type)
    object.__setattr__(self, 'started', data.started)
    object.__setattr__(self, 'ended', data.ended)
    object.__setattr__(self, 'forked', data.forked)
    object.__setattr__(self, 'swapped_for', data.swapped_for)
def asset_resolver(
        data_dir,
        query_github_for_assets,
        mock_asset_meta_github_response,
        mock_asset_github_response,
        force_reinitialize_asset_resolver,
):
    """Run the first initialization of the AssetResolver singleton

    It's an autouse fixture so that it always gets initialized
    """
    if force_reinitialize_asset_resolver:
        # drop the singleton instance so a fresh one is constructed below
        AssetResolver._AssetResolver__instance = None

    if query_github_for_assets:
        # real network query -- no mocking needed
        AssetResolver(data_dir)
        return

    # else mock the github request to return version lower than anything possible
    def mock_get_request(url: str) -> MockResponse:
        if url == 'https://raw.githubusercontent.com/rotki/rotki/develop/rotkehlchen/data/all_assets.meta':  # noqa: E501
            return MockResponse(200, mock_asset_meta_github_response)
        if url == 'https://raw.githubusercontent.com/rotki/rotki/develop/rotkehlchen/data/all_assets.json':  # noqa: E501
            return MockResponse(200, mock_asset_github_response)
        # else
        raise AssertionError('This mock should receive no other urls')

    get_patch = patch('requests.get', side_effect=mock_get_request)
    with get_patch:
        AssetResolver(data_dir)
def __post_init__(self) -> None:
    """ Asset post initialization

    The only thing that is given to initialize an asset is a string.

    If a non string is given then it's probably a deserialization error or
    invalid data were given to us by the server if an API was queried.

    May raise:
    - DeserializationError: if the identifier is not a string
    - UnknownAsset: if the identifier does not resolve to a known asset
    """
    if not isinstance(self.identifier, str):
        raise DeserializationError(
            'Tried to initialize an asset out of a non-string identifier',
        )

    canonical_id = AssetResolver().is_identifier_canonical(self.identifier)
    if canonical_id is None:
        raise UnknownAsset(self.identifier)
    # else let's make sure we got the canonical id in our data struct
    object.__setattr__(self, 'identifier', canonical_id)

    data = AssetResolver().get_asset_data(self.identifier)
    # Ugly hack to set attributes of a frozen data class as post init
    # https://docs.python.org/3/library/dataclasses.html#frozen-instances
    object.__setattr__(self, 'name', data.name)
    object.__setattr__(self, 'symbol', data.symbol)
    object.__setattr__(self, 'active', data.active)
    object.__setattr__(self, 'asset_type', data.asset_type)
    object.__setattr__(self, 'started', data.started)
    object.__setattr__(self, 'ended', data.ended)
    object.__setattr__(self, 'forked', data.forked)
    object.__setattr__(self, 'swapped_for', data.swapped_for)
    object.__setattr__(self, 'cryptocompare', data.cryptocompare)
    object.__setattr__(self, 'coingecko', data.coingecko)
def __post_init__(self):
    """Resolve the string identifier and copy the resolved asset data
    onto this frozen dataclass instance.

    May raise UnknownAsset if the identifier is not a canonical asset id.
    """
    if not AssetResolver().is_identifier_canonical(self.identifier):
        raise UnknownAsset(self.identifier)
    data = AssetResolver().get_asset_data(self.identifier)
    # Ugly hack to set attributes of a frozen data class as post init
    # https://docs.python.org/3/library/dataclasses.html#frozen-instances
    object.__setattr__(self, 'name', data.name)
    object.__setattr__(self, 'symbol', data.symbol)
    object.__setattr__(self, 'active', data.active)
    object.__setattr__(self, 'asset_type', data.asset_type)
    object.__setattr__(self, 'started', data.started)
    object.__setattr__(self, 'ended', data.ended)
    object.__setattr__(self, 'forked', data.forked)
    object.__setattr__(self, 'swapped_for', data.swapped_for)
def query_uncached_icons_batch(self, batch_size: int) -> bool:
    """Queries a batch of uncached icons for assets

    Returns true if there is more icons left to cache after this batch.
    """
    # Every non-fiat asset with a non-empty coingecko mapping is a candidate
    candidates = [
        identifier
        for identifier, asset_data in AssetResolver().assets.items()
        if asset_type_mapping[asset_data['type']] != AssetType.FIAT and
        asset_data['coingecko'] != ''
    ]
    # strip the trailing '_thumb.png' (10 chars) to recover the identifier
    already_cached = {
        str(entry.name)[:-10]
        for entry in self.icons_dir.glob('*_thumb.png')
        if entry.is_file()
    }
    pending = set(candidates) - already_cached - self.failed_assets
    log.info(
        f'Periodic task to query coingecko for {batch_size} uncached asset icons. '
        f'Uncached assets: {len(pending)}. Cached assets: {len(already_cached)}',
    )
    for asset_name in itertools.islice(pending, batch_size):
        self._query_coingecko_for_icon(Asset(asset_name))

    return len(pending) > batch_size
def __post_init__(
        self,
        form_with_incomplete_data: bool = False,
        direct_field_initialization: bool = False,
) -> None:
    """Ethereum token post initialization

    If direct_field_initialization is True then all fields are assumed to have
    been populated by the caller already and resolution is skipped entirely.

    May raise DeserializationError if the resolved asset data has no
    ethereum address (i.e. it is not actually an ethereum token).
    """
    if direct_field_initialization:
        return

    # ethereum tokens are stored with the directive prefix in the DB
    object.__setattr__(self, 'identifier', ETHEREUM_DIRECTIVE + self.identifier)
    super().__post_init__(form_with_incomplete_data)
    # TODO: figure out a way to move this out. Moved in here due to cyclic imports
    from rotkehlchen.assets.resolver import AssetResolver  # isort:skip  # noqa: E501  # pylint: disable=import-outside-toplevel
    from rotkehlchen.globaldb import GlobalDBHandler  # isort:skip  # noqa: E501  # pylint: disable=import-outside-toplevel
    data = AssetResolver().get_asset_data(self.identifier)  # pylint: disable=no-member

    if not data.ethereum_address:
        raise DeserializationError(
            'Tried to initialize a non Ethereum asset as Ethereum Token',
        )

    # frozen dataclass: attributes can only be set via object.__setattr__
    object.__setattr__(self, 'ethereum_address', data.ethereum_address)
    object.__setattr__(self, 'decimals', data.decimals)
    object.__setattr__(self, 'protocol', data.protocol)

    underlying_tokens = GlobalDBHandler().fetch_underlying_tokens(data.ethereum_address)
    object.__setattr__(self, 'underlying_tokens', underlying_tokens)
def _force_remote(cursor: sqlite3.Cursor, local_asset: Asset, full_insert: str) -> None:
    """Force the remote entry into the database by deleting old one and doing the full insert.

    May raise an sqlite3 error if something fails.
    """
    # foreign keys must be off while we delete, since the asset row is
    # referenced from the type-specific tables we delete from first
    cursor.executescript('PRAGMA foreign_keys = OFF;')
    if local_asset.asset_type == AssetType.ETHEREUM_TOKEN:
        token = EthereumToken.from_asset(local_asset)
        cursor.execute(
            'DELETE FROM ethereum_tokens WHERE address=?;',
            (token.ethereum_address, ),  # type: ignore  # token != None
        )
    else:
        cursor.execute(
            'DELETE FROM common_asset_details WHERE asset_id=?;',
            (local_asset.identifier, ),
        )
    cursor.execute(
        'DELETE FROM assets WHERE identifier=?;',
        (local_asset.identifier, ),
    )
    cursor.executescript('PRAGMA foreign_keys = ON;')
    # Insert new entry. Since identifiers are the same, no foreign key constrains should break
    executeall(cursor, full_insert)
    # drop any cached resolution of the old asset so the new data is picked up
    AssetResolver().clean_memory_cache(local_asset.identifier.lower())
def __init__(self, data_directory: FilePath, msg_aggregator: MessagesAggregator):
    """Create a data handler in the logged-out state."""
    # no user session yet -- login happens later
    self.logged_in = False
    self.msg_aggregator = msg_aggregator
    self.data_directory = data_directory
    # placeholder until a real user logs in
    self.username = '******'
    self.eth_tokens = AssetResolver().get_all_eth_tokens()
def test_asset_identifiers_are_unique_all_lowercased():
    """Test that all asset identifiers would be unique if we do a lowercase comparison"""
    identifier_set = set()
    for asset_id in AssetResolver().assets:
        lowered = asset_id.lower()
        # BUGFIX: the set previously stored the ORIGINAL-cased id, so two
        # identifiers differing only in case (e.g. 'ABC' vs 'abc') would never
        # collide and the test could not catch the very thing it checks for.
        # The lowercased form must be both compared and stored.
        assert lowered not in identifier_set, f'id {asset_id} already in the assets set'
        identifier_set.add(lowered)
def analyze_bittrex_assets(currencies: List[Dict[str, Any]]):
    """Go through all bittrex assets and print info whether or not
    Rotkehlchen supports each asset or not.

    This function should be used when wanting to analyze/categorize
    new Bittrex assets
    """
    # bump this while debugging to skip already-analyzed entries
    checking_index = 0
    for idx, bittrex_asset in enumerate(currencies):
        if idx < checking_index:
            continue

        symbol = bittrex_asset['Currency']
        if symbol in UNSUPPORTED_BITTREX_ASSETS:
            print(f'{idx} - {symbol} is NOT SUPPORTED')
            continue

        if not AssetResolver().is_identifier_canonical(symbol):
            raise AssertionError(
                f'{idx} - {symbol} is not known. '
                f'Bittrex name: {bittrex_asset["CurrencyLong"]}',
            )

        asset = Asset(symbol)
        print(
            f'{idx} - {symbol} with name {asset.name} '
            f'is known. Bittrex name: {bittrex_asset["CurrencyLong"]}',
        )
def test_asset_with_unknown_type_does_not_crash(asset_resolver):  # pylint: disable=unused-argument
    """Test that finding an asset with an unknown type does not crash Rotki"""
    new_asset = Asset("COMPRLASSET")
    assert new_asset.name == 'Completely real asset, totally not for testing only'
    # an unknown-type asset must not leak into the eth token list
    token_list = AssetResolver().get_all_eth_token_info()
    assert len(token_list) == 0
    # After the test runs we must reset the asset resolver so that it goes back to
    # the normal list of assets
    AssetResolver._AssetResolver__instance = None
def __post_init__(self):
    """Resolve the token data after the base asset resolution and populate
    the ethereum-specific fields of this frozen dataclass."""
    super().__post_init__()
    resolved = AssetResolver().get_asset_data(self.identifier)

    if not resolved.ethereum_address:
        raise ValueError('Tried to initialize a non Ethereum asset as Ethereum Token')

    # frozen dataclass: must set attributes through object.__setattr__
    object.__setattr__(self, 'ethereum_address', resolved.ethereum_address)
    object.__setattr__(self, 'decimals', resolved.decimals)
def __init__(self, args: argparse.Namespace) -> None:
    """Initialize the Rotkehlchen object

    May Raise:
    - SystemPermissionError if the given data directory's permissions
    are not correct.
    """
    # held while initializing so concurrent access waits for a fully-built object
    self.lock = Semaphore()
    self.lock.acquire()

    # Can also be None after unlock if premium credentials did not
    # authenticate or premium server temporarily offline
    self.premium: Optional[Premium] = None
    self.user_is_logged_in: bool = False
    configure_logging(args)

    self.sleep_secs = args.sleep_secs
    if args.data_dir is None:
        self.data_dir = default_data_directory()
    else:
        self.data_dir = Path(args.data_dir)

    if not os.access(self.data_dir, os.W_OK | os.R_OK):
        raise SystemPermissionError(
            f'The given data directory {self.data_dir} is not readable or writable',
        )
    self.args = args
    self.msg_aggregator = MessagesAggregator()
    self.greenlet_manager = GreenletManager(msg_aggregator=self.msg_aggregator)
    self.exchange_manager = ExchangeManager(msg_aggregator=self.msg_aggregator)
    # Initialize the AssetResolver singleton
    AssetResolver(data_directory=self.data_dir)
    self.data = DataHandler(self.data_dir, self.msg_aggregator)
    self.cryptocompare = Cryptocompare(data_directory=self.data_dir, database=None)
    self.coingecko = Coingecko()
    self.icon_manager = IconManager(data_dir=self.data_dir, coingecko=self.coingecko)
    # background task that gradually fills the icon cache
    self.greenlet_manager.spawn_and_track(
        after_seconds=None,
        task_name='periodically_query_icons_until_all_cached',
        method=self.icon_manager.periodically_query_icons_until_all_cached,
        batch_size=ICONS_BATCH_SIZE,
        sleep_time_secs=ICONS_QUERY_SLEEP,
    )
    # Initialize the Inquirer singleton
    Inquirer(
        data_dir=self.data_dir,
        cryptocompare=self.cryptocompare,
        coingecko=self.coingecko,
    )

    # Keeps how many trades we have found per location. Used for free user limiting
    self.actions_per_location: Dict[str, Dict[Location, int]] = {
        'trade': defaultdict(int),
        'asset_movement': defaultdict(int),
    }

    self.lock.release()
    self.shutdown_event = gevent.event.Event()
def asset_resolver( globaldb, query_github_for_assets, mock_asset_meta_github_response, mock_asset_github_response, force_reinitialize_asset_resolver, use_clean_caching_directory, custom_ethereum_tokens, ): """Run the first initialization of the AssetResolver singleton It's an autouse fixture so that it always gets initialized """ # If we need to reinitialize asset resolver, do it. We need to if: # (1) test asks for it # (2) test uses clean directory, so the previously primed DB no longer exists if force_reinitialize_asset_resolver or use_clean_caching_directory: AssetResolver._AssetResolver__instance = None if query_github_for_assets: resolver = AssetResolver() else: # mock the github request to return version lower than anything possible def mock_get_request(url: str) -> MockResponse: if url == 'https://raw.githubusercontent.com/rotki/rotki/develop/rotkehlchen/data/all_assets.meta': # noqa: E501 return MockResponse(200, mock_asset_meta_github_response) if url == 'https://raw.githubusercontent.com/rotki/rotki/develop/rotkehlchen/data/all_assets.json': # noqa: E501 return MockResponse(200, mock_asset_github_response) # else raise AssertionError('This mock should receive no other urls') get_patch = patch('requests.get', side_effect=mock_get_request) with get_patch: resolver = AssetResolver() # add any custom ethereum tokens given by the fixtures for a test if custom_ethereum_tokens is not None: for entry in custom_ethereum_tokens: asset_id = ETHEREUM_DIRECTIVE + entry.ethereum_address globaldb.add_asset(asset_id=asset_id, asset_type=AssetType.ETHEREUM_TOKEN, data=entry) return resolver
def __init__(self, args: argparse.Namespace) -> None:
    """Initialize the Rotkehlchen object from the parsed CLI arguments.

    Sets up logging, the data directory, message aggregation, the exchange
    manager, the eth token list and the Inquirer singleton.
    """
    # held while initializing so concurrent access waits for a fully-built object
    self.lock = Semaphore()
    self.lock.acquire()

    # Can also be None after unlock if premium credentials did not
    # authenticate or premium server temporarily offline
    self.premium: Optional[Premium] = None
    self.user_is_logged_in = False

    logfilename = None
    if args.logtarget == 'file':
        logfilename = args.logfile

    if args.loglevel == 'debug':
        loglevel = logging.DEBUG
    elif args.loglevel == 'info':
        loglevel = logging.INFO
    elif args.loglevel == 'warn':
        loglevel = logging.WARN
    elif args.loglevel == 'error':
        loglevel = logging.ERROR
    elif args.loglevel == 'critical':
        loglevel = logging.CRITICAL
    else:
        # argparse choices should have rejected anything else already
        raise AssertionError('Should never get here. Illegal log value')

    logging.basicConfig(
        filename=logfilename,
        filemode='w',
        level=loglevel,
        format='%(asctime)s -- %(levelname)s:%(name)s:%(message)s',
        datefmt='%d/%m/%Y %H:%M:%S %Z',
    )

    if not args.logfromothermodules:
        # silence noisy third-party loggers unless explicitly requested
        logging.getLogger('urllib3').setLevel(logging.CRITICAL)
        logging.getLogger('urllib3.connectionpool').setLevel(logging.CRITICAL)

    self.sleep_secs = args.sleep_secs
    self.data_dir = args.data_dir
    self.args = args
    self.msg_aggregator = MessagesAggregator()
    self.greenlet_manager = GreenletManager(msg_aggregator=self.msg_aggregator)
    self.exchange_manager = ExchangeManager(msg_aggregator=self.msg_aggregator)
    self.all_eth_tokens = AssetResolver().get_all_eth_tokens()
    self.data = DataHandler(self.data_dir, self.msg_aggregator)
    self.cryptocompare = Cryptocompare(data_directory=self.data_dir, database=None)
    # Initialize the Inquirer singleton
    Inquirer(data_dir=self.data_dir, cryptocompare=self.cryptocompare)

    self.lock.release()
    self.shutdown_event = gevent.event.Event()
def __post_init__(self) -> None:
    """Resolve the token data after the base asset resolution and populate
    the ethereum-specific fields of this frozen dataclass."""
    super().__post_init__()
    token_data = AssetResolver().get_asset_data(self.identifier)  # pylint: disable=no-member

    if not token_data.ethereum_address:
        raise DeserializationError(
            'Tried to initialize a non Ethereum asset as Ethereum Token',
        )

    # frozen dataclass: must set attributes through object.__setattr__
    object.__setattr__(self, 'ethereum_address', token_data.ethereum_address)
    object.__setattr__(self, 'decimals', token_data.decimals)
def __init__(self, data_directory='.'):
    """Set up all directories, caches, price oracles and blockchain
    querying modules rooted at the given data directory.

    Expects a 'buchfink.yaml' config file inside data_directory.
    """
    self.data_directory = Path(data_directory)
    self.config = yaml.load(open(self.data_directory / 'buchfink.yaml', 'r'), Loader=yaml.SafeLoader)

    self.reports_directory = self.data_directory / "reports"
    self.trades_directory = self.data_directory / "trades"
    self.cache_directory = self.data_directory / "cache"

    self.reports_directory.mkdir(exist_ok=True)
    self.trades_directory.mkdir(exist_ok=True)
    self.cache_directory.mkdir(exist_ok=True)
    (self.cache_directory / 'cryptocompare').mkdir(exist_ok=True)
    (self.cache_directory / 'history').mkdir(exist_ok=True)
    (self.cache_directory / 'inquirer').mkdir(exist_ok=True)

    self.cryptocompare = Cryptocompare(self.cache_directory / 'cryptocompare', self)
    self.historian = PriceHistorian(self.cache_directory / 'history', '01/01/2014', self.cryptocompare)
    self.inquirer = Inquirer(self.cache_directory / 'inquirer', self.cryptocompare)

    self.msg_aggregator = MessagesAggregator()
    self.greenlet_manager = GreenletManager(msg_aggregator=self.msg_aggregator)

    # Initialize blockchain querying modules
    self.etherscan = Etherscan(database=self, msg_aggregator=self.msg_aggregator)
    self.all_eth_tokens = AssetResolver().get_all_eth_tokens()
    self.alethio = Alethio(
        database=self,
        msg_aggregator=self.msg_aggregator,
        all_eth_tokens=self.all_eth_tokens,
    )
    self.ethereum_manager = EthereumManager(
        ethrpc_endpoint=self.get_eth_rpc_endpoint(),
        etherscan=self.etherscan,
        msg_aggregator=self.msg_aggregator,
    )
    #self.chain_manager = ChainManager(
    #    blockchain_accounts=[],
    #    owned_eth_tokens=[],
    #    ethereum_manager=self.ethereum_manager,
    #    msg_aggregator=self.msg_aggregator,
    #    alethio=alethio,
    #    greenlet_manager=self.greenlet_manager,
    #    premium=False,
    #    eth_modules=ethereum_modules,
    #)
    self.ethereum_analyzer = EthereumAnalyzer(
        ethereum_manager=self.ethereum_manager,
        database=self,
    )
def test_cryptocompare_asset_support(cryptocompare):
    """Try to detect if a token that we have as not supported by cryptocompare got added"""
    cc_assets = cryptocompare.all_coins()
    # identifiers whose symbol collides with a DIFFERENT coin on cryptocompare,
    # so a symbol match there does not mean our asset is supported
    exceptions = (
        'BKC',     # Bankcoin Cash but Balkan Coin in CC
        'BNC',     # Bionic but Benja Coin in CC
        'BTG-2',   # Bitgem but Bitcoin Gold in CC
        'BTR',     # Bitether but Bither in CC
        'CBC-2',   # Cashbery coin but Casino Betting Coin in CC
        'CCN',     # CustomContractnetwork but CannaCoin in CC
        'CMCT-2',  # Cyber Movie Chain but Crowd Machine in CC
        'CORN-2',  # Cornichon but Corn in CC
        'CTX',     # Centauri coin but CarTaxi in CC
        'DIT',     # Direct insurance token but DitCoin in CC
        'DRM',     # Dreamcoin but Dreamchain in CC
        'DTX-2',   # Digital Ticks but Data Exchange in CC
        'GNC',     # Galaxy network but Greencoin in CC
        'KNT',     # Kora network but Knekted in CC
        'LKY',     # Linkey but LuckyCoin in CC
        'NTK-2',   # Netkoin but Neurotoken in CC
        'PAN',     # Panvala but Pantos in CC
        'PTT',     # Proton token but Pink Taxi Token in CC
        'RMC',     # Remicoin but Russian Miner Coin in CC
        'SOUL-2',  # Cryptosoul but Phantasma in CC
        'TIC',     # Thingschain but True Investment Coin in CC
        'TOK',     # TOKOK but Tokugawa Coin in CC
        'VD',      # Bitcoin card but Vindax Coin in CC
        'DT',      # Dragon Token but Dark Token in CC
        'MUST',    # Must (Cometh) but Must protocol in CC
        'SDT-2',   # Stake DAO token but TerraSDT in CC
        'BAC',     # Basis Cash but BACoin in CC
        'IHF',  # waiting until cryptocompare fixes historical price for this. https://github.com/rotki/rotki/pull/2176  # noqa: E501
        'FLOW',    # FLOW from dapper labs but "Flow Protocol" in CC
        'NCT-2',   # Name change token but Polyswarm in CC
        'NDX',     # newdex token but Index token in CC
        'ARCH-2',  # Archer DAO Governance token but Archcoin in CC
        'AC-2',    # Acoconut token but Asiacoin in CC
        'TON',     # Tontoken but Tokamak network in CC
        'FNK',     # Finiko token but FunKeyPai network in CC
        'LOTTO',   # Lotto token but LottoCoin in CC
        'XFI',     # Dfinance token but XFinance in CC
    )
    for identifier, asset_data in AssetResolver().assets.items():
        # an asset is "potentially supported" if we marked it unsupported
        # (empty cryptocompare mapping) yet its symbol shows up on CC
        potential_support = (
            asset_data.get('cryptocompare', None) == '' and
            asset_data['symbol'] in cc_assets and
            identifier not in exceptions
        )
        if potential_support:
            msg = (
                f'We have {identifier} as not supported by cryptocompare but '
                f'the symbol appears in its supported assets'
            )
            test_warnings.warn(UserWarning(msg))
def test_tokens_address_is_checksummed():
    """Test that all ethereum saved token asset addresses are checksummed"""
    token_types = (AssetType.ETH_TOKEN_AND_MORE, AssetType.ETH_TOKEN)
    for _, asset_data in AssetResolver().assets.items():
        # only ethereum token entries carry an address worth checking
        if asset_type_mapping[asset_data['type']] not in token_types:
            continue

        msg = (
            f'Ethereum token\'s {asset_data["name"]} ethereum address '
            f'is not checksummed {asset_data["ethereum_address"]}'
        )
        assert is_checksum_address(asset_data['ethereum_address']), msg
def query_tokens_for_addresses(
        self,
        addresses: List[ChecksumEthAddress],
        force_detection: bool,
) -> TokensReturn:
    """Queries/detects token balances for a list of addresses

    If an address's tokens were recently autodetected they are not detected again but the
    balances are simply queried. Unless force_detection is True.

    Returns the token balances of each address and the usd prices of the tokens
    """
    log.debug(
        'Querying/detecting token balances for all addresses',
        force_detection=force_detection,
    )
    all_tokens = AssetResolver().get_all_eth_token_info()
    # With etherscan with chunks > 120, we get request uri too large
    # so the limitation is not in the gas, but in the request uri length
    etherscan_chunks = list(get_chunks(all_tokens, n=ETHERSCAN_MAX_TOKEN_CHUNK_LENGTH))
    other_chunks = list(get_chunks(all_tokens, n=OTHER_MAX_TOKEN_CHUNK_LENGTH))
    now = ts_now()
    # usd prices are shared across addresses so each token is priced only once
    token_usd_price: Dict[EthereumToken, Price] = {}
    result = {}

    for address in addresses:
        saved_list = self.db.get_tokens_for_address_if_time(address=address, current_time=now)
        if force_detection or saved_list is None:
            # no recent detection result -- run full detection for the address
            balances = self.detect_tokens_for_address(
                address=address,
                token_usd_price=token_usd_price,
                etherscan_chunks=etherscan_chunks,
                other_chunks=other_chunks,
            )
        else:
            if len(saved_list) == 0:
                continue  # Do not query if we know the address has no tokens

            balances = defaultdict(FVal)
            self._get_tokens_balance_and_price(
                address=address,
                tokens=[x.token_info() for x in saved_list],
                balances=balances,
                token_usd_price=token_usd_price,
                call_order=None,  # use defaults
            )

        result[address] = balances

    return result, token_usd_price
def fixture_globaldb(globaldb_version, tmpdir_factory):
    """Prime a fresh temporary global DB (optionally at a specific version)
    and return an initialized globaldb handler for it."""
    # clean the previous resolver memory cache, as it
    # may have cached results from a discarded database
    AssetResolver().clean_memory_cache()
    repo_root = Path(__file__).resolve().parent.parent.parent
    # no specific version means the normal packaged DB is used
    seed_db = (
        repo_root / 'data' / 'global.db'
        if globaldb_version is None
        else repo_root / 'tests' / 'data' / f'v{globaldb_version}_global.db'
    )
    tmp_data_dir = Path(tmpdir_factory.mktemp('test_data_dir'))
    global_dir = tmp_data_dir / 'global_data'
    global_dir.mkdir(parents=True, exist_ok=True)
    copyfile(seed_db, global_dir / 'global.db')
    return create_globaldb(tmp_data_dir)
def __post_init__(self, form_with_incomplete_data: bool = False) -> None: super().__post_init__() # TODO: figure out a way to move this out. Moved in here due to cyclic imports from rotkehlchen.assets.resolver import AssetResolver # isort:skip # noqa: E501 # pylint: disable=import-outside-toplevel data = AssetResolver().get_asset_data(self.identifier) # pylint: disable=no-member if not data.ethereum_address: raise DeserializationError( 'Tried to initialize a non Ethereum asset as Ethereum Token', ) object.__setattr__(self, 'ethereum_address', data.ethereum_address) object.__setattr__(self, 'decimals', data.decimals)
def test_coingecko_identifiers_are_reachable(data_dir):
    """
    Test that all assets have a coingecko entry and that all the identifiers exist in coingecko
    """
    coingecko = Coingecko(data_directory=data_dir)
    all_coins = coingecko.all_coins()
    for identifier, asset_data in AssetResolver().assets.items():
        if identifier in DELISTED_ASSETS:
            # delisted assets won't be in the mapping
            continue
        asset_type = asset_type_mapping[asset_data['type']]
        if asset_type == AssetType.FIAT:
            continue
        # an empty coingecko mapping deliberately means "no coingecko support"
        found = True
        coingecko_str = asset_data.get('coingecko', None)
        msg = f'Asset {identifier} does not have a coingecko entry'
        assert coingecko_str is not None, msg
        if coingecko_str != '':
            # check the mapped id is actually known to coingecko
            found = False
            for entry in all_coins:
                if coingecko_str == entry['id']:
                    found = True
                    break

        # collect candidate ids by symbol/name to help fix a broken mapping
        suggestions = []
        if not found:
            for entry in all_coins:
                if entry['symbol'].upper() == asset_data['symbol'].upper():
                    suggestions.append((entry['id'], entry['name'], entry['symbol']))
                    continue

                if entry['name'].upper() == asset_data['symbol'].upper():
                    suggestions.append((entry['id'], entry['name'], entry['symbol']))
                    continue

        msg = f'Asset {identifier} coingecko mapping does not exist.'
        if len(suggestions) != 0:
            for s in suggestions:
                msg += f'\nSuggestion: id:{s[0]} name:{s[1]} symbol:{s[2]}'
        if not found:
            test_warnings.warn(UserWarning(msg))
def test_coingecko_identifiers_are_reachable():
    """
    Test that all assets have a coingecko entry and that all the identifiers exist in coingecko
    """
    coins_delisted_from_coingecko = [
        'FLUZ', 'EBCH', 'GOLOS', 'NPER', 'BLN', 'PIX',
    ]
    coingecko = Coingecko()
    all_coins = coingecko.all_coins()
    for identifier, asset_data in AssetResolver().assets.items():
        if identifier in coins_delisted_from_coingecko:
            # data = coingecko.asset_data(Asset(identifier))
            # Figure out if the removed assets can still be queried
            # for historical prices. If not, then remove their mapping from
            # all_assets.json and remove them from this tests.
            continue
        asset_type = asset_type_mapping[asset_data['type']]
        if asset_type == AssetType.FIAT:
            continue

        coingecko_str = asset_data.get('coingecko', None)
        msg = f'Asset {identifier} does not have a coingecko entry'
        assert coingecko_str is not None, msg
        # BUGFIX: `found` was previously only assigned inside the
        # `coingecko_str != ''` branch below, so the trailing `assert found`
        # raised NameError on the first asset with an empty mapping, or
        # silently reused the stale value from the previous loop iteration.
        # An empty mapping means "no coingecko support" and must pass.
        found = True
        if coingecko_str != '':
            # verify the mapped id is actually known to coingecko
            found = False
            for entry in all_coins:
                if coingecko_str == entry['id']:
                    found = True
                    break

        # collect candidate ids by symbol to help fix a broken mapping
        suggestions = []
        if not found:
            for entry in all_coins:
                if entry['symbol'].upper() == asset_data['symbol']:
                    suggestions.append((entry['id'], entry['name'], entry['symbol']))

        msg = f'Asset {identifier} coingecko mapping does not exist.'
        if len(suggestions) != 0:
            for s in suggestions:
                msg += f'\nSuggestion: id:{s[0]} name:{s[1]} symbol:{s[2]}'
        assert found, msg
def test_open_new_globaldb_with_old_rotki(tmpdir_factory):
    """Test for https://github.com/rotki/rotki/issues/2781"""
    # clean the previous resolver memory cache, as it
    # may have cached results from a discarded database
    AssetResolver().clean_memory_cache()
    # a DB version far in the future that this rotki can never understand
    version = 9999999999
    repo_root = Path(__file__).resolve().parent.parent.parent
    seed_db = repo_root / 'tests' / 'data' / f'v{version}_global.db'
    tmp_data_dir = Path(tmpdir_factory.mktemp('test_data_dir'))
    global_dir = tmp_data_dir / 'global_data'
    global_dir.mkdir(parents=True, exist_ok=True)
    copyfile(seed_db, global_dir / 'global.db')
    with pytest.raises(ValueError) as excinfo:
        create_globaldb(tmp_data_dir)

    msg = (
        f'Tried to open a rotki version intended to work with GlobalDB v{GLOBAL_DB_VERSION} '
        f'but the GlobalDB found in the system is v{version}. Bailing ...')
    assert msg in str(excinfo.value)
def analyze_binance_assets(sorted_assets):
    """Go through all binance assets and print info whether or not
    Rotkehlchen supports each asset or not.

    This function should be used when wanting to analyze/categorize
    new Binance assets
    """
    length = len(sorted_assets)
    for idx, binance_asset in enumerate(sorted_assets):
        if binance_asset in RENAMED_BINANCE_ASSETS:
            continue

        # map the binance-specific symbol to our world identifier
        binance_asset = BINANCE_TO_WORLD.get(binance_asset, binance_asset)
        if not AssetResolver().is_identifier_canonical(binance_asset):
            raise AssertionError(
                f'{idx}/{length} - {binance_asset} is not known.',
            )

        asset = Asset(binance_asset)
        print(
            f'{idx}/{length} - {binance_asset} with name {asset.name} is known',
        )
def __post_init__(self, form_with_incomplete_data: bool = False) -> None:
    """
    Asset post initialization

    The only thing that is given to initialize an asset is a string.

    If a non string is given then it's probably a deserialization error or
    invalid data were given to us by the server if an API was queried.

    If `form_with_incomplete_data` is given and is True then we allow the generation
    of an asset object even if the corresponding underlying object is missing
    important data such as name, symbol, token decimals etc. In most case this
    is not wanted except for some exception like passing in some functions for
    icon generation.

    May raise UnknownAsset if the asset identifier can't be matched to anything
    """
    if not isinstance(self.identifier, str):
        raise DeserializationError(
            'Tried to initialize an asset out of a non-string identifier',
        )

    # TODO: figure out a way to move this out. Moved in here due to cyclic imports
    from rotkehlchen.assets.resolver import AssetResolver  # isort:skip  # noqa: E501  # pylint: disable=import-outside-toplevel
    data = AssetResolver().get_asset_data(self.identifier, form_with_incomplete_data)
    # make sure same case of identifier as in DB is saved in the structure
    object.__setattr__(self, 'identifier', data.identifier)
    # Ugly hack to set attributes of a frozen data class as post init
    # https://docs.python.org/3/library/dataclasses.html#frozen-instances
    object.__setattr__(self, 'name', data.name)
    object.__setattr__(self, 'symbol', data.symbol)
    object.__setattr__(self, 'active', data.active)
    object.__setattr__(self, 'asset_type', data.asset_type)
    object.__setattr__(self, 'started', data.started)
    object.__setattr__(self, 'ended', data.ended)
    object.__setattr__(self, 'forked', data.forked)
    object.__setattr__(self, 'swapped_for', data.swapped_for)
    object.__setattr__(self, 'cryptocompare', data.cryptocompare)
    object.__setattr__(self, 'coingecko', data.coingecko)
def test_cryptocompare_asset_support(cryptocompare):
    """Try to detect if a token that we have as not supported by cryptocompare got added"""
    cc_assets = cryptocompare.all_coins()
    # identifiers whose symbol collides with a DIFFERENT coin on cryptocompare,
    # so a symbol match there does not mean our asset is supported
    exceptions = (
        'BKC',     # Bankcoin Cash but Balkan Coin in CC
        'BNC',     # Bionic but Benja Coin in CC
        'BTG-2',   # Bitgem but Bitcoin Gold in CC
        'BTR',     # Bitether but Bither in CC
        'CBC-2',   # Cashbery coin but Casino Betting Coin in CC
        'CCN',     # CustomContractnetwork but CannaCoin in CC
        'CMCT-2',  # Cyber Movie Chain but Crowd Machine in CC
        'CORN-2',  # Cornichon but Corn in CC
        'CTX',     # Centauri coin but CarTaxi in CC
        'DIT',     # Direct insurance token but DitCoin in CC
        'DRM',     # Dreamcoin but Dreamchain in CC
        'DTX-2',   # Digital Ticks but Data Exchange in CC
        'GNC',     # Galaxy network but Greencoin in CC
        'KNT',     # Kora network but Knekted in CC
        'LKY',     # Linkey but LuckyCoin in CC
        'NTK-2',   # Netkoin but Neurotoken in CC
        'PAN',     # Panvala but Pantos in CC
        'PTT',     # Proton token but Pink Taxi Token in CC
        'RMC',     # Remicoin but Russian Miner Coin in CC
        'SOUL-2',  # Cryptosoul but Phantasma in CC
        'TIC',     # Thingschain but True Investment Coin in CC
        'TOK',     # TOKOK but Tokugawa Coin in CC
        'VD',      # Bitcoin card but Vindax Coin in CC
        'DT',      # Dragon Token but Dark Token in CC
    )
    for identifier, asset_data in AssetResolver().assets.items():
        # an asset is "potentially supported" if we marked it unsupported
        # (empty cryptocompare mapping) yet its symbol shows up on CC
        potential_support = (asset_data.get('cryptocompare', None) == '' and
                            asset_data['symbol'] in cc_assets and
                            identifier not in exceptions)
        if potential_support:
            msg = (
                f'We have {identifier} as not supported by cryptocompare but '
                f'the symbol appears in its supported assets')
            test_warnings.warn(UserWarning(msg))
def _apply_single_version_update(
        self,
        cursor: sqlite3.Cursor,
        version: int,
        text: str,
        conflicts: Optional[Dict[Asset, Literal['remote', 'local']]],
) -> None:
    """Apply a single assets-version update text to the DB.

    The update text comes as pairs of lines: an update "action" statement
    followed by the corresponding full insert statement ('*' means the action
    IS the full insert). Entries that conflict with existing local assets are
    resolved via the given conflicts mapping, or appended to self.conflicts
    for the user to resolve. At the end the stored assets version is bumped.
    """
    lines = text.splitlines()
    # consume the lines two at a time: (action, full_insert)
    for action, full_insert in zip(*[iter(lines)] * 2):
        if full_insert == '*':
            full_insert = action

        try:
            remote_asset_data = self._parse_full_insert(full_insert)
        except DeserializationError as e:
            self.msg_aggregator.add_warning(
                f'Skipping entry during assets update to v{version} due '
                f'to a deserialization error. {str(e)}',
            )
            continue

        local_asset: Optional[Asset] = None
        try:
            local_asset = Asset(remote_asset_data.identifier)
        except UnknownAsset:
            # asset not known locally -- a plain insert will be attempted on failure
            pass

        try:
            executeall(cursor, action)
            if local_asset is not None:
                # drop stale cached resolution so the updated data is picked up
                AssetResolver().clean_memory_cache(local_asset.identifier.lower())
        except sqlite3.Error:  # https://docs.python.org/3/library/sqlite3.html#exceptions
            if local_asset is None:
                try:  # if asset is not known then simply do an insertion
                    executeall(cursor, full_insert)
                except sqlite3.Error as e:
                    self.msg_aggregator.add_warning(
                        f'Failed to add asset {remote_asset_data.identifier} in the '
                        f'DB during the v{version} assets update. Skipping entry. '
                        f'Error: {str(e)}',
                    )
                continue  # fail or succeed continue to next entry

            # otherwise asset is known, so it's a conflict. Check if we can resolve
            resolution = conflicts.get(local_asset) if conflicts else None
            if resolution == 'local':
                # do nothing, keep local
                continue
            if resolution == 'remote':
                try:
                    _force_remote(cursor, local_asset, full_insert)
                except sqlite3.Error as e:
                    self.msg_aggregator.add_warning(
                        f'Failed to resolve conflict for {remote_asset_data.identifier} in '
                        f'the DB during the v{version} assets update. Skipping entry. '
                        f'Error: {str(e)}',
                    )
                continue  # fail or succeed continue to next entry

            # else can't resolve. Mark it for the user to resolve.
            local_data = AssetResolver().get_asset_data(local_asset.identifier, False)
            self.conflicts.append((local_data, remote_asset_data))

    # special case upgrade that should be temporary, until we make non-asset specific
    # update lines possible in our update mechanism:
    # https://github.com/rotki/assets/pull/49
    if version == 7:
        cursor.execute(
            'UPDATE ethereum_tokens SET decimals=18 WHERE protocol=="balancer";',
        )

    # at the very end update the current version in the DB
    cursor.execute(
        'INSERT OR REPLACE INTO settings(name, value) VALUES(?, ?)',
        (ASSETS_VERSION_KEY, str(version)),
    )
def test_coingecko_identifiers_are_reachable():
    """
    Test that all assets have a coingecko entry and that all the identifiers exist in coingecko
    """
    coins_delisted_from_coingecko = [
        '1SG', 'FLUZ', 'EBCH', 'GOLOS', 'NPER', 'BLN', 'ADN', 'PIX', 'MTC-2',
        'LKY', 'ARB', 'BBI', 'BITCAR', 'BTR', 'OLE', 'ROC', 'VIN', 'FIH',
        'WIN-2', 'ADH', 'AUR', 'BAS', 'BYC', 'DGS', 'GMT', 'HST', 'INS',
        'IPSX', 'SHP', 'WDC', 'BOST', 'FND', 'LDC', 'ORI', 'RIPT', 'SGR',
        'LOCUS', 'REDC', 'SGN', 'SOAR', 'YUP', 'AC', 'APIS', 'BITPARK',
        'CO2', 'DAN', 'DEC', 'DLT', 'DROP', 'ERD', 'ETBS', 'GEN', 'STP',
        'SYNC', 'TBT', 'TNT', 'WIC', 'XCN', 'XTP', 'FREC', 'PTC', 'ACC-3',
        'J8T', 'MRK', 'TTV',
    ]
    coingecko = Coingecko()
    all_coins = coingecko.all_coins()
    for identifier, asset_data in AssetResolver().assets.items():
        if identifier in coins_delisted_from_coingecko:
            # data = coingecko.asset_data(Asset(identifier))
            # Figure out if the removed assets can still be queried
            # for historical prices. If not, then remove their mapping from
            # all_assets.json and remove them from this tests.
            continue
        asset_type = asset_type_mapping[asset_data['type']]
        if asset_type == AssetType.FIAT:
            continue
        # an empty coingecko mapping deliberately means "no coingecko support"
        found = True
        coingecko_str = asset_data.get('coingecko', None)
        msg = f'Asset {identifier} does not have a coingecko entry'
        assert coingecko_str is not None, msg
        if coingecko_str != '':
            # check the mapped id is actually known to coingecko
            found = False
            for entry in all_coins:
                if coingecko_str == entry['id']:
                    found = True
                    break

        # collect candidate ids by symbol/name to help fix a broken mapping
        suggestions = []
        if not found:
            for entry in all_coins:
                if entry['symbol'].upper() == asset_data['symbol'].upper():
                    suggestions.append((entry['id'], entry['name'], entry['symbol']))
                    continue

                if entry['name'].upper() == asset_data['symbol'].upper():
                    suggestions.append((entry['id'], entry['name'], entry['symbol']))
                    continue

        msg = f'Asset {identifier} coingecko mapping does not exist.'
        if len(suggestions) != 0:
            for s in suggestions:
                msg += f'\nSuggestion: id:{s[0]} name:{s[1]} symbol:{s[2]}'
        if not found:
            test_warnings.warn(UserWarning(msg))