def main():
    """Aggregate asset metadata from external price oracles into all_assets.json.

    Parses CLI arguments, opens the user's rotkehlchen database, queries
    CoinPaprika / Cryptocompare (and Coinmarketcap, if an API key was given)
    and either:
      - merges the assets of a user-supplied input file into our data, or
      - re-checks every asset already present in all_assets.json.
    Finally overwrites rotkehlchen/data/all_assets.json with the result.
    Exits with status 1 on invalid input.
    """
    arg_parser = aggregator_args()
    args = arg_parser.parse_args()
    msg_aggregator = MessagesAggregator()
    user_data_dir = Path(default_data_directory()) / args.db_user
    database = DBHandler(
        user_data_dir=user_data_dir,
        password=args.db_password,
        msg_aggregator=msg_aggregator,
    )
    our_data = AssetResolver().assets
    paprika = CoinPaprika()
    cmc = None
    cmc_list = None
    # Repository root: three directories up from this file.
    root_path = os.path.dirname(
        os.path.dirname(os.path.dirname(os.path.realpath(__file__))))
    data_directory = f'{Path.home()}/.rotkehlchen'

    # Coinmarketcap is optional -- only queried when an API key is provided.
    if args.cmc_api_key:
        cmc = Coinmarketcap(
            data_directory=data_directory,
            api_key=args.cmc_api_key,
        )
        cmc_list = cmc.get_cryptocyrrency_map()

    cryptocompare = Cryptocompare(data_directory=data_directory, database=database)
    paprika_coins_list = paprika.get_coins_list()
    cryptocompare_coins_map = cryptocompare.all_coins()

    if args.input_file:
        if not os.path.isfile(args.input_file):
            print(f'Given input file {args.input_file} is not a file')
            sys.exit(1)

        with open(args.input_file, 'r') as f:
            input_data = rlk_jsonloads(f.read())

        # Refuse input symbols that collide with already-known assets.
        given_symbols = set(input_data.keys())
        current_symbols = set(our_data.keys())
        if not given_symbols.isdisjoint(current_symbols):
            print(
                f'The following given input symbols already exist in the '
                f'all_assets.json file {given_symbols.intersection(current_symbols)}',
            )
            sys.exit(1)

        # If an input file is given, iterate only its assets and perform checks
        for asset_symbol in input_data.keys():
            input_data = process_asset(
                our_data=input_data,
                asset_symbol=asset_symbol,
                paprika_coins_list=paprika_coins_list,
                paprika=paprika,
                cmc_list=cmc_list,
                cryptocompare_coins_map=cryptocompare_coins_map,
                always_keep_our_time=args.always_keep_our_time,
            )

        # And now combine the two dictionaries to get the final one. Note that
        # no checks are performed for what was in all_assets.json before the
        # script ran in this case.
        our_data = {**our_data, **input_data}
    else:
        # Iterate all of the assets of the all_assets.json file and perform checks
        for asset_symbol in our_data.keys():
            our_data = process_asset(
                our_data=our_data,
                asset_symbol=asset_symbol,
                paprika_coins_list=paprika_coins_list,
                paprika=paprika,
                cmc_list=cmc_list,
                cryptocompare_coins_map=cryptocompare_coins_map,
                always_keep_our_time=args.always_keep_our_time,
            )

    # Finally overwrite the all_assets.json with the modified assets
    with open(
            os.path.join(root_path, 'rotkehlchen', 'data', 'all_assets.json'),
            'w',
    ) as f:
        f.write(json.dumps(our_data, sort_keys=True, indent=4))
# NOTE(review): this file appears to contain TWO `def main()` definitions;
# this second one shadows the earlier one at import time. Presumably one of
# the two is stale and should be removed -- confirm against version control.
def main():
    """Aggregate asset metadata from external price oracles into all_assets.json.

    Like the variant above but without a user database: additionally loads
    rotkehlchen/data/eth_tokens.json and supports three modes:
      - --input-file: merge the assets of a user-supplied file into our data,
      - --process-eth-tokens: walk a slice of eth_tokens.json entries and add
        the not-yet-known tokens,
      - default: re-check every known asset and back-fill ethereum token
        address/decimals from eth_tokens.json.
    Finally overwrites rotkehlchen/data/all_assets.json. Exits with status 1
    on invalid input or missing token data.
    """
    arg_parser = aggregator_args()
    args = arg_parser.parse_args()
    our_data = AssetResolver().assets
    paprika = CoinPaprika()
    cmc = None
    cmc_list = None
    # Repository root: three directories up from this file.
    root_path = os.path.dirname(
        os.path.dirname(os.path.dirname(os.path.realpath(__file__))))
    data_directory = f'{Path.home()}/.rotkehlchen'

    # Coinmarketcap is optional -- only queried when an API key is provided.
    if args.cmc_api_key:
        cmc = Coinmarketcap(
            data_directory=data_directory,
            api_key=args.cmc_api_key,
        )
        cmc_list = cmc.get_cryptocyrrency_map()

    cryptocompare = Cryptocompare(data_directory=data_directory)
    paprika_coins_list = paprika.get_coins_list()
    cryptocompare_coins_map = cryptocompare.all_coins()

    with open(
            os.path.join(root_path, 'rotkehlchen', 'data', 'eth_tokens.json'),
            'r',
    ) as f:
        token_data = rlk_jsonloads(f.read())

    if args.input_file:
        if not os.path.isfile(args.input_file):
            print(f'Given input file {args.input_file} is not a file')
            sys.exit(1)

        with open(args.input_file, 'r') as f:
            input_data = rlk_jsonloads(f.read())

        # Refuse input symbols that collide with already-known assets.
        given_symbols = set(input_data.keys())
        current_symbols = set(our_data.keys())
        if not given_symbols.isdisjoint(current_symbols):
            print(
                f'The following given input symbols already exist in the '
                f'all_assets.json file {given_symbols.intersection(current_symbols)}',
            )
            sys.exit(1)

        # If an input file is given, iterate only its assets and perform checks
        for asset_symbol in input_data.keys():
            input_data = process_asset(
                our_data=input_data,
                asset_symbol=asset_symbol,
                paprika_coins_list=paprika_coins_list,
                paprika=paprika,
                cmc_list=cmc_list,
                cryptocompare_coins_map=cryptocompare_coins_map,
                always_keep_our_time=args.always_keep_our_time,
            )

        # And now combine the two dictionaries to get the final one. Note that
        # no checks are performed for what was in all_assets.json before the
        # script ran in this case.
        our_data = {**our_data, **input_data}
    elif args.process_eth_tokens:
        # Process a fixed window of eth_tokens.json entries per run.
        # NOTE(review): `start` looks like a manually-advanced cursor for
        # batching the work across runs -- confirm intent before changing.
        start = 1240
        stop_after = start + 6
        input_data = {}
        for index, entry in enumerate(token_data[start:], start):
            token_symbol = entry['symbol']
            # at least for now skip all already known tokens
            if ETH_TOKENS_JSON_TO_WORLD.get(token_symbol, token_symbol) in our_data:
                print(
                    f"Skipping ETH token {token_symbol} since it's already known"
                )
            else:
                input_data = process_asset(
                    our_data=input_data,
                    asset_symbol=token_symbol,
                    paprika_coins_list=paprika_coins_list,
                    paprika=paprika,
                    cmc_list=cmc_list,
                    cryptocompare_coins_map=cryptocompare_coins_map,
                    always_keep_our_time=args.always_keep_our_time,
                    token_entry=entry,
                )
            if index >= stop_after:
                break

        # And now combine the two dictionaries to get the final one. Note that
        # no checks are performed for what was in all_assets.json before the
        # script ran in this case.
        our_data = {**our_data, **input_data}
    else:
        # Iterate all of the assets of the all_assets.json file and perform checks
        for asset_symbol in our_data.keys():
            our_data = process_asset(
                our_data=our_data,
                asset_symbol=asset_symbol,
                paprika_coins_list=paprika_coins_list,
                paprika=paprika,
                cmc_list=cmc_list,
                cryptocompare_coins_map=cryptocompare_coins_map,
                always_keep_our_time=args.always_keep_our_time,
            )

            # Make sure that our data have the ethereum address and decimals
            # from eth_tokens.json
            asset_type = our_data[asset_symbol]['type']
            if 'ethereum token' not in asset_type:
                continue

            eth_token_symbol = WORLD_TO_ETH_TOKENS_JSON.get(
                asset_symbol, asset_symbol)
            data = find_token_data(token_data, eth_token_symbol)
            if not data:
                print(f'Missing token data for {asset_symbol} ... sadness :(')
                sys.exit(1)

            our_data[asset_symbol]['ethereum_address'] = data['address']
            our_data[asset_symbol]['ethereum_token_decimals'] = data['decimal']

    # Finally overwrite the all_assets.json with the modified assets
    with open(
            os.path.join(root_path, 'rotkehlchen', 'data', 'all_assets.json'),
            'w',
    ) as f:
        f.write(json.dumps(our_data, sort_keys=True, indent=4))