def test_upgrade_sqlcipher_v3_to_v4_with_dbinfo(data_dir):
    """A v3 sqlcipher DB accompanied by a dbinfo file must be migrated in-place.

    Only meaningful when the system's sqlcipher is v4; otherwise there is
    nothing to upgrade and the test is a no-op.
    """
    if detect_sqlcipher_version() != 4:
        return  # nothing to test

    username = '******'
    userdata_dir = os.path.join(data_dir, username)
    os.mkdir(userdata_dir)

    # Copy the bundled v3 database file into the user's data directory
    dir_path = os.path.dirname(os.path.realpath(__file__))
    copyfile(
        os.path.join(os.path.dirname(dir_path), 'data', 'sqlcipher_v3_rotkehlchen.db'),
        os.path.join(userdata_dir, 'rotkehlchen.db'),
    )
    dbinfo = {'sqlcipher_version': 3, 'md5_hash': '20c910c28ca42370e4a5f24d6d4a73d2'}
    with open(os.path.join(userdata_dir, DBINFO_FILENAME), 'w') as f:
        f.write(rlk_jsondumps(dbinfo))

    # The constructor should migrate it in-place and leave us with a working DB
    msg_aggregator = MessagesAggregator()
    db = DBHandler(userdata_dir, '123', msg_aggregator)
    assert db.get_version() == ROTKEHLCHEN_DB_VERSION
def _save_cached_price(
        self,
        from_asset: Asset,
        to_asset: Asset,
        date: str,
        price: Price,
) -> None:
    """Record a dated price quote for a pair in its JSON price-history cache file.

    Any unreadable or non-dict cache content is discarded and replaced.
    """
    price_history_dir = get_or_make_price_history_dir(self.data_directory)
    filename = (
        price_history_dir /
        f'{PRICE_HISTORY_FILE_PREFIX}{from_asset.identifier}_{to_asset.identifier}.json'
    )
    data: Dict[str, Price] = {}
    if filename.is_file():
        with open(filename, 'r') as f:
            try:
                data = rlk_jsonloads_dict(f.read())
            except JSONDecodeError:
                data = {}

    # Guard against a corrupted cache whose top level is not a mapping
    if not isinstance(data, dict):
        data = {}

    data[date] = price
    with open(filename, 'w') as outfile:
        outfile.write(rlk_jsondumps(data))
def get_cryptocyrrency_map(self) -> List[Dict[str, Any]]:
    """Return the coinmarketcap coin map, served from a month-long file cache.

    Requeries coinmarketcap whenever the cache file is missing, unparseable,
    or older than one month, and rewrites the cache afterwards.
    """
    # TODO: Both here and in cryptocompare the cache funcionality is the same
    # Extract the caching part into its own function somehow and abstract it
    # away
    invalidate_cache = True
    coinlist_cache_path = os.path.join(self.data_directory, 'cmc_coinlist.json')
    if os.path.isfile(coinlist_cache_path):
        log.info('Found coinmarketcap coinlist cache', path=coinlist_cache_path)
        with open(coinlist_cache_path, 'r') as f:
            try:
                file_data = jsonloads_dict(f.read())
            except JSONDecodeError:
                invalidate_cache = True
            else:
                now = ts_now()
                invalidate_cache = False
                # A cache older than a month (2629800 secs) triggers a requery
                if file_data['time'] < now and now - file_data['time'] > 2629800:
                    log.info('Coinmarketcap coinlist cache is now invalidated')
                    invalidate_cache = True

    if invalidate_cache:
        data = self._get_cryptocyrrency_map()
        # Also save the cache
        with open(coinlist_cache_path, 'w') as f:
            now = ts_now()
            log.info('Writing coinmarketcap coinlist cache', timestamp=now)
            f.write(rlk_jsondumps({'time': now, 'data': data}))
    else:
        # in any case take the data
        data = file_data['data']

    return data
def save_historical_forex_data() -> None:
    """Dump the Inquirer's cached forex prices to the price history directory."""
    inquirer = Inquirer()
    # get_or_make_price_history_dir creates the directory if it does not exist
    history_dir = get_or_make_price_history_dir(inquirer._data_directory)
    serialized = rlk_jsondumps(inquirer._cached_forex_data)
    with open(history_dir / 'price_history_forex.json', 'w') as outfile:
        outfile.write(serialized)
def get_poap_airdrop_data(name: str, data_dir: Path) -> Dict[str, Any]:
    """Return the POAP airdrop data for ``name``, downloading and caching it if needed.

    The data is cached under ``<data_dir>/airdrops_poap/<name>.json``; when the
    cache file is missing it is fetched from the configured gist and written out.

    May raise:
    - RemoteError if the gist request fails or returns invalid JSON
    """
    airdrops_dir = data_dir / 'airdrops_poap'
    airdrops_dir.mkdir(parents=True, exist_ok=True)
    filename = airdrops_dir / f'{name}.json'
    if not filename.is_file():
        # if not cached, get it from the gist
        try:
            request = requests.get(url=POAP_AIRDROPS[name][0], timeout=DEFAULT_TIMEOUT_TUPLE)
        except requests.exceptions.RequestException as e:
            raise RemoteError(f'POAP airdrops Gist request failed due to {str(e)}') from e
        try:
            json_data = jsonloads_dict(request.content.decode('utf-8'))
        except JSONDecodeError as e:
            raise RemoteError(f'POAP airdrops Gist contains an invalid JSON {str(e)}') from e
        with open(filename, 'w') as outfile:
            outfile.write(rlk_jsondumps(json_data))

    # BUGFIX: the original opened the file without ever closing it, leaking the
    # handle; a context manager guarantees it is released
    with open(filename, 'r') as infile:
        data_dict = jsonloads_dict(infile.read())
    return data_dict
def __del__(self):
    """On finalization, disconnect and persist the sqlcipher version and DB hash."""
    self.disconnect()
    dbinfo = {
        'sqlcipher_version': self.sqlcipher_version,
        'md5_hash': self.get_md5hash(),
    }
    with open(os.path.join(self.user_data_dir, DBINFO_FILENAME), 'w') as f:
        f.write(rlk_jsondumps(dbinfo))
def test_serialize_deserialize_trade():
    """A Trade must serialize to the raw fixture's json and deserialize back equal."""
    trade = Trade(
        timestamp=Timestamp(1537985746),
        location=Location.KRAKEN,
        pair=TradePair('ETH_BTC'),
        trade_type=TradeType.SELL,
        amount=FVal('2.80'),
        rate=FVal('0.1234'),
        fee=FVal('0.01'),
        fee_currency=A_ETH,
        link='a link can be here',
        notes='notes can be here',
    )
    assert rlk_jsondumps(trade._asdict()) == rlk_jsondumps(raw_trade2)
    assert deserialize_trade(raw_trade2) == trade
def test_encoding():
    """rlk_jsondumps must produce valid JSON that round-trips to the input data.

    The original test compared against a hard-coded string and its own comment
    admitted that was fragile across python versions; decode-and-compare is the
    robust check it asked for.
    """
    import json

    data = {
        'a': 3.14, 'b': 5, 'c': 'foo', 'd': '5.42323143',
        'e': {'u1': '3.221'},
        'f': [2.1, 'boo', 3, '4.2324'],
    }
    strdata = rlk_jsondumps(data)
    # Decoding back must yield the exact same structure: floats stay floats,
    # decimal-looking strings stay strings
    assert json.loads(strdata) == data
def write_history_data_in_file(data, filepath, start_ts, end_ts):
    """Write history data together with its time range to a JSON file."""
    log.info(
        'Writing history file',
        filepath=filepath,
        start_time=start_ts,
        end_time=end_ts,
    )
    payload = {
        'data': data,
        'start_time': start_ts,
        'end_time': end_ts,
    }
    with open(filepath, 'w') as outfile:
        outfile.write(rlk_jsondumps(payload))
def serialize_for_db(self, ts_converter: Callable[[Timestamp], str]) -> str:
    """Serialize this object's dict form to a json string for DB storage.

    May raise:
    - DeserializationError if something fails during conversion to the DB tuple
    """
    json_data = self.serialize_to_dict(ts_converter)
    try:
        return rlk_jsondumps(json_data)
    except (OverflowError, ValueError, TypeError) as e:
        raise DeserializationError(
            f'Could not dump json to string for NamedJson. Error was {str(e)}',
        ) from e
def update_trades_cache(
        self,
        data: Union[List[Any], Dict[str, Any]],
        start_ts: Timestamp,
        end_ts: Timestamp,
        special_name: Optional[str] = None,
) -> None:
    """Overwrite the trades cache file with the given data and its time range."""
    trades_file = self._get_cachefile_name(special_name)
    trades: Dict[str, Union[Timestamp, List[Any], Dict[str, Any]]] = {
        'start_time': start_ts,
        'end_time': end_ts,
        'data': data,
    }
    with open(trades_file, 'w') as f:
        f.write(rlk_jsondumps(trades))
def to_db_tuple(self) -> NamedJsonDBTuple:
    """Convert this NamedJson to its (event_type, json_string) DB tuple.

    May raise:
    - DeserializationError if something fails during conversion to the DB tuple
    """
    event_type = self.event_type.serialize_for_db()
    try:
        string_data = rlk_jsondumps(self.data)
    except (OverflowError, ValueError, TypeError) as e:
        raise DeserializationError(
            f'Could not dump json to string for NamedJson. Error was {str(e)}',
        ) from e

    return event_type, string_data
def write_history_data_in_file(
        data: List[Dict[str, Any]],
        filepath: FilePath,
        start_ts: Timestamp,
        end_ts: Timestamp,
) -> None:
    """Write history entries with their covered time range to a JSON file."""
    log.info(
        'Writing history file',
        filepath=filepath,
        start_time=start_ts,
        end_time=end_ts,
    )
    payload: Dict[str, Any] = {
        'data': data,
        'start_time': start_ts,
        'end_time': end_ts,
    }
    with open(filepath, 'w') as outfile:
        outfile.write(rlk_jsondumps(payload))
def write_history_data_in_file(
        data: List[Dict[str, Any]],
        filepath: Path,
        start_ts: Timestamp,
        end_ts: Timestamp,
) -> None:
    """Write history entries with their covered time range to a JSON file.

    The time range keys are inserted before the data so they appear first in
    the file (dict insertion order is guaranteed from python 3.7 onwards).
    """
    log.info(
        'Writing history file',
        filepath=filepath,
        start_time=start_ts,
        end_time=end_ts,
    )
    payload: Dict[str, Any] = {
        'start_time': start_ts,
        'end_time': end_ts,
        'data': data,
    }
    with open(filepath, 'w') as outfile:
        outfile.write(rlk_jsondumps(payload))
def test_rlk_jsondumps():
    """rlk_jsondumps must serialize the module fixture to the exact expected string."""
    expected = (
        '{"a": "5.4", "b": "foo", "c": "32.1", "d": 5, '
        '"e": [1, "a", "5.1"], "f": "ETH", "BTC": "test_with_asset_key"}'
    )
    assert rlk_jsondumps(data) == expected
def all_coins(self) -> Dict[str, Any]:
    """
    Gets the list of all the cryptocompare coins

    Served from a month-long file cache; requeries cryptocompare whenever the
    cache is missing, unparseable, or stale, and rewrites the cache afterwards.

    May raise:
    - RemoteError if there is a problem reaching the cryptocompare server
    or with reading the response returned by the server
    """
    # Get coin list of cryptocompare
    invalidate_cache = True
    coinlist_cache_path = os.path.join(self.data_directory, 'cryptocompare_coinlist.json')
    if os.path.isfile(coinlist_cache_path):
        log.info('Found cryptocompare coinlist cache', path=coinlist_cache_path)
        with open(coinlist_cache_path, 'r') as f:
            try:
                data = rlk_jsonloads_dict(f.read())
                now = ts_now()
                invalidate_cache = False
                # If we got a cache and it's over a month old then requery cryptocompare
                if data['time'] < now and now - data['time'] > 2629800:
                    log.info('Cryptocompare coinlist cache is now invalidated')
                    invalidate_cache = True
                    data = data['data']
            except JSONDecodeError:
                invalidate_cache = True

    if invalidate_cache:
        data = self._api_query('all/coinlist')
        # Also save the cache
        with open(coinlist_cache_path, 'w') as f:
            now = ts_now()
            log.info('Writing coinlist cache', timestamp=now)
            write_data = {'time': now, 'data': data}
            f.write(rlk_jsondumps(write_data))
    else:
        # in any case take the data
        data = data['data']

    # As described in the docs
    # https://min-api.cryptocompare.com/documentation?key=Other&cat=allCoinsWithContentEndpoint
    # This is not the entire list of assets in the system, so I am manually adding
    # here assets I am aware of that they already have historical data for in thei
    # cryptocompare system
    manually_known = (
        'DAO',
        'USDT',
        'VEN',
        'AIR*',    # This is Aircoin
        'SPND',    # This is SpendCoin (https://coinmarketcap.com/currencies/spendcoin/)
        'EBCH',    # This is eBitcoinCash (https://coinmarketcap.com/currencies/ebitcoin-cash/)
        'EKT',     # This is Educare (https://coinmarketcap.com/currencies/educare/)
        'FKX',     # This is Knoxstertoken (https://coinmarketcap.com/currencies/knoxstertoken/)
        'FNKOS',   # This is FNKOS (https://coinmarketcap.com/currencies/fnkos/)
        'FTI',     # This is FansTime (https://coinmarketcap.com/currencies/fanstime/)
        'GENE*',   # This is Gene Source Code Chain
                   # (https://coinmarketcap.com/currencies/gene-source-code-chain/)
        'GZE',     # This is GazeCoin (https://coinmarketcap.com/currencies/gazecoin/)
        'HMC*',    # This is probaly HarmonyCoin (https://coinmarketcap.com/currencies/harmonycoin-hmc/)
        'ITC',     # This is IoTChain (https://coinmarketcap.com/currencies/iot-chain/)
        'LUNA',    # This is Luna Coin (https://coinmarketcap.com/currencies/luna-coin/)
        'MFTU',    # This is MFTU (https://coinmarketcap.com/currencies/mainstream-for-the-underground/)
        'NXX',     # This is Nexxus (https://coinmarketcap.com/currencies/nexxus/)
        'OWN',     # This is Owndata (https://coinmarketcap.com/currencies/owndata/)
        'PIPL',    # This is PiplCoin (https://coinmarketcap.com/currencies/piplcoin/)
        'PKG',     # This is PKG Token (https://coinmarketcap.com/currencies/pkg-token/)
        'QBIT',    # This is Quibitica https://coinmarketcap.com/currencies/qubitica/
        'RATING',  # This is DPRating https://coinmarketcap.com/currencies/dprating/
        'RLT',     # This is RouletteToken https://coinmarketcap.com/currencies/roulettetoken/
        'RPL',     # This is RocketPool https://coinmarketcap.com/currencies/rocket-pool/
        'SMS',     # This is SpeedMiningService (https://coinmarketcap.com/currencies/speed-mining-service/)
        'SSP',     # This is SmartShare (https://coinmarketcap.com/currencies/smartshare/)
        'THR',     # This is ThoreCoin (https://coinmarketcap.com/currencies/thorecoin/)
        'TNS',     # This is Transcodium (https://coinmarketcap.com/currencies/transcodium/)
    )
    # BUGFIX: the original had data['LUNA'] = object (missing the call
    # parentheses), storing the object *class* instead of an instance marker
    # like every other entry
    for symbol in manually_known:
        data[symbol] = object()

    return data
root_dir = Path(__file__).resolve().parent.parent.parent ASSETS_FILE = Path(f'{root_dir}/rotkehlchen/data/all_assets.json') with open(ASSETS_FILE, 'r') as f: assets = json.loads(f.read()) data_dir = default_data_directory() coingecko = Coingecko(data_directory=data_dir) COINGECKO_COINS_FILE = data_dir / 'coingecko.json' if COINGECKO_COINS_FILE.exists(): with open(COINGECKO_COINS_FILE, 'r') as f: coingecko_coins = json.loads(f.read()) else: coingecko_coins = coingecko.all_coins() with open(COINGECKO_COINS_FILE, 'w') as f: f.write(rlk_jsondumps(coingecko_coins)) coingecko_add = { 'FTT': 'farmatrust', 'SNX': 'synthetix-network-token', '0xBTC': '0xbitcoin', '1SG': '1sg', '1ST': 'first-blood', '1WO': '1world', '2GIVE': '2give', 'ABBC': 'abbc', 'ACC-3': 'accelerator-network', 'ARB': 'arbitrage', 'ARB-2': 'arbit-coin', 'ARC': 'advanced-technology-coin', 'ATX': 'aston',
def save_historical_forex_data() -> None:
    """Persist the Inquirer's cached forex data as JSON in its data directory."""
    inquirer = Inquirer()
    target = os.path.join(inquirer._data_directory, 'price_history_forex.json')
    serialized = rlk_jsondumps(inquirer._cached_forex_data)
    with open(target, 'w') as outfile:
        outfile.write(serialized)