def __init__(self, data_directory, history_date_start=DEFAULT_START_DATE):
    self.data_directory = data_directory
    # get the start date for historical data
    self.historical_data_start = createTimeStamp(history_date_start, formatstr="%d/%m/%Y")
    self.price_history = dict()
    # TODO: Check if historical data is after the requested start date

    # Check the data folder and load any cached history
    prefix = os.path.join(self.data_directory, 'price_history_')
    regex = re.compile(prefix + r'(.*)\.json')
    files_list = glob.glob(prefix + '*.json')
    for file_ in files_list:
        match = regex.match(file_)
        assert match
        cache_key = match.group(1)
        with open(file_, 'rb') as f:
            data = rlk_jsonloads(f.read())
            self.price_history[cache_key] = data

    # Get the coin list of cryptocompare
    invalidate_cache = True
    coinlist_cache_path = os.path.join(self.data_directory, 'cryptocompare_coinlist.json')
    if os.path.isfile(coinlist_cache_path):
        with open(coinlist_cache_path, 'rb') as f:
            try:
                data = rlk_jsonloads(f.read())
                now = ts_now()
                invalidate_cache = False
                # If we got a cache and it's over a month old (2629800 seconds)
                # then requery cryptocompare
                if data['time'] < now and now - data['time'] > 2629800:
                    invalidate_cache = True
                    data = data['data']
            except JSONDecodeError:
                invalidate_cache = True

    if invalidate_cache:
        query_string = 'https://www.cryptocompare.com/api/data/coinlist/'
        resp = urlopen(Request(query_string))
        resp = rlk_jsonloads(resp.read())
        if 'Response' not in resp or resp['Response'] != 'Success':
            error_message = 'Failed to query cryptocompare for: "{}"'.format(query_string)
            if 'Message' in resp:
                error_message += '. Error: {}'.format(resp['Message'])
            raise ValueError(error_message)
        data = resp['Data']

        # Also save the cache
        with open(coinlist_cache_path, 'w') as f:
            write_data = {'time': ts_now(), 'data': data}
            f.write(rlk_jsondumps(write_data))
    else:
        # the cache is still valid, so unwrap and take its data
        data = data['data']

    self.cryptocompare_coin_list = data
    # For some reason, even though prices for the following assets are returned,
    # they are not in the coinlist, so add them here.
    self.cryptocompare_coin_list['DAO'] = object()
    self.cryptocompare_coin_list['USDT'] = object()
def test_upgrade_sqlcipher_v3_to_v4_with_dbinfo(data_dir):
    sqlcipher_version = detect_sqlcipher_version()
    if sqlcipher_version != 4:
        # nothing to test
        return

    username = '******'
    userdata_dir = os.path.join(data_dir, username)
    os.mkdir(userdata_dir)
    # get the v3 database file and copy it into the user's data directory
    dir_path = os.path.dirname(os.path.realpath(__file__))
    copyfile(
        os.path.join(dir_path, 'data', 'sqlcipher_v3_rotkehlchen.db'),
        os.path.join(userdata_dir, 'rotkehlchen.db'),
    )
    dbinfo = {
        'sqlcipher_version': 3,
        'md5_hash': '20c910c28ca42370e4a5f24d6d4a73d2',
    }
    with open(os.path.join(userdata_dir, DBINFO_FILENAME), 'w') as f:
        f.write(rlk_jsondumps(dbinfo))

    # the constructor should migrate it in-place and we should have a working DB
    db = DBHandler(userdata_dir, '123')
    assert db.get_version() == ROTKEHLCHEN_DB_VERSION
def write_history_data_in_file(data, filepath, start_ts, end_ts):
    with open(filepath, 'w') as outfile:
        history_dict = dict()
        history_dict['data'] = data
        history_dict['start_time'] = start_ts
        history_dict['end_time'] = end_ts
        outfile.write(rlk_jsondumps(history_dict))
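# Hedged sketch, not part of the original code: a read-side counterpart to
# write_history_data_in_file() above, illustrating the cache layout it writes
# ({'data': ..., 'start_time': ..., 'end_time': ...}). The function name
# read_history_data_from_file and the use of the standard json module instead
# of rlk_jsonloads are assumptions made to keep the example self-contained.
import json

def read_history_data_from_file(filepath):
    """Load a history cache file and return (data, start_ts, end_ts)."""
    with open(filepath, 'r') as infile:
        history_dict = json.load(infile)
    return (
        history_dict['data'],
        history_dict['start_time'],
        history_dict['end_time'],
    )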
def update_trades_cache(self, data, start_ts, end_ts, special_name=None):
    trades_file = self._get_cachefile_name(special_name)
    trades = dict()
    with open(trades_file, 'w') as f:
        trades['start_time'] = start_ts
        trades['end_time'] = end_ts
        trades['data'] = data
        f.write(rlk_jsondumps(trades))
def __del__(self):
    self.disconnect()
    dbinfo = {
        'sqlcipher_version': self.sqlcipher_version,
        'md5_hash': self.get_md5hash(),
    }
    with open(os.path.join(self.user_data_dir, DBINFO_FILENAME), 'w') as f:
        f.write(rlk_jsondumps(dbinfo))
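# Hedged sketch: how the dbinfo file written by __del__() above could be
# verified on the next startup (the upgrade test earlier reads the same file).
# verify_dbinfo() is a hypothetical helper, and plain json stands in for the
# project's rlk_jsonloads; the 'md5_hash' key mirrors the writer above.
import json
import os

def verify_dbinfo(user_data_dir, dbinfo_filename, current_md5_hash):
    """Return True if the stored dbinfo matches the current DB file's md5 hash."""
    path = os.path.join(user_data_dir, dbinfo_filename)
    if not os.path.isfile(path):
        # no record from a previous shutdown; nothing to verify against
        return False
    with open(path, 'r') as f:
        dbinfo = json.load(f)
    return dbinfo.get('md5_hash') == current_md5_hash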
def test_encoding():
    data = {
        'a': 3.14,
        'b': 5,
        'c': 'foo',
        'd': '5.42323143',
        'e': {'u1': '3.221'},
        'f': [2.1, 'boo', 3, '4.2324'],
    }
    strdata = rlk_jsondumps(data)
    # Fragile test: comparing the raw string may fail under a different python
    # version. It would be better to decode again and compare the objects.
    assert strdata == (
        '{"a": 3.14, "b": 5, "c": "foo", "d": "5.42323143", '
        '"e": {"u1": "3.221"}, "f": [2.1, "boo", 3, "4.2324"]}'
    )
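# Hedged sketch of the round-trip check the comment above suggests: decode the
# dumped string again and compare objects, which does not depend on the dict
# ordering or float formatting details of a particular python version. Plain
# json stands in for rlk_jsondumps/rlk_jsonloads to keep it self-contained.
import json

def test_encoding_roundtrip():
    data = {'a': 3.14, 'b': 5, 'c': 'foo', 'd': '5.42323143',
            'e': {'u1': '3.221'}, 'f': [2.1, 'boo', 3, '4.2324']}
    assert json.loads(json.dumps(data)) == data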
def update_trades_cache(
        self,
        data: Union[List, Dict],
        start_ts: typing.Timestamp,
        end_ts: typing.Timestamp,
        special_name: Optional[str] = None,
) -> None:
    trades_file = self._get_cachefile_name(special_name)
    trades: Dict[str, Union[typing.Timestamp, List, Dict]] = dict()
    with open(trades_file, 'w') as f:
        trades['start_time'] = start_ts
        trades['end_time'] = end_ts
        trades['data'] = data
        f.write(rlk_jsondumps(trades))
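# Hedged usage sketch for update_trades_cache() above: a minimal stand-in
# exchange writes one trades cache file and the sketch reads it back.
# _DemoExchange, its _get_cachefile_name() body, and the concrete timestamps
# are illustrative assumptions; the real class provides _get_cachefile_name()
# and serializes with rlk_jsondumps rather than plain json.
import json
import os
import tempfile

class _DemoExchange:
    def __init__(self, cache_dir):
        self.cache_dir = cache_dir

    def _get_cachefile_name(self, special_name=None):
        suffix = special_name if special_name else 'trades'
        return os.path.join(self.cache_dir, '{}.json'.format(suffix))

    def update_trades_cache(self, data, start_ts, end_ts, special_name=None):
        trades_file = self._get_cachefile_name(special_name)
        with open(trades_file, 'w') as f:
            f.write(json.dumps({
                'start_time': start_ts,
                'end_time': end_ts,
                'data': data,
            }))

with tempfile.TemporaryDirectory() as tmpdir:
    exchange = _DemoExchange(tmpdir)
    exchange.update_trades_cache([{'pair': 'BTC_EUR'}], 1514764800, 1517443200)
    with open(exchange._get_cachefile_name(), 'r') as f:
        assert json.load(f)['data'] == [{'pair': 'BTC_EUR'}]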
def write_history_data_in_file(data, filepath, start_ts, end_ts):
    log.info(
        'Writing history file',
        filepath=filepath,
        start_time=start_ts,
        end_time=end_ts,
    )
    with open(filepath, 'w') as outfile:
        history_dict = dict()
        history_dict['data'] = data
        history_dict['start_time'] = start_ts
        history_dict['end_time'] = end_ts
        outfile.write(rlk_jsondumps(history_dict))
def __init__(self, data_directory, history_date_start):
    self.data_directory = data_directory
    # get the start date for historical data
    self.historical_data_start = createTimeStamp(history_date_start, formatstr="%d/%m/%Y")
    self.price_history = dict()
    self.price_history_file = dict()

    # Check the data folder and remember the filenames of any cached history
    prefix = os.path.join(self.data_directory, 'price_history_')
    # escape backslashes (Windows path separators) so the prefix is a valid regex
    prefix = prefix.replace('\\', '\\\\')
    regex = re.compile(prefix + r'(.*)\.json')
    files_list = glob.glob(prefix + '*.json')
    for file_ in files_list:
        match = regex.match(file_)
        assert match
        cache_key = match.group(1)
        self.price_history_file[cache_key] = file_

    # Get the coin list of cryptocompare
    invalidate_cache = True
    coinlist_cache_path = os.path.join(self.data_directory, 'cryptocompare_coinlist.json')
    if os.path.isfile(coinlist_cache_path):
        log.info('Found coinlist cache', path=coinlist_cache_path)
        with open(coinlist_cache_path, 'rb') as f:
            try:
                data = rlk_jsonloads(f.read())
                now = ts_now()
                invalidate_cache = False
                # If we got a cache and it's over a month old (2629800 seconds)
                # then requery cryptocompare
                if data['time'] < now and now - data['time'] > 2629800:
                    log.info('Coinlist cache is now invalidated')
                    invalidate_cache = True
                    data = data['data']
            except JSONDecodeError:
                invalidate_cache = True

    if invalidate_cache:
        query_string = 'https://www.cryptocompare.com/api/data/coinlist/'
        log.debug('Querying cryptocompare', url=query_string)
        resp = request_get(query_string)
        if 'Response' not in resp or resp['Response'] != 'Success':
            error_message = 'Failed to query cryptocompare for: "{}"'.format(query_string)
            if 'Message' in resp:
                error_message += '. Error: {}'.format(resp['Message'])
            log.error('Cryptocompare query failure', url=query_string, error=error_message)
            raise ValueError(error_message)
        data = resp['Data']

        # Also save the cache
        with open(coinlist_cache_path, 'w') as f:
            now = ts_now()
            log.info('Writing coinlist cache', timestamp=now)
            write_data = {'time': now, 'data': data}
            f.write(rlk_jsondumps(write_data))
    else:
        # the cache is still valid, so unwrap and take its data
        data = data['data']

    self.cryptocompare_coin_list = data
    # For some reason, even though prices for the following assets are returned,
    # they are not in the coinlist, so add them here.
    self.cryptocompare_coin_list['DAO'] = object()
    self.cryptocompare_coin_list['USDT'] = object()
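# Hedged sketch of the staleness rule used for the coinlist cache above: the
# file stores {'time': <unix ts>, 'data': <coin map>} and is requeried once it
# is older than 2629800 seconds (roughly one average month). The helper name
# coinlist_cache_is_stale and the time.time() fallback are assumptions.
import time

MONTH_IN_SECONDS = 2629800

def coinlist_cache_is_stale(cache_time, now=None):
    """Return True if a coinlist cache written at cache_time should be requeried."""
    if now is None:
        now = int(time.time())
    return cache_time < now and now - cache_time > MONTH_IN_SECONDS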
def set_settings(self, settings, accountant):
    self.settings = settings
    accountant.set_main_currency(settings['main_currency'])
    with open(os.path.join(self.data_directory, 'settings.json'), 'w') as f:
        f.write(rlk_jsondumps(self.settings))
def set_ui_floating_precision(self, val):
    self.settings['ui_floating_precision'] = val
    with open(os.path.join(self.data_directory, 'settings.json'), 'w') as f:
        f.write(rlk_jsondumps(self.settings))
def save_historical_forex_data(self) -> None:
    filename = os.path.join(self.data_directory, 'price_history_forex.json')
    with open(filename, 'w') as outfile:
        outfile.write(rlk_jsondumps(self.cached_forex_data))
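# Hedged sketch: a loading counterpart to save_historical_forex_data() above.
# load_historical_forex_data() is a hypothetical helper; the filename matches
# the writer, and plain json replaces rlk_jsonloads for self-containment.
import json
import os

def load_historical_forex_data(data_directory):
    """Read back the cached forex price history, or None if no cache exists."""
    filename = os.path.join(data_directory, 'price_history_forex.json')
    if not os.path.isfile(filename):
        return None
    with open(filename, 'r') as f:
        return json.load(f)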