def calculate_asset_details(self) -> Dict[Asset, Tuple[FVal, FVal]]:
    """Compute, per asset, the tax-free amount and the average buy price.

    An amount counts as tax-free when its buy happened more than
    ``taxfree_after_period`` seconds before now. The average buy price is
    the amount-weighted mean of all buy rates for the asset.

    Returns a mapping of asset -> (tax-free amount, average buy price),
    which is also stored in ``self.details``.
    """
    self.details = {}
    now = ts_now()
    for asset, events in self.events.items():
        untaxed_amount = ZERO
        total_amount = ZERO
        weighted_rate_sum = ZERO
        for buy in events.buys:
            if (
                    self.taxfree_after_period is not None and
                    buy.timestamp + self.taxfree_after_period < now
            ):
                untaxed_amount += buy.amount
            total_amount += buy.amount
            weighted_rate_sum += buy.amount * buy.rate

        if total_amount == ZERO:
            # No buys recorded for this asset: nothing tax-free, no average
            self.details[asset] = (ZERO, ZERO)
        else:
            self.details[asset] = (
                untaxed_amount,
                weighted_rate_sum / total_amount,
            )

    return self.details
def process_history(self, start_ts, end_ts):
    """Fetch the full trade history and run accounting over [start_ts, end_ts].

    Returns a tuple of (accounting result, error message or empty string).
    """
    # Always request from timestamp 0 up to now: processing needs the
    # entire history to be available even when accounting is restricted
    # to the requested range.
    (
        error_or_empty,
        history,
        margin_history,
        loan_history,
        asset_movements,
        eth_transactions,
    ) = self.trades_historian.get_history(
        start_ts=0,
        end_ts=ts_now(),
        end_at_least_ts=end_ts,
    )

    result = self.accountant.process_history(
        start_ts,
        end_ts,
        history,
        margin_history,
        loan_history,
        asset_movements,
        eth_transactions,
    )
    return result, error_or_empty
def upload_data_to_server(self):
    """Upload the local encrypted DB to the premium server when appropriate.

    Aborts without uploading when: the remote metadata query fails, the
    remote data hash equals ours (nothing changed), or the server copy was
    modified more recently than the local DB. On success records the upload
    timestamp in ``self.last_data_upload_ts``.
    """
    log.debug('upload to server -- start')
    data, our_hash = self.data.compress_and_encrypt_db(self.password)
    success, result_or_error = self.premium.query_last_data_metadata()
    if not success:
        log.debug(
            'upload to server -- query last metadata failed',
            error=result_or_error,
        )
        return

    # Replaced leftover debugging logs ('CAN_PUSH', 'CAN_PUSH -> 3') with
    # messages consistent with the rest of this method.
    log.debug(
        'upload to server -- comparing hashes',
        ours=our_hash,
        theirs=result_or_error['data_hash'],
    )
    if our_hash == result_or_error['data_hash']:
        log.debug('upload to server -- same hash')
        # same hash -- no need to upload anything
        return

    our_last_write_ts = self.data.db.get_last_write_ts()
    if our_last_write_ts <= result_or_error['last_modify_ts']:
        # Server's DB was modified after our local DB
        log.debug('upload to server -- remote db more recent than local')
        return

    success, result_or_error = self.premium.upload_data(
        data,
        our_hash,
        our_last_write_ts,
        'zlib',
    )
    if not success:
        log.debug('upload to server -- upload error', error=result_or_error)
        return

    self.last_data_upload_ts = ts_now()
    log.debug('upload to server -- success')
def test_writting_fetching_data(data_dir, username):
    """End-to-end round-trip test of DataHandler persistence: owned ETH
    tokens, blockchain accounts, ignored assets and settings."""
    # NOTE(review): the function name typo ('writting') is kept since tests
    # are discovered by name.
    data = DataHandler(data_dir)
    data.unlock(username, '123', create_new=True)

    # Owned ETH tokens round-trip
    tokens = ['GNO', 'RDN']
    data.write_owned_eth_tokens(tokens)
    result = data.db.get_owned_tokens()
    assert set(tokens) == set(result)

    # Blockchain accounts: the lowercase ETH address is expected to come
    # back checksummed from the DB
    data.add_blockchain_account('BTC', '1CB7Pbji3tquDtMRp8mBkerimkFzWRkovS')
    data.add_blockchain_account('ETH', '0xd36029d76af6fE4A356528e4Dc66B2C18123597D')
    data.add_blockchain_account('ETH', '0x80b369799104a47e98a553f3329812a44a7facdc')
    accounts = data.db.get_blockchain_accounts()
    assert isinstance(accounts, BlockchainAccounts)
    assert accounts.btc == ['1CB7Pbji3tquDtMRp8mBkerimkFzWRkovS']
    assert set(accounts.eth) == set([
        '0xd36029d76af6fE4A356528e4Dc66B2C18123597D',
        to_checksum_address('0x80b369799104a47e98a553f3329812a44a7facdc')
    ])
    # Add existing account should fail
    with pytest.raises(sqlcipher.IntegrityError):  # pylint: disable=no-member
        data.add_blockchain_account(
            'ETH',
            '0xd36029d76af6fE4A356528e4Dc66B2C18123597D')
    # Remove non-existing account
    with pytest.raises(InputError):
        data.remove_blockchain_account(
            'ETH',
            '0x136029d76af6fE4A356528e4Dc66B2C18123597D')
    # Remove existing account
    data.remove_blockchain_account(
        'ETH',
        '0xd36029d76af6fE4A356528e4Dc66B2C18123597D')
    accounts = data.db.get_blockchain_accounts()
    assert accounts.eth == [
        to_checksum_address('0x80b369799104a47e98a553f3329812a44a7facdc')
    ]

    # Ignored assets: adding a duplicate and removing a non-existing one
    # must both report failure without raising
    result, _ = data.add_ignored_asset('DAO')
    assert result
    result, _ = data.add_ignored_asset('DOGE')
    assert result
    result, _ = data.add_ignored_asset('DOGE')
    assert not result
    assert set(data.db.get_ignored_assets()) == set(['DAO', 'DOGE'])
    result, _ = data.remove_ignored_asset('XXX')
    assert not result
    result, _ = data.remove_ignored_asset('DOGE')
    assert result
    assert data.db.get_ignored_assets() == ['DAO']

    # With nothing inserted in settings make sure default values are returned
    result = data.db.get_settings()
    last_write_diff = ts_now() - result['last_write_ts']
    # make sure last_write was within 3 secs
    assert last_write_diff >= 0 and last_write_diff < 3
    del result['last_write_ts']
    assert result == {
        'historical_data_start': DEFAULT_START_DATE,
        'eth_rpc_port': '8545',
        'ui_floating_precision': DEFAULT_UI_FLOATING_PRECISION,
        'db_version': ROTKEHLCHEN_DB_VERSION,
        'include_crypto2crypto': DEFAULT_INCLUDE_CRYPTO2CRYPTO,
        'include_gas_costs': DEFAULT_INCLUDE_GAS_COSTS,
        'taxfree_after_period': YEAR_IN_SECONDS,
        'balance_save_frequency': DEFAULT_BALANCE_SAVE_FREQUENCY,
        'last_balance_save': 0,
        'main_currency': DEFAULT_MAIN_CURRENCY,
        'anonymized_logs': DEFAULT_ANONYMIZED_LOGS,
    }

    # Check setting non-existing settings. Should be ignored (success with a
    # warning message naming the unknown key)
    success, msg = data.set_settings({'nonexisting_setting': 1}, accountant=None)
    assert success
    assert msg != '' and 'nonexisting_setting' in msg
    _, msg = data.set_settings({
        'nonexisting_setting': 1,
        'eth_rpc_port': '8555',
        'ui_floating_precision': 3,
    }, accountant=None)
    assert msg != '' and 'nonexisting_setting' in msg

    # Now check nothing funny made it in the db
    result = data.db.get_settings()
    assert result['eth_rpc_port'] == '8555'
    assert result['ui_floating_precision'] == 3
    assert 'nonexisting_setting' not in result
def update_last_data_upload_ts(self) -> None:
    """Persist the current timestamp as the last data upload time.

    Upserts the 'last_data_upload_ts' row of the settings table and
    commits the transaction.
    """
    query = 'INSERT OR REPLACE INTO settings(name, value) VALUES(?, ?)'
    bindings = ('last_data_upload_ts', str(ts_now()))
    self.conn.cursor().execute(query, bindings)
    self.conn.commit()
def test_writting_fetching_data(data_dir, username):
    """Round-trip test of DataHandler persistence: owned ETH tokens,
    blockchain accounts, ignored assets and settings defaults/updates.

    NOTE(review): this source contains a second definition of this same test
    with a richer settings dict and checksummed addresses; if both live in
    one module, the later definition shadows this one — confirm intent.
    """
    data = DataHandler(data_dir)
    data.unlock(username, '123', create_new=True)

    # Owned ETH tokens round-trip
    tokens = ['GNO', 'RDN']
    data.write_owned_eth_tokens(tokens)
    result = data.db.get_owned_tokens()
    assert set(tokens) == set(result)

    # Blockchain accounts; here get_blockchain_accounts() returns a mapping
    # keyed by chain symbol
    data.add_blockchain_account('BTC', '1CB7Pbji3tquDtMRp8mBkerimkFzWRkovS')
    data.add_blockchain_account('ETH', '0xd36029d76af6fE4A356528e4Dc66B2C18123597D')
    data.add_blockchain_account('ETH', '0x80b369799104a47e98a553f3329812a44a7facdc')
    accounts = data.db.get_blockchain_accounts()
    assert len(accounts) == 2
    assert accounts['BTC'] == ['1CB7Pbji3tquDtMRp8mBkerimkFzWRkovS']
    assert set(accounts['ETH']) == set([
        '0xd36029d76af6fE4A356528e4Dc66B2C18123597D',
        '0x80b369799104a47e98a553f3329812a44a7facdc'
    ])
    # Add existing account should fail
    with pytest.raises(sqlcipher.IntegrityError):
        data.add_blockchain_account(
            'ETH',
            '0xd36029d76af6fE4A356528e4Dc66B2C18123597D')
    # Remove non-existing account
    with pytest.raises(InputError):
        data.remove_blockchain_account(
            'ETH',
            '0x136029d76af6fE4A356528e4Dc66B2C18123597D')
    # Remove existing account
    data.remove_blockchain_account(
        'ETH',
        '0xd36029d76af6fE4A356528e4Dc66B2C18123597D')
    accounts = data.db.get_blockchain_accounts()
    assert accounts['ETH'] == ['0x80b369799104a47e98a553f3329812a44a7facdc']

    # Ignored assets: duplicates and unknown removals report failure
    result, _ = data.add_ignored_asset('DAO')
    assert result
    result, _ = data.add_ignored_asset('DOGE')
    assert result
    result, _ = data.add_ignored_asset('DOGE')
    assert not result
    assert set(data.db.get_ignored_assets()) == set(['DAO', 'DOGE'])
    result, _ = data.remove_ignored_asset('XXX')
    assert not result
    result, _ = data.remove_ignored_asset('DOGE')
    assert result
    assert data.db.get_ignored_assets() == ['DAO']

    # With nothing inserted in settings make sure default values are returned
    result = data.db.get_settings()
    last_write_diff = ts_now() - result['last_write_ts']
    # make sure last_write was within 3 secs
    assert last_write_diff >= 0 and last_write_diff < 3
    del result['last_write_ts']
    assert result == {
        'historical_data_start': DEFAULT_START_DATE,
        'eth_rpc_port': '8545',
        'ui_floating_precision': DEFAULT_UI_FLOATING_PRECISION,
        'db_version': ROTKEHLCHEN_DB_VERSION
    }

    # Check setting non-existing settings. Should be ignored (warning message
    # names the unknown key)
    _, msg = data.set_settings({'nonexisting_setting': 1}, accountant=None)
    assert msg != '' and 'nonexisting_setting' in msg
    _, msg = data.set_settings({
        'nonexisting_setting': 1,
        'eth_rpc_port': '8555',
        'ui_floating_precision': 3,
    }, accountant=None)
    assert msg != '' and 'nonexisting_setting' in msg

    # Now check nothing funny made it in the db
    result = data.db.get_settings()
    assert result['eth_rpc_port'] == '8555'
    assert result['ui_floating_precision'] == 3
    assert 'nonexisting_setting' not in result
def __init__(self, data_directory, history_date_start):
    """Initialize the price historian: load cached price history files and
    the cryptocompare coin list.

    Args:
        data_directory: Directory holding the 'price_history_*' and
            'cryptocompare_coinlist.json' cache files.
        history_date_start: 'DD/MM/YYYY' string marking the start of
            historical data.

    Raises:
        ValueError: If the cryptocompare coinlist query does not succeed.
    """
    self.data_directory = data_directory
    # get the start date for historical data
    self.historical_data_start = createTimeStamp(
        history_date_start,
        formatstr="%d/%m/%Y",
    )
    self.price_history = dict()
    # TODO: Check if historical data is after the requested start date

    # Check the data folder and load any cached history.
    prefix = os.path.join(self.data_directory, 'price_history_')
    # FIX: escape the filesystem prefix so regex metacharacters in the path
    # (e.g. Windows '\\' separators) don't corrupt the pattern, and use a raw
    # string so '\.' is a literal dot rather than an invalid escape.
    regex = re.compile(re.escape(prefix) + r'(.*)\.json')
    files_list = glob.glob(prefix + '*.json')
    for file_ in files_list:
        match = regex.match(file_)
        assert match
        cache_key = match.group(1)
        with open(file_, 'rb') as f:
            data = rlk_jsonloads(f.read())
            self.price_history[cache_key] = data

    # Get coin list of crypto compare
    invalidate_cache = True
    coinlist_cache_path = os.path.join(self.data_directory, 'cryptocompare_coinlist.json')
    if os.path.isfile(coinlist_cache_path):
        with open(coinlist_cache_path, 'rb') as f:
            try:
                data = rlk_jsonloads(f.read())
                now = ts_now()
                invalidate_cache = False

                # If the cache is over a month old (2629800 seconds) then
                # requery cryptocompare
                if data['time'] < now and now - data['time'] > 2629800:
                    invalidate_cache = True
                    data = data['data']
            except JSONDecodeError:
                # Corrupt cache file: fall through to a fresh query
                invalidate_cache = True

    if invalidate_cache:
        query_string = 'https://www.cryptocompare.com/api/data/coinlist/'
        resp = request_get(query_string)
        if 'Response' not in resp or resp['Response'] != 'Success':
            error_message = 'Failed to query cryptocompare for: "{}"'.format(
                query_string)
            if 'Message' in resp:
                error_message += ". Error: {}".format(resp['Message'])
            raise ValueError(error_message)
        data = resp['Data']

        # Also save the cache
        with open(coinlist_cache_path, 'w') as f:
            write_data = {'time': ts_now(), 'data': data}
            f.write(rlk_jsondumps(write_data))
    else:
        # in any case take the data
        data = data['data']

    self.cryptocompare_coin_list = data
    # For some reason even though price for the following assets is returned
    # it's not in the coinlist so let's add them here.
    self.cryptocompare_coin_list['DAO'] = object()
    self.cryptocompare_coin_list['USDT'] = object()
def __init__(self, data_directory, history_date_start, inquirer):
    """Initialize the price historian: remember cached price-history file
    names and load the cryptocompare coin list.

    Args:
        data_directory: Directory holding the 'price_history_*' and
            'cryptocompare_coinlist.json' cache files.
        history_date_start: 'DD/MM/YYYY' string marking the start of
            historical data.
        inquirer: Price inquirer object, stored on the instance.

    Raises:
        ValueError: If the cryptocompare coinlist query does not succeed.
    """
    self.data_directory = data_directory
    # get the start date for historical data
    self.historical_data_start = createTimeStamp(
        history_date_start,
        formatstr="%d/%m/%Y",
    )
    self.inquirer = inquirer
    self.price_history = dict()
    self.price_history_file = dict()

    # Check the data folder and remember the filenames of any cached history.
    prefix = os.path.join(self.data_directory, 'price_history_')
    # FIX: re.escape handles every regex metacharacter that may appear in a
    # path (not only backslashes, which the previous manual
    # replace('\\', '\\\\') covered), and it no longer mangles the prefix
    # that is also passed to glob below.
    regex = re.compile(re.escape(prefix) + r'(.*)\.json')
    files_list = glob.glob(prefix + '*.json')
    for file_ in files_list:
        match = regex.match(file_)
        assert match
        cache_key = match.group(1)
        self.price_history_file[cache_key] = file_

    # Get coin list of crypto compare
    invalidate_cache = True
    coinlist_cache_path = os.path.join(self.data_directory, 'cryptocompare_coinlist.json')
    if os.path.isfile(coinlist_cache_path):
        log.info('Found coinlist cache', path=coinlist_cache_path)
        with open(coinlist_cache_path, 'rb') as f:
            try:
                data = rlk_jsonloads(f.read())
                now = ts_now()
                invalidate_cache = False

                # If the cache is over a month old (2629800 seconds) then
                # requery cryptocompare
                if data['time'] < now and now - data['time'] > 2629800:
                    log.info('Coinlist cache is now invalidated')
                    invalidate_cache = True
                    data = data['data']
            except JSONDecodeError:
                # Corrupt cache file: fall through to a fresh query
                invalidate_cache = True

    if invalidate_cache:
        query_string = 'https://www.cryptocompare.com/api/data/coinlist/'
        log.debug('Querying cryptocompare', url=query_string)
        resp = request_get(query_string)
        if 'Response' not in resp or resp['Response'] != 'Success':
            error_message = 'Failed to query cryptocompare for: "{}"'.format(
                query_string)
            if 'Message' in resp:
                error_message += ". Error: {}".format(resp['Message'])
            log.error(
                'Cryptocompare query failure',
                url=query_string,
                error=error_message,
            )
            raise ValueError(error_message)
        data = resp['Data']

        # Also save the cache
        with open(coinlist_cache_path, 'w') as f:
            now = ts_now()
            log.info('Writting coinlist cache', timestamp=now)
            write_data = {'time': now, 'data': data}
            f.write(rlk_jsondumps(write_data))
    else:
        # in any case take the data
        data = data['data']

    self.cryptocompare_coin_list = data
    # For some reason even though price for the following assets is returned
    # it's not in the coinlist so let's add them here.
    self.cryptocompare_coin_list['DAO'] = object()
    self.cryptocompare_coin_list['USDT'] = object()
def make_random_timestamp(start=1451606400, end=None):
    """Return a random Unix timestamp in the inclusive range [start, end].

    ``end`` defaults to the current time when not given; ``start`` defaults
    to 2016-01-01 00:00:00 UTC (1451606400).
    """
    upper_bound = ts_now() if end is None else end
    return random.randint(start, upper_bound)