'close': '2018-03-03 23:05:04', 'currency': 'DASH', # cryptocompare hourly DASH/EUR: 475.565 'fee': '0.0001', 'earned': '0.0025', 'amount': '2', }, ] asset_movements_list = [ AssetMovement( # before query period -- 8.915 * 0.001 = 8.915e-3 location=Location.KRAKEN, category=AssetMovementCategory.WITHDRAWAL, address=None, transaction_id=None, timestamp=Timestamp(1479510304), # 18/11/2016, asset=A_ETH, # cryptocompare hourly ETH/EUR: 8.915 amount=FVal('95'), fee_asset=A_ETH, fee=Fee(FVal('0.001')), link='krakenid1', ), AssetMovement( # 0.0087*52.885 = 0.4600995 location=Location.KRAKEN, category=AssetMovementCategory.WITHDRAWAL, address=None, transaction_id=None, timestamp=Timestamp(1493291104), # 27/04/2017, asset=A_ETH, # cryptocompare hourly ETH/EUR: 52.885 amount=FVal('125'), fee_asset=A_ETH,
API_SYSTEM_CLOCK_NOT_SYNCED_ERROR_CODE: 'Invalid timestamp. Is your system clock synced?', 400003: 'Invalid API key value.', 400004: 'Invalid API passphrase.', 400005: 'Invalid API secret.', 400007: 'Provided KuCoin API key needs to have "General" permission activated.', 411100: 'Contact KuCoin support to unfreeze your account', } API_PAGE_SIZE_LIMIT = 500 # Rate limit is 1800 requests per minute, exceed it multiple times the system # will restrict the IP API_REQUEST_RETRY_TIMES = 2 API_REQUEST_RETRIES_AFTER_SECONDS = 1 API_V2_TIMESTART = Timestamp(1550448000) # 2019-02-18T00:00:00Z API_V2_TIMESTART_MS = API_V2_TIMESTART * 1000 KUCOIN_LAUNCH_TS = Timestamp(1504224000) # 01/09/2017 class KucoinCase(Enum): API_KEY = 1 BALANCES = 2 TRADES = 3 OLD_TRADES = 4 DEPOSITS = 5 WITHDRAWALS = 6 def __str__(self) -> str: if self == KucoinCase.API_KEY: return 'api_key'
def get_history(
        self,
        start_ts: Timestamp,
        end_ts: Timestamp,
        has_premium: bool,
) -> HistoryResult:
    """Creates trades and loans history from start_ts to end_ts

    Aggregates, in order: connected-exchange history (trades, margin positions,
    asset movements and poloniex loans), ethereum transactions, externally
    entered trades, ledger actions, AMM trades, and DeFi events from the
    premium modules (makerDAO DSR/vaults, yearn, compound, adex, aave, ETH2).

    Progress is reported by advancing `step` towards `total_steps` after each
    query stage. Returns the HistoryResult tuple:
    (error string, history, loans, asset_movements, eth_transactions,
    defi_events, ledger_actions)
    """
    self._reset_variables()
    step = 0
    total_steps = len(self.exchange_manager.connected_exchanges) + NUM_HISTORY_QUERY_STEPS_EXCL_EXCHANGES  # noqa: E501
    log.info(
        'Get/create trade history',
        start_ts=start_ts,
        end_ts=end_ts,
    )
    # start creating the all trades history list
    history: List[Union[Trade, MarginPosition, AMMTrade]] = []
    asset_movements = []
    loans = []
    empty_or_error = ''

    def populate_history_cb(
            trades_history: List[Trade],
            margin_history: List[MarginPosition],
            result_asset_movements: List[AssetMovement],
            exchange_specific_data: Any,
    ) -> None:
        """This callback will run for successful exchange history query"""
        history.extend(trades_history)
        history.extend(margin_history)
        asset_movements.extend(result_asset_movements)

        if exchange_specific_data:
            # This can only be poloniex at the moment
            polo_loans_data = exchange_specific_data
            loans.extend(process_polo_loans(
                msg_aggregator=self.msg_aggregator,
                data=polo_loans_data,
                # We need to have history of loans since before the range
                start_ts=Timestamp(0),
                end_ts=end_ts,
            ))

    def fail_history_cb(error_msg: str) -> None:
        """This callback will run for failure in exchange history query"""
        nonlocal empty_or_error
        empty_or_error += '\n' + error_msg

    for name, exchange in self.exchange_manager.connected_exchanges.items():
        self.processing_state_name = f'Querying {name} exchange history'
        exchange.query_history_with_callbacks(
            # We need to have history of exchanges since before the range
            start_ts=Timestamp(0),
            end_ts=end_ts,
            success_callback=populate_history_cb,
            fail_callback=fail_history_cb,
        )
        step = self._increase_progress(step, total_steps)

    try:
        self.processing_state_name = 'Querying ethereum transactions history'
        eth_transactions = self.chain_manager.ethereum.transactions.query(
            addresses=None,  # all addresses
            # We need to have history of transactions since before the range
            from_ts=Timestamp(0),
            to_ts=end_ts,
            with_limit=False,  # at the moment ignore the limit for historical processing
            recent_first=False,  # for history processing we need oldest first
        )
    except RemoteError as e:
        # Etherscan failure is not fatal: continue with an empty tx list and
        # record the error in the returned error string
        eth_transactions = []
        msg = str(e)
        # NOTE(review): the two f-string pieces concatenate with no separator
        # between '{msg}' and 'The final...' — confirm the message is intended
        self.msg_aggregator.add_error(
            f'There was an error when querying etherscan for ethereum transactions: {msg}'
            f'The final history result will not include ethereum transactions',
        )
        empty_or_error += '\n' + msg
    step = self._increase_progress(step, total_steps)

    # Include the external trades in the history
    self.processing_state_name = 'Querying external trades history'
    external_trades = self.db.get_trades(
        # We need to have history of trades since before the range
        from_ts=Timestamp(0),
        to_ts=end_ts,
        location=Location.EXTERNAL,
    )
    history.extend(external_trades)
    step = self._increase_progress(step, total_steps)

    # include the ledger actions
    self.processing_state_name = 'Querying ledger actions history'
    ledger_actions, _ = self.query_ledger_actions(has_premium, from_ts=None, to_ts=end_ts)
    step = self._increase_progress(step, total_steps)

    # include AMM trades: balancer, uniswap
    for amm_location in AMMTradeLocations:
        amm_module_name = cast(AMMTRADE_LOCATION_NAMES, str(amm_location))
        amm_module = self.chain_manager.get_module(amm_module_name)
        if has_premium and amm_module:
            self.processing_state_name = f'Querying {amm_module_name} trade history'
            amm_module_trades = amm_module.get_trades(
                addresses=self.chain_manager.queried_addresses_for_module(amm_module_name),
                from_timestamp=Timestamp(0),
                to_timestamp=end_ts,
                only_cache=False,
            )
            history.extend(amm_module_trades)
        # progress advances per AMM location even without premium/module
        step = self._increase_progress(step, total_steps)

    # Include makerdao DSR gains
    defi_events = []
    makerdao_dsr = self.chain_manager.get_module('makerdao_dsr')
    if makerdao_dsr and has_premium:
        self.processing_state_name = 'Querying makerDAO DSR history'
        defi_events.extend(makerdao_dsr.get_history_events(
            from_timestamp=Timestamp(0),  # we need to process all events from history start
            to_timestamp=end_ts,
        ))
    step = self._increase_progress(step, total_steps)

    # Include makerdao vault events
    makerdao_vaults = self.chain_manager.get_module('makerdao_vaults')
    if makerdao_vaults and has_premium:
        self.processing_state_name = 'Querying makerDAO vaults history'
        defi_events.extend(makerdao_vaults.get_history_events(
            from_timestamp=Timestamp(0),  # we need to process all events from history start
            to_timestamp=end_ts,
        ))
    step = self._increase_progress(step, total_steps)

    # include yearn vault events
    yearn_vaults = self.chain_manager.get_module('yearn_vaults')
    if yearn_vaults and has_premium:
        self.processing_state_name = 'Querying yearn vaults history'
        defi_events.extend(yearn_vaults.get_history_events(
            from_timestamp=Timestamp(0),  # we need to process all events from history start
            to_timestamp=end_ts,
            addresses=self.chain_manager.queried_addresses_for_module('yearn_vaults'),
        ))
    step = self._increase_progress(step, total_steps)

    # include compound events
    compound = self.chain_manager.get_module('compound')
    if compound and has_premium:
        self.processing_state_name = 'Querying compound history'
        defi_events.extend(compound.get_history_events(
            from_timestamp=Timestamp(0),  # we need to process all events from history start
            to_timestamp=end_ts,
            addresses=self.chain_manager.queried_addresses_for_module('compound'),
        ))
    step = self._increase_progress(step, total_steps)

    # include adex events
    # NOTE(review): unlike the modules above, adex/aave/eth2 query from
    # start_ts rather than Timestamp(0) — confirm this asymmetry is intended
    adex = self.chain_manager.get_module('adex')
    if adex is not None and has_premium:
        self.processing_state_name = 'Querying adex staking history'
        defi_events.extend(adex.get_history_events(
            from_timestamp=start_ts,
            to_timestamp=end_ts,
            addresses=self.chain_manager.queried_addresses_for_module('adex'),
        ))
    step = self._increase_progress(step, total_steps)

    # include aave events
    aave = self.chain_manager.get_module('aave')
    if aave is not None and has_premium:
        self.processing_state_name = 'Querying aave history'
        defi_events.extend(aave.get_history_events(
            from_timestamp=start_ts,
            to_timestamp=end_ts,
            addresses=self.chain_manager.queried_addresses_for_module('aave'),
        ))
    # NOTE(review): the return value of the final two _increase_progress calls
    # is discarded, so `step` no longer advances — confirm this is intended
    self._increase_progress(step, total_steps)

    # include eth2 staking events
    if has_premium:
        self.processing_state_name = 'Querying ETH2 staking history'
        defi_events.extend(self.chain_manager.get_eth2_history_events(
            from_timestamp=start_ts,
            to_timestamp=end_ts,
        ))
    self._increase_progress(step, total_steps)

    history.sort(key=action_get_timestamp)
    return (
        empty_or_error,
        history,
        loans,
        asset_movements,
        eth_transactions,
        defi_events,
        ledger_actions,
    )
def test_eth2_deposits_serialization():
    """Check that Eth2Deposit objects serialize to the expected API dicts."""
    first_address = make_ethereum_address()
    second_address = make_ethereum_address()
    deposits = [
        Eth2Deposit(
            from_address=first_address,
            pubkey='0xb016e31f633a21fbe42a015152399361184f1e2c0803d89823c224994af74a561c4ad8cfc94b18781d589d03e952cd5b',  # noqa: E501
            withdrawal_credentials='0x004c7691c2085648f394ffaef851f3b1d51b95f7263114bc923fc5338f5fc499',  # noqa: E501
            value=Balance(FVal(32), FVal(64)),
            validator_index=9,
            tx_hash='0xd9eca1c2a0c5ff2f25071713432b21cc4d0ff2e8963edc63a48478e395e08db1',
            log_index=22,
            timestamp=Timestamp(1604506685),
        ),
        Eth2Deposit(
            from_address=second_address,
            pubkey='0xa8ff5fc88412d080a297683c25a791ef77eb52d75b265fabab1f2c2591bb927c35818ac6289bc6680ab252787d0ebab3',  # noqa: E501
            withdrawal_credentials='0x00cfe1c10347d642a8b8daf86d23bcb368076972691445de2cf517ff43765817',  # noqa: E501
            value=Balance(FVal(32), FVal(64)),
            validator_index=1650,
            tx_hash='0x6905f4d1843fb8c003c1fbbc2c8e6c5f9792f4f44ddb1122553412ee0b128da7',
            log_index=221,
            timestamp=Timestamp(1605043544),
        ),
    ]

    expected = [
        {
            'from_address': first_address,
            'pubkey': '0xb016e31f633a21fbe42a015152399361184f1e2c0803d89823c224994af74a561c4ad8cfc94b18781d589d03e952cd5b',  # noqa: E501
            'withdrawal_credentials': '0x004c7691c2085648f394ffaef851f3b1d51b95f7263114bc923fc5338f5fc499',  # noqa: E501
            'value': {'amount': '32', 'usd_value': '64'},
            'validator_index': 9,
            'tx_hash': '0xd9eca1c2a0c5ff2f25071713432b21cc4d0ff2e8963edc63a48478e395e08db1',
            'log_index': 22,
            'timestamp': 1604506685,
        },
        {
            'from_address': second_address,
            'pubkey': '0xa8ff5fc88412d080a297683c25a791ef77eb52d75b265fabab1f2c2591bb927c35818ac6289bc6680ab252787d0ebab3',  # noqa: E501
            'withdrawal_credentials': '0x00cfe1c10347d642a8b8daf86d23bcb368076972691445de2cf517ff43765817',  # noqa: E501
            'value': {'amount': '32', 'usd_value': '64'},
            'validator_index': 1650,
            'tx_hash': '0x6905f4d1843fb8c003c1fbbc2c8e6c5f9792f4f44ddb1122553412ee0b128da7',
            'log_index': 221,
            'timestamp': 1605043544,
        },
    ]
    assert process_result_list(deposits) == expected
def test_add_ethereum_transactions(data_dir, username):
    """Test that adding and retrieving ethereum transactions from the DB works fine.

    Also duplicates should be ignored and an error returned
    """
    msg_aggregator = MessagesAggregator()
    data = DataHandler(data_dir, msg_aggregator)
    data.unlock(username, '123', create_new=True)

    def _make_tx(tx_hash, ts, block_number, from_address, to_address, value, nonce):
        """Build a transaction fixture; gas fields are identical for all of them."""
        return EthereumTransaction(
            tx_hash=tx_hash,
            timestamp=Timestamp(ts),
            block_number=block_number,
            from_address=from_address,
            to_address=to_address,
            value=FVal(value),
            gas=FVal('5000000'),
            gas_price=FVal('2000000000'),
            gas_used=FVal('25000000'),
            input_data=MOCK_INPUT_DATA,
            nonce=nonce,
        )

    tx1 = _make_tx(b'1', 1451606400, 1, ETH_ADDRESS1, ETH_ADDRESS3, '2000000', 1)
    tx2 = _make_tx(b'2', 1451706400, 3, ETH_ADDRESS2, ETH_ADDRESS3, '4000000', 1)
    tx3 = _make_tx(b'3', 1452806400, 5, ETH_ADDRESS3, ETH_ADDRESS1, '1000000', 3)

    def _assert_no_messages():
        """The aggregator must hold neither errors nor warnings."""
        assert len(msg_aggregator.consume_errors()) == 0
        assert len(msg_aggregator.consume_warnings()) == 0

    # Add and retrieve the first 2 transactions. All should be fine.
    data.db.add_ethereum_transactions([tx1, tx2], from_etherscan=True)
    _assert_no_messages()
    assert data.db.get_ethereum_transactions() == [tx1, tx2]

    # Add the last 2 transactions. Since tx2 already exists in the DB it should
    # be ignored (no errors shown for attempting to add an existing transaction)
    data.db.add_ethereum_transactions([tx2, tx3], from_etherscan=True)
    _assert_no_messages()
    assert data.db.get_ethereum_transactions() == [tx1, tx2, tx3]
class EthereumTransactionQuerySchema(Schema):
    """Query arguments for the ethereum transactions endpoint.

    Defaults cover the whole history: from the epoch until the current time.
    """
    async_query = fields.Boolean(missing=False)  # run the query as a background task
    address = EthereumAddressField(missing=None)  # None: query for all addresses
    from_timestamp = TimestampField(missing=Timestamp(0))
    to_timestamp = TimestampField(missing=ts_now)  # callable default, evaluated at load time
# List of ADDR1, ADDR2 and ADDR3 deposit events from 1604506685 to 1605044577 # sorted by (timestamp, log_index). EXPECTED_DEPOSITS = [ Eth2Deposit( from_address=ADDR1, pubkey= '0xb016e31f633a21fbe42a015152399361184f1e2c0803d89823c224994af74a561c4ad8cfc94b18781d589d03e952cd5b', # noqa: E501 withdrawal_credentials= '0x004c7691c2085648f394ffaef851f3b1d51b95f7263114bc923fc5338f5fc499', # noqa: E501 value=Balance(FVal(32), FVal(64)), validator_index=9, tx_hash= '0xd9eca1c2a0c5ff2f25071713432b21cc4d0ff2e8963edc63a48478e395e08db1', log_index=22, timestamp=Timestamp(int(1604506685)), ), Eth2Deposit( from_address=ADDR3, pubkey= '0x90b2f65cb43d9cdb2279af9f76010d667b9d8d72e908f2515497a7102820ce6bb15302fe2b8dc082fce9718569344ad8', # noqa: E501 withdrawal_credentials= '0x00a257d19e1650dec1ab59fc9e1cb9a9fc2fe7265b0f27e7d79ff61aeff0a1f0', # noqa: E501 value=Balance(FVal(32), FVal(64)), validator_index=993, tx_hash= '0x3403bd94a1bf185ee18a525499e408a1b9b7d801cff6418e31efda346762e754', log_index=266, timestamp=Timestamp(int(1604611131)), ), Eth2Deposit(
def add_starting_balances(datahandler) -> List[AssetBalance]:
    """Adds some starting balances and other data to a testing instance"""
    balance_rows = (
        (A_BTC, '1', '1222.66'),
        (A_ETH, '10', '4517.4'),
        (A_EUR, '100', '61.5'),
        (A_XMR, '5', '135.6'),
    )
    balances = [
        AssetBalance(
            time=Timestamp(1488326400),
            asset=asset,
            amount=amount,
            usd_value=usd_value,
        ) for asset, amount, usd_value in balance_rows
    ]
    datahandler.db.add_multiple_balances(balances)

    # Also add an unknown/invalid asset. This will generate a warning
    cursor = datahandler.db.conn.cursor()
    cursor.execute(
        'INSERT INTO timed_balances('
        ' time, currency, amount, usd_value) '
        ' VALUES(?, ?, ?, ?)',
        (1469326500, 'ADSADX', '10.1', '100.5'),
    )
    datahandler.db.conn.commit()

    location_rows = (
        (1451606400, 'kraken', '100'),
        (1451606400, 'banks', '1000'),
        (1461606500, 'poloniex', '50'),
        (1461606500, 'kraken', '200'),
        (1461606500, 'banks', '50000'),
        (1491607800, 'poloniex', '100'),
        (1491607800, 'kraken', '2000'),
        (1491607800, 'banks', '10000'),
        (1491607800, 'blockchain', '200000'),
        (1451606400, 'total', '1500'),
        (1461606500, 'total', '4500'),
        (1491607800, 'total', '10700.5'),
    )
    location_data = [
        LocationData(
            time=Timestamp(time),
            location=location,
            usd_value=usd_value,
        ) for time, location, usd_value in location_rows
    ]
    datahandler.db.add_multiple_location_data(location_data)

    return balances
from rotkehlchen.constants.misc import ZERO from rotkehlchen.fval import FVal from rotkehlchen.history.price import query_usd_price_zero_if_error from rotkehlchen.inquirer import Inquirer from rotkehlchen.serialization.deserialize import deserialize_ethereum_address from rotkehlchen.typing import ChecksumEthAddress, Timestamp from rotkehlchen.user_messages import MessagesAggregator from rotkehlchen.utils.misc import from_gwei, ts_now if TYPE_CHECKING: from rotkehlchen.chain.ethereum.manager import EthereumManager from rotkehlchen.db.dbhandler import DBHandler from rotkehlchen.externalapis.beaconchain import BeaconChain, ValidatorPerformance ETH2_DEPOSIT = EthereumConstants().contract('ETH2_DEPOSIT') ETH2_DEPLOYED_TS = Timestamp(1602667372) ETH2_DEPOSITS_PREFIX = 'eth2_deposits' EVENT_ABI = [x for x in ETH2_DEPOSIT.abi if x['type'] == 'event'][0] REQUEST_DELTA_TS = 60 * 60 # 1h Eth2DepositDBTuple = ( Tuple[str, # tx_hash int, # log_index str, # from_address int, # timestamp str, # pubkey str, # withdrawal_credentials str, # value int, # validator_index
def get_historical_data(
        self,
        from_asset: Asset,
        to_asset: Asset,
        timestamp: Timestamp,
        historical_data_start: Timestamp,
) -> List[PriceHistoryEntry]:
    """
    Get historical price data from cryptocompare

    Returns a sorted list of price entries.

    The data are fetched in windows of `cryptocompare_hourquerylimit` hours,
    starting from min(historical_data_start, timestamp) and walking forward
    until the current time, then cached both on disk and in memory.

    - May raise RemoteError if there is a problem reaching the cryptocompare server
    or with reading the response returned by the server
    - May raise UnsupportedAsset if from/to asset is not supported by cryptocompare
    """
    log.debug(
        'Retrieving historical price data from cryptocompare',
        from_asset=from_asset,
        to_asset=to_asset,
        timestamp=timestamp,
    )

    cache_key = PairCacheKey(from_asset.identifier + '_' + to_asset.identifier)
    got_cached_value = self._got_cached_price(cache_key, timestamp)
    if got_cached_value:
        return self.price_history[cache_key].data

    now_ts = ts_now()
    cryptocompare_hourquerylimit = 2000
    calculated_history: List[Dict[str, Any]] = []

    # Start the walk at whichever of the two timestamps is earlier
    if historical_data_start <= timestamp:
        end_date = historical_data_start
    else:
        end_date = timestamp
    while True:
        pr_end_date = end_date
        end_date = Timestamp(end_date + cryptocompare_hourquerylimit * 3600)

        log.debug(
            'Querying cryptocompare for hourly historical price',
            from_asset=from_asset,
            to_asset=to_asset,
            cryptocompare_hourquerylimit=cryptocompare_hourquerylimit,
            end_date=end_date,
        )

        resp = self.query_endpoint_histohour(
            from_asset=from_asset,
            to_asset=to_asset,
            # use the window-size variable instead of repeating the literal 2000
            limit=cryptocompare_hourquerylimit,
            to_timestamp=end_date,
        )

        if pr_end_date != resp['TimeFrom']:
            # If we get more than we needed, since we are close to the now_ts
            # then skip all the already included entries
            diff = pr_end_date - resp['TimeFrom']
            # If the start date has less than 3600 secs difference from previous
            # end date then do nothing. If it has more skip all already included entries
            if diff >= 3600:
                if resp['Data'][diff // 3600]['time'] != pr_end_date:
                    raise RemoteError(
                        # typo fix: the message used to say "fata format"
                        'Unexpected data format in cryptocompare query_endpoint_histohour. '
                        'Expected to find the previous date timestamp during '
                        'cryptocompare historical data fetching',
                    )
                # just add only the part from the previous timestamp and on
                resp['Data'] = resp['Data'][diff // 3600:]

        # The end dates of a cryptocompare query do not match. The end date
        # can have up to 3600 secs different to the requested one since this is
        # hourly historical data but no more.
        end_dates_dont_match = (end_date < now_ts and resp['TimeTo'] != end_date)
        if end_dates_dont_match:
            if resp['TimeTo'] - end_date >= 3600:
                raise RemoteError(
                    # typo fix: the message used to say "fata format"
                    'Unexpected data format in cryptocompare query_endpoint_histohour. '
                    'End dates do not match.',
                )
            # else if it's just a drift within an hour just update the end_date so that
            # it can be picked up by the next iterations in the loop
            end_date = resp['TimeTo']

        # If last time slot and first new are the same, skip the first new slot
        last_entry_equal_to_first = (
            len(calculated_history) != 0 and
            calculated_history[-1]['time'] == resp['Data'][0]['time']
        )
        if last_entry_equal_to_first:
            resp['Data'] = resp['Data'][1:]
        calculated_history += resp['Data']
        if end_date >= now_ts:
            break

    # Let's always check for data sanity for the hourly prices.
    _check_hourly_data_sanity(calculated_history, from_asset, to_asset)
    # and now since we actually queried the data let's also cache them
    filename = self.data_directory / ('price_history_' + cache_key + '.json')
    log.info(
        'Updating price history cache',
        filename=filename,
        from_asset=from_asset,
        to_asset=to_asset,
    )
    write_history_data_in_file(
        data=calculated_history,
        filepath=filename,
        start_ts=historical_data_start,
        end_ts=now_ts,
    )

    # Finally save the objects in memory and return them
    data_including_time = {
        'data': calculated_history,
        'start_time': historical_data_start,
        'end_time': end_date,
    }
    self.price_history_file[cache_key] = filename
    self.price_history[cache_key] = _dict_history_to_data(data_including_time)

    return self.price_history[cache_key].data
def create_timestamp(datestr: str, formatstr: str = '%Y-%m-%d %H:%M:%S') -> Timestamp:
    """Turn a date string into a UTC epoch Timestamp.

    Can throw ValueError due to strptime
    """
    parsed = time.strptime(datestr, formatstr)
    return Timestamp(calendar.timegm(parsed))
data: List[PriceHistoryEntry] start_time: Timestamp end_time: Timestamp class HistoHourAssetData(NamedTuple): timestamp: Timestamp usd_price: Price # Safest starting timestamp for requesting an asset price via histohour avoiding # 0 price. Be aware `usd_price` is from the 'close' price in USD. CRYPTOCOMPARE_SPECIAL_HISTOHOUR_CASES: Dict[Asset, HistoHourAssetData] = { A_COMP: HistoHourAssetData( timestamp=Timestamp(1592629200), usd_price=Price(FVal('239.13')), ), } def _dict_history_to_entries( data: List[Dict[str, Any]]) -> List[PriceHistoryEntry]: """Turns a list of dict of history entries to a list of proper objects""" return [ PriceHistoryEntry( time=Timestamp(entry['time']), low=Price(FVal(entry['low'])), high=Price(FVal(entry['high'])), ) for entry in data ]
def _import_cryptocom_associated_entries(self, data: Any, tx_kind: str) -> None:
    """Look for events that have associated entries and handle them as trades.

    This method looks for `*_debited` and `*_credited` entries using the same
    timestamp to handle them as one trade. It also pairs `*_deposit` and
    `*_withdrawal` rows per asset to record staking profit as ledger actions.

    Known kind: 'dynamic_coin_swap' or 'dust_conversion'

    May raise:
    - UnknownAsset if an unknown asset is encountered in the imported files
    - KeyError if a row contains unexpected data entries
    """
    # timestamp -> {'debited': [rows...], 'credited': row}
    multiple_rows: Dict[Any, Dict[str, Any]] = {}
    investments_deposits: Dict[str, List[Any]] = defaultdict(list)
    investments_withdrawals: Dict[str, List[Any]] = defaultdict(list)
    debited_row = None
    credited_row = None
    # First pass: bucket the rows by kind (and by timestamp for debit/credit)
    for row in data:
        if row['Transaction Kind'] == f'{tx_kind}_debited':
            timestamp = deserialize_timestamp_from_date(
                date=row['Timestamp (UTC)'],
                formatstr='%Y-%m-%d %H:%M:%S',
                location='cryptocom',
            )
            if timestamp not in multiple_rows:
                multiple_rows[timestamp] = {}
            if 'debited' not in multiple_rows[timestamp]:
                multiple_rows[timestamp]['debited'] = []
            multiple_rows[timestamp]['debited'].append(row)
        elif row['Transaction Kind'] == f'{tx_kind}_credited':
            # There is only one credited row per timestamp
            timestamp = deserialize_timestamp_from_date(
                date=row['Timestamp (UTC)'],
                formatstr='%Y-%m-%d %H:%M:%S',
                location='cryptocom',
            )
            if timestamp not in multiple_rows:
                multiple_rows[timestamp] = {}
            multiple_rows[timestamp]['credited'] = row
        elif row['Transaction Kind'] == f'{tx_kind}_deposit':
            asset = row['Currency']
            investments_deposits[asset].append(row)
        elif row['Transaction Kind'] == f'{tx_kind}_withdrawal':
            asset = row['Currency']
            investments_withdrawals[asset].append(row)

    # Second pass: turn each (debited rows, credited row) pair into BUY trades
    for timestamp in multiple_rows:
        # When we convert multiple assets dust to CRO
        # in one time, it will create multiple debited rows with
        # the same timestamp
        debited_rows = multiple_rows[timestamp]['debited']
        credited_row = multiple_rows[timestamp]['credited']
        total_debited_usd = functools.reduce(
            lambda acc, row: acc + deserialize_asset_amount(row['Native Amount (in USD)']),
            debited_rows,
            ZERO,
        )

        # If the value of the transaction is too small (< 0,01$),
        # crypto.com will display 0 as native amount
        # if we have multiple debited rows, we can't import them
        # since we can't compute their dedicated rates, so we skip them
        # NOTE(review): this 'return' aborts processing of ALL remaining
        # timestamps, not just this one — confirm 'continue' was not intended
        if len(debited_rows) > 1 and total_debited_usd == 0:
            return

        if credited_row is not None and len(debited_rows) != 0:
            for debited_row in debited_rows:
                description = credited_row['Transaction Description']
                notes = f'{description}\nSource: crypto.com (CSV import)'
                # No fees here
                fee = Fee(ZERO)
                fee_currency = A_USD

                base_asset = symbol_to_asset_or_token(credited_row['Currency'])
                quote_asset = symbol_to_asset_or_token(debited_row['Currency'])
                # Each debited row gets a pro-rata share of the credited amount,
                # weighted by its USD value
                part_of_total = (
                    FVal(1)
                    if len(debited_rows) == 1
                    else deserialize_asset_amount(
                        debited_row["Native Amount (in USD)"],
                    ) / total_debited_usd
                )
                quote_amount_sold = deserialize_asset_amount(
                    debited_row['Amount'],
                ) * part_of_total
                base_amount_bought = deserialize_asset_amount(
                    credited_row['Amount'],
                ) * part_of_total
                rate = Price(abs(base_amount_bought / quote_amount_sold))

                trade = Trade(
                    timestamp=timestamp,
                    location=Location.CRYPTOCOM,
                    base_asset=base_asset,
                    quote_asset=quote_asset,
                    trade_type=TradeType.BUY,
                    amount=AssetAmount(base_amount_bought),
                    rate=rate,
                    fee=fee,
                    fee_currency=fee_currency,
                    link='',
                    notes=notes,
                )
                self.db.add_trades([trade])

    # Compute investments profit
    if len(investments_withdrawals) != 0:
        for asset in investments_withdrawals:
            asset_object = symbol_to_asset_or_token(asset)
            if asset not in investments_deposits:
                log.error(
                    f'Investment withdrawal without deposit at crypto.com. Ignoring '
                    f'staking info for asset {asset_object}',
                )
                continue
            # Sort by date in ascending order
            withdrawals_rows = sorted(
                investments_withdrawals[asset],
                key=lambda x: deserialize_timestamp_from_date(
                    date=x['Timestamp (UTC)'],
                    formatstr='%Y-%m-%d %H:%M:%S',
                    location='cryptocom',
                ),
            )
            investments_rows = sorted(
                investments_deposits[asset],
                key=lambda x: deserialize_timestamp_from_date(
                    date=x['Timestamp (UTC)'],
                    formatstr='%Y-%m-%d %H:%M:%S',
                    location='cryptocom',
                ),
            )
            last_date = Timestamp(0)
            # For each withdrawal, net it against the deposits that happened
            # since the previous profitable withdrawal
            for withdrawal in withdrawals_rows:
                withdrawal_date = deserialize_timestamp_from_date(
                    date=withdrawal['Timestamp (UTC)'],
                    formatstr='%Y-%m-%d %H:%M:%S',
                    location='cryptocom',
                )
                amount_deposited = ZERO
                for deposit in investments_rows:
                    deposit_date = deserialize_timestamp_from_date(
                        date=deposit['Timestamp (UTC)'],
                        formatstr='%Y-%m-%d %H:%M:%S',
                        location='cryptocom',
                    )
                    if last_date < deposit_date <= withdrawal_date:
                        # Amount is negative
                        amount_deposited += deserialize_asset_amount(deposit['Amount'])
                amount_withdrawal = deserialize_asset_amount(withdrawal['Amount'])
                # Compute profit
                profit = amount_withdrawal + amount_deposited
                if profit >= ZERO:
                    last_date = withdrawal_date
                    action = LedgerAction(
                        identifier=0,  # whatever is not used at insertion
                        timestamp=withdrawal_date,
                        action_type=LedgerActionType.INCOME,
                        location=Location.CRYPTOCOM,
                        amount=AssetAmount(profit),
                        asset=asset_object,
                        rate=None,
                        rate_asset=None,
                        link=None,
                        notes=f'Stake profit for asset {asset}',
                    )
                    self.db_ledger.add_ledger_action(action)
def query_ethereum_txlist(
        address: EthAddress,
        internal: bool,
        from_block: Optional[int] = None,
        to_block: Optional[int] = None,
) -> List[EthereumTransaction]:
    """Query etherscan's account module for an address' transaction list.

    Queries the `txlistinternal` action when `internal` is True, otherwise
    `txlist`, optionally bounded by `from_block`/`to_block`.

    Returns a list of EthereumTransaction. An etherscan "No transactions found"
    response yields an empty list.

    May raise ValueError if etherscan returns an error or malformed response.
    """
    log.debug(
        'Querying etherscan for tx list',
        sensitive_log=True,
        internal=internal,
        eth_address=address,
        from_block=from_block,
        to_block=to_block,
    )

    result = []
    if internal:
        reqstring = (
            'https://api.etherscan.io/api?module=account&action='
            'txlistinternal&address={}'.format(address)
        )
    else:
        reqstring = (
            'https://api.etherscan.io/api?module=account&action='
            'txlist&address={}'.format(address)
        )
    # Compare against None explicitly so that block number 0 is not dropped
    if from_block is not None:
        reqstring += '&startblock={}'.format(from_block)
    if to_block is not None:
        reqstring += '&endblock={}'.format(to_block)

    resp = request_get_dict(reqstring)

    if 'status' not in resp or convert_to_int(resp['status']) != 1:
        # Guard both lookups: a response without 'status'/'message' used to
        # raise KeyError here instead of the intended ValueError
        status = convert_to_int(resp['status']) if 'status' in resp else None
        message = resp.get('message', '')
        if status == 0 and message == 'No transactions found':
            return []

        log.error(
            'Querying etherscan for tx list failed',
            sensitive_log=True,
            internal=internal,
            eth_address=address,
            from_block=from_block,
            to_block=to_block,
            error=message,
        )
        # else unknown error
        raise ValueError(
            'Failed to query txlist from etherscan with query: {} . '
            'Response was: {}'.format(reqstring, resp),
        )

    log.debug('Etherscan tx list query result', results_num=len(resp['result']))
    for v in resp['result']:
        # internal tx list contains no gasprice
        gas_price = FVal(-1) if internal else FVal(v['gasPrice'])
        result.append(EthereumTransaction(
            timestamp=Timestamp(convert_to_int(v['timeStamp'])),
            block_number=convert_to_int(v['blockNumber']),
            hash=v['hash'],
            from_address=v['from'],
            to_address=v['to'],
            value=FVal(v['value']),
            gas=FVal(v['gas']),
            gas_price=gas_price,
            gas_used=FVal(v['gasUsed']),
        ))

    return result
class HistoryProcessingSchema(Schema):
    """Query arguments for triggering a history processing run.

    Defaults cover the whole history: from the epoch until the current time.
    """
    from_timestamp = TimestampField(missing=Timestamp(0))
    to_timestamp = TimestampField(missing=ts_now)  # callable default, evaluated at load time
    async_query = fields.Boolean(missing=False)  # run the processing as a background task
def get_eth2_staking_deposits(
        ethereum: 'EthereumManager',
        addresses: List[ChecksumEthAddress],
        has_premium: bool,
        msg_aggregator: MessagesAggregator,
        database: 'DBHandler',
) -> List[Eth2Deposit]:
    """Get the addresses' ETH2 staking deposits

    For any given new address query on-chain from the ETH2 deposit contract
    deployment timestamp until now.

    For any existing address query on-chain from the minimum last used query
    range "end_ts" (among all the existing addresses) until now, as long as the
    difference between both is gte than REQUEST_DELTA_TS.

    Then write in DB all the new deposits and finally return them all.
    """
    new_deposits: List[Eth2Deposit] = []
    new_addresses: List[ChecksumEthAddress] = []
    existing_addresses: List[ChecksumEthAddress] = []
    to_ts = ts_now()
    # min_from_ts tracks the earliest "already queried up to" timestamp
    # among the existing addresses; initialized to now as the upper bound
    min_from_ts = to_ts

    # Get addresses' last used query range for ETH2 deposits
    for address in addresses:
        entry_name = f'{ETH2_DEPOSITS_PREFIX}_{address}'
        deposits_range = database.get_used_query_range(name=entry_name)

        if not deposits_range:
            new_addresses.append(address)
        else:
            existing_addresses.append(address)
            # deposits_range[1] is the end of the last queried range
            min_from_ts = min(min_from_ts, deposits_range[1])

    # Get deposits for new addresses: query from contract deployment until now
    if new_addresses:
        deposits_ = _get_eth2_staking_deposits_onchain(
            ethereum=ethereum,
            addresses=new_addresses,
            has_premium=has_premium,
            msg_aggregator=msg_aggregator,
            from_ts=ETH2_DEPLOYED_TS,
            to_ts=to_ts,
        )
        new_deposits.extend(deposits_)

        # Record the covered range so the next call can skip it
        for address in new_addresses:
            entry_name = f'{ETH2_DEPOSITS_PREFIX}_{address}'
            database.update_used_query_range(
                name=entry_name,
                start_ts=ETH2_DEPLOYED_TS,
                end_ts=to_ts,
            )

    # Get new deposits for existing addresses, but only if at least
    # REQUEST_DELTA_TS has passed since the oldest recorded end_ts
    if existing_addresses and min_from_ts + REQUEST_DELTA_TS <= to_ts:
        deposits_ = _get_eth2_staking_deposits_onchain(
            ethereum=ethereum,
            addresses=existing_addresses,
            has_premium=has_premium,
            msg_aggregator=msg_aggregator,
            from_ts=Timestamp(min_from_ts),
            to_ts=to_ts,
        )
        new_deposits.extend(deposits_)

        for address in existing_addresses:
            entry_name = f'{ETH2_DEPOSITS_PREFIX}_{address}'
            database.update_used_query_range(
                name=entry_name,
                start_ts=Timestamp(min_from_ts),
                end_ts=to_ts,
            )

    # Insert new deposits in DB
    if new_deposits:
        database.add_eth2_deposits(new_deposits)

    # Fetch all DB deposits for the given addresses (old and newly added)
    deposits: List[Eth2Deposit] = []
    for address in addresses:
        db_deposits = database.get_eth2_deposits(address=address)
        deposits.extend(db_deposits)

    deposits.sort(key=lambda deposit: (deposit.timestamp, deposit.log_index))
    return deposits
class AsyncHistoricalQuerySchema(AsyncQueryArgumentSchema):
    """A schema for getters that have 2 arguments.

    One to enable async querying and another to force reset DB data by
    querying everything again. Timestamp defaults cover the whole history.
    """
    reset_db_data = fields.Boolean(missing=False)  # True: discard cached DB data and re-query
    from_timestamp = TimestampField(missing=Timestamp(0))
    to_timestamp = TimestampField(missing=ts_now)  # callable default, evaluated at load time
assert '%' in vault['roi'] assert FVal(vault['vault_value']['amount']) > ZERO assert FVal(vault['vault_value']['usd_value']) > ZERO assert FVal(vault['underlying_value']['amount']) > ZERO assert FVal(vault['underlying_value']['usd_value']) > ZERO # Expected events as of writing of the test. USD values are all mocked. EXPECTED_HISTORY = { 'YALINK Vault': YearnVaultHistory( events=[ YearnVaultEvent( event_type='deposit', block_number=10693331, timestamp=Timestamp(1597877037), from_asset=EthereumToken('aLINK'), from_value=Balance(amount=FVal('389.42925099069838547'), usd_value=ONE), to_asset=EthereumToken('yaLINK'), to_value=Balance(amount=FVal('378.670298739289527979'), usd_value=ONE), realized_pnl=None, tx_hash= '0x14bbb454cfe3bfbef4e7ea2b03e7aac022048480b3d2f81ea8d191f0543848c4', log_index=102, ), YearnVaultEvent( event_type='deposit', block_number=10843734, timestamp=Timestamp(1599868806),
class TimerangeLocationQuerySchema(Schema):
    """Query arguments for endpoints that filter by a time range and a location."""
    # Defaults span all history. ts_now is a callable, so marshmallow
    # resolves "now" at deserialization time, not at import time.
    from_timestamp = TimestampField(missing=Timestamp(0))
    to_timestamp = TimestampField(missing=ts_now)
    # None means "no location filter"
    location = LocationField(missing=None)
    async_query = fields.Boolean(missing=False)
def query_balances(
        self,
        requested_save_data: bool = False,
        timestamp: Timestamp = None,
) -> Dict[str, Any]:
    """Query all balances rotkehlchen can see.

    If requested_save_data is True then the data are saved in the DB.
    If timestamp is None then the current timestamp is used.
    If a timestamp is given then that is the time that the balances are going
    to be saved in the DB

    Returns a dictionary with the queried balances, per-location statistics
    under 'location' and the total under 'net_usd'.
    """
    log.info('query_balances called', requested_save_data=requested_save_data)

    balances = {}
    problem_free = True
    for exchange in self.exchange_manager.connected_exchanges.values():
        exchange_balances, _ = exchange.query_balances()
        # If we got an error, disregard that exchange but make sure we don't save data
        if not isinstance(exchange_balances, dict):
            problem_free = False
        else:
            balances[exchange.name] = exchange_balances

    result, error_or_empty = self.blockchain.query_balances()
    if error_or_empty == '':
        balances['blockchain'] = result['totals']
    else:
        problem_free = False

    result = self.query_fiat_balances()
    if result != {}:
        balances['banks'] = result

    combined = combine_stat_dicts(list(balances.values()))
    total_usd_per_location = [(k, dict_get_sumof(v, 'usd_value')) for k, v in balances.items()]

    # calculate net usd value
    net_usd = FVal(0)
    for v in combined.values():
        net_usd += FVal(v['usd_value'])

    stats: Dict[str, Any] = {
        'location': {},
        'net_usd': net_usd,
    }
    for name, total in total_usd_per_location:
        if net_usd != FVal(0):
            percentage = (total / net_usd).to_percentage()
        else:
            percentage = '0%'
        stats['location'][name] = {
            'usd_value': total,
            'percentage_of_net_value': percentage,
        }

    for k, v in combined.items():
        if net_usd != FVal(0):
            percentage = (v['usd_value'] / net_usd).to_percentage()
        else:
            percentage = '0%'
        combined[k]['percentage_of_net_value'] = percentage

    result_dict = merge_dicts(combined, stats)

    allowed_to_save = requested_save_data or self.data.should_save_balances()
    if problem_free and allowed_to_save:
        # BUGFIX: compare against None explicitly. The previous
        # `if not timestamp:` also matched a caller-supplied Timestamp(0)
        # (falsy int), silently replacing the requested save time with "now".
        if timestamp is None:
            timestamp = Timestamp(int(time.time()))
        self.data.save_balances_data(data=result_dict, timestamp=timestamp)
        log.debug('query_balances data saved')
    else:
        log.debug(
            'query_balances data not saved',
            allowed_to_save=allowed_to_save,
            problem_free=problem_free,
        )

    # After adding it to the saved file we can overlay additional data that
    # is not required to be saved in the history file
    try:
        details = self.accountant.events.details
        for asset, (tax_free_amount, average_buy_value) in details.items():
            if asset not in result_dict:
                continue

            result_dict[asset]['tax_free_amount'] = tax_free_amount
            result_dict[asset]['average_buy_value'] = average_buy_value

            current_price = result_dict[asset]['usd_value'] / result_dict[asset]['amount']
            if average_buy_value != FVal(0):
                result_dict[asset]['percent_change'] = (
                    ((current_price - average_buy_value) / average_buy_value) * 100
                )
            else:
                # Bought at zero cost -- percent change is undefined/infinite
                result_dict[asset]['percent_change'] = 'INF'
    except AttributeError:
        # The accountant may not have processed any events yet; this overlay
        # is best-effort and deliberately skipped in that case.
        pass

    return result_dict
def test_get_eth2_staking_deposits_fetch_from_db(  # pylint: disable=unused-argument
        ethereum_manager,
        call_order,
        ethereum_manager_connect_at_start,
        inquirer,
        price_historian,
        freezer,
):
    """
    Test new on-chain requests for existing addresses requires a difference of
    REQUEST_DELTA_TS since last used query range `end_ts`.
    """
    # Freeze time at the first expected deposit's timestamp; that frozen
    # moment becomes the baseline "now" for the whole test.
    # NOTE: this local `ts_now` shadows any imported ts_now() helper.
    freezer.move_to(datetime.fromtimestamp(EXPECTED_DEPOSITS[0].timestamp))
    ts_now = int(datetime.now().timestamp())  # 1604506685

    # Mocked DB: every address already has a used query range ending at
    # ts_now, so only the passage of REQUEST_DELTA_TS can trigger a new
    # on-chain request (one side_effect entry per call below).
    database = MagicMock()
    database.get_used_query_range.side_effect = [
        (Timestamp(ts_now - (2 * REQUEST_DELTA_TS)), Timestamp(ts_now)),
        (Timestamp(ts_now - (2 * REQUEST_DELTA_TS)), Timestamp(ts_now)),
        (Timestamp(ts_now - (2 * REQUEST_DELTA_TS)), Timestamp(ts_now)),
    ]
    database.get_eth2_deposits.side_effect = [
        [],  # no on-chain request, nothing in DB
        [],  # no on-chain request, nothing in DB
        [EXPECTED_DEPOSITS[0]],  # on-chain request, deposit in DB
    ]
    with patch(
        'rotkehlchen.chain.ethereum.eth2._get_eth2_staking_deposits_onchain',
    ) as mock_get_eth2_staking_deposits_onchain:
        # 3rd call return
        mock_get_eth2_staking_deposits_onchain.return_value = [EXPECTED_DEPOSITS[0]]

        wait_until_all_nodes_connected(
            ethereum_manager_connect_at_start=ethereum_manager_connect_at_start,
            ethereum=ethereum_manager,
        )
        message_aggregator = MessagesAggregator()

        # First call: range end is "now", so no on-chain query happens
        deposit_results_onchain = get_eth2_staking_deposits(
            ethereum=ethereum_manager,
            addresses=[ADDR1],
            has_premium=True,
            msg_aggregator=message_aggregator,
            database=database,
        )
        assert deposit_results_onchain == []
        mock_get_eth2_staking_deposits_onchain.assert_not_called()

        # NB: Move time to ts_now + REQUEST_DELTA_TS - 1s
        freezer.move_to(datetime.fromtimestamp(ts_now + REQUEST_DELTA_TS - 1))

        # Second call: one second short of the threshold, still no query
        deposit_results_onchain = get_eth2_staking_deposits(
            ethereum=ethereum_manager,
            addresses=[ADDR1],
            has_premium=True,
            msg_aggregator=message_aggregator,
            database=database,
        )
        assert deposit_results_onchain == []
        mock_get_eth2_staking_deposits_onchain.assert_not_called()

        # NB: Move time to ts_now + REQUEST_DELTA_TS (triggers request)
        freezer.move_to(datetime.fromtimestamp(ts_now + REQUEST_DELTA_TS))

        # Third call: exactly REQUEST_DELTA_TS elapsed, on-chain query fires
        deposit_results_onchain = get_eth2_staking_deposits(
            ethereum=ethereum_manager,
            addresses=[ADDR1],
            has_premium=True,
            msg_aggregator=message_aggregator,
            database=database,
        )
        assert deposit_results_onchain == [EXPECTED_DEPOSITS[0]]
        mock_get_eth2_staking_deposits_onchain.assert_called_with(
            ethereum=ethereum_manager,
            addresses=[ADDR1],
            has_premium=True,
            msg_aggregator=message_aggregator,
            from_ts=Timestamp(ts_now),
            to_ts=Timestamp(ts_now + REQUEST_DELTA_TS),
        )
def test_binance_query_deposits_withdrawals(function_scope_binance):
    """Test the happy case of binance deposit withdrawal query

    NB: set `start_ts` and `end_ts` with a difference less than 90 days to
    prevent requesting with a time delta.
    """
    start_ts = 1508022000  # 2017-10-15
    end_ts = 1508540400  # 2017-10-21 (less than 90 days since `start_ts`)
    binance = function_scope_binance

    def mock_get_deposit_withdrawal(url):  # pylint: disable=unused-argument
        # Serve the canned deposit or withdrawal payload based on the URL
        if 'deposit' in url:
            response_str = BINANCE_DEPOSITS_HISTORY_RESPONSE
        else:
            response_str = BINANCE_WITHDRAWALS_HISTORY_RESPONSE

        return MockResponse(200, response_str)

    with patch.object(binance.session, 'get', side_effect=mock_get_deposit_withdrawal):
        movements = binance.query_online_deposits_withdrawals(
            start_ts=Timestamp(start_ts),
            end_ts=Timestamp(end_ts),
        )

    # A clean run must produce no user-facing errors or warnings
    errors = binance.msg_aggregator.consume_errors()
    warnings = binance.msg_aggregator.consume_warnings()
    assert len(errors) == 0
    assert len(warnings) == 0

    # 2 deposits + 2 withdrawals from the canned responses
    assert len(movements) == 4

    assert movements[0].location == Location.BINANCE
    assert movements[0].category == AssetMovementCategory.DEPOSIT
    assert movements[0].timestamp == 1508198532
    assert isinstance(movements[0].asset, Asset)
    assert movements[0].asset == A_ETH
    assert movements[0].amount == FVal('0.04670582')
    assert movements[0].fee == ZERO

    assert movements[1].location == Location.BINANCE
    assert movements[1].category == AssetMovementCategory.DEPOSIT
    assert movements[1].timestamp == 1508398632
    assert isinstance(movements[1].asset, Asset)
    assert movements[1].asset == A_XMR
    assert movements[1].amount == FVal('1000')
    assert movements[1].fee == ZERO

    assert movements[2].location == Location.BINANCE
    assert movements[2].category == AssetMovementCategory.WITHDRAWAL
    assert movements[2].timestamp == 1508488245
    assert isinstance(movements[2].asset, Asset)
    assert movements[2].asset == A_ETH
    assert movements[2].amount == FVal('1')
    assert movements[2].fee == ZERO

    assert movements[3].location == Location.BINANCE
    assert movements[3].category == AssetMovementCategory.WITHDRAWAL
    assert movements[3].timestamp == 1508512521
    assert isinstance(movements[3].asset, Asset)
    assert movements[3].asset == A_XMR
    assert movements[3].amount == FVal('850.1')
    assert movements[3].fee == ZERO
success, _ = data.set_fiat_balance(S_EUR, '') balances = data.get_fiat_balances() assert len(balances) == 1 assert balances[A_CNY] == amount_cny # also check that all the fiat assets in the fiat table are in # all_assets.json for fiat in FIAT_CURRENCIES: success, _ = data.set_fiat_balance(fiat, '1') assert success asset_balances = [ AssetBalance( time=Timestamp(1451606400), asset=A_USD, amount='10', usd_value='10', ), AssetBalance( time=Timestamp(1451606401), asset=A_ETH, amount='2', usd_value='1.7068', ), AssetBalance( time=Timestamp(1465171200), asset=A_USD, amount='500', usd_value='500',
def test_api_query_dict_calls_with_time_delta(function_scope_binance):
    """Test the `api_query_dict()` arguments when deposit/withdraw history
    requests involve a time delta.

    From `start_ts` to `end_ts` there is a difference gte 90 days, which
    forces to request using a time delta (from API_TIME_INTERVAL_CONSTRAINT_TS).
    """
    # NOTE(review): utcnow().timestamp() interprets a naive datetime as local
    # time; this mirrors whatever the production code does, so it is kept.
    now_ts_ms = int(datetime.utcnow().timestamp()) * 1000
    start_ts = 0  # Defaults to BINANCE_LAUNCH_TS
    end_ts = BINANCE_LAUNCH_TS + API_TIME_INTERVAL_CONSTRAINT_TS  # eq 90 days after
    # Each endpoint is expected to be queried twice: the first full 90-day
    # window, then a second one-instant window for the remainder.
    expected_calls = [
        call(
            'depositHistory.html',
            options={
                'timestamp': now_ts_ms,
                'startTime': 1500001200000,
                'endTime': 1507777199999,
            },
        ),
        call(
            'depositHistory.html',
            options={
                'timestamp': now_ts_ms,
                'startTime': 1507777200000,
                'endTime': 1507777200000,
            },
        ),
        call(
            'withdrawHistory.html',
            options={
                'timestamp': now_ts_ms,
                'startTime': 1500001200000,
                'endTime': 1507777199999,
            },
        ),
        call(
            'withdrawHistory.html',
            options={
                'timestamp': now_ts_ms,
                'startTime': 1507777200000,
                'endTime': 1507777200000,
            },
        ),
    ]
    binance = function_scope_binance

    # Idiom fix: `yield from` replaces the manual `for result in results:
    # yield result` loops -- identical iteration behavior.
    def get_time_delta_deposit_result():
        # First window has deposit data, second window is empty
        yield from (BINANCE_DEPOSITS_HISTORY_RESPONSE, '{}')

    def get_time_delta_withdraw_result():
        # First window is empty, second window has withdrawal data
        yield from ('{}', BINANCE_WITHDRAWALS_HISTORY_RESPONSE)

    def mock_get_deposit_withdrawal(url):  # pylint: disable=unused-argument
        # Consume the next canned response for the matching endpoint
        if 'deposit' in url:
            response_str = next(get_deposit_result)
        else:
            response_str = next(get_withdraw_result)

        return MockResponse(200, response_str)

    get_deposit_result = get_time_delta_deposit_result()
    get_withdraw_result = get_time_delta_withdraw_result()

    with patch.object(binance.session, 'get', side_effect=mock_get_deposit_withdrawal):
        with patch.object(binance, 'api_query_dict') as mock_api_query_dict:
            binance.query_online_deposits_withdrawals(
                start_ts=Timestamp(start_ts),
                end_ts=Timestamp(end_ts),
            )

    assert mock_api_query_dict.call_args_list == expected_calls
def add_starting_balances(datahandler) -> List[AssetBalance]:
    """Adds some starting balances and other data to a testing instance

    Inserts four asset balances, one deliberately-invalid asset row (to
    exercise the warning path) and a set of per-location value snapshots.
    Returns the list of inserted AssetBalance objects.
    """
    balances = [
        AssetBalance(
            category=BalanceType.ASSET,
            time=Timestamp(1488326400),
            asset=A_BTC,
            amount='1',
            usd_value='1222.66',
        ),
        AssetBalance(
            category=BalanceType.ASSET,
            time=Timestamp(1488326400),
            asset=A_ETH,
            amount='10',
            usd_value='4517.4',
        ),
        AssetBalance(
            category=BalanceType.ASSET,
            time=Timestamp(1488326400),
            asset=A_EUR,
            amount='100',
            usd_value='61.5',
        ),
        AssetBalance(
            category=BalanceType.ASSET,
            time=Timestamp(1488326400),
            asset=A_XMR,
            amount='5',
            usd_value='135.6',
        ),
    ]
    datahandler.db.add_multiple_balances(balances)

    # Also add an unknown/invalid asset. This will generate a warning
    cursor = datahandler.db.conn.cursor()
    cursor.execute(
        'INSERT INTO timed_balances('
        '    time, currency, amount, usd_value) '
        ' VALUES(?, ?, ?, ?)',
        (1469326500, 'ADSADX', '10.1', '100.5'),
    )
    datahandler.db.conn.commit()

    # Per-location USD-value snapshots at three different timestamps,
    # including Location.TOTAL aggregate rows for each timestamp
    location_data = [
        LocationData(
            time=Timestamp(1451606400),
            location=Location.KRAKEN.serialize_for_db(),
            usd_value='100',
        ),
        LocationData(
            time=Timestamp(1451606400),
            location=Location.BANKS.serialize_for_db(),
            usd_value='1000',
        ),
        LocationData(
            time=Timestamp(1461606500),
            location=Location.POLONIEX.serialize_for_db(),
            usd_value='50',
        ),
        LocationData(
            time=Timestamp(1461606500),
            location=Location.KRAKEN.serialize_for_db(),
            usd_value='200',
        ),
        LocationData(
            time=Timestamp(1461606500),
            location=Location.BANKS.serialize_for_db(),
            usd_value='50000',
        ),
        LocationData(
            time=Timestamp(1491607800),
            location=Location.POLONIEX.serialize_for_db(),
            usd_value='100',
        ),
        LocationData(
            time=Timestamp(1491607800),
            location=Location.KRAKEN.serialize_for_db(),
            usd_value='2000',
        ),
        LocationData(
            time=Timestamp(1491607800),
            location=Location.BANKS.serialize_for_db(),
            usd_value='10000',
        ),
        LocationData(
            time=Timestamp(1491607800),
            location=Location.BLOCKCHAIN.serialize_for_db(),
            usd_value='200000',
        ),
        LocationData(
            time=Timestamp(1451606400),
            location=Location.TOTAL.serialize_for_db(),
            usd_value='1500',
        ),
        LocationData(
            time=Timestamp(1461606500),
            location=Location.TOTAL.serialize_for_db(),
            usd_value='4500',
        ),
        LocationData(
            time=Timestamp(1491607800),
            location=Location.TOTAL.serialize_for_db(),
            usd_value='10700.5',
        ),
    ]
    datahandler.db.add_multiple_location_data(location_data)

    return balances
'timestamp': 1523399409, # 10/04/2018 'block_number': 5417790, 'hash': DUMMY_HASH, 'from_address': DUMMY_ADDRESS, 'to_address': DUMMY_ADDRESS, 'value': 12323, 'gas': 5000000, 'gas_price': 2100000000, 'gas_used': 1900000, }, ] margin_history = [ MarginPosition( # before query period -- BTC/EUR: 422.90 exchange='poloniex', open_time=Timestamp(1463184190), # 14/05/2016 close_time=Timestamp(1464393600), # 28/05/2016 profit_loss=FVal(0.05), pl_currency=A_BTC, notes='margin1', ), MarginPosition( # before query period -- BTC/EUR: 542.87 exchange='poloniex', open_time=Timestamp(1472428800), # 29/08/2016 close_time=Timestamp(1473897600), # 15/09/2016 profit_loss=FVal('-0.042'), pl_currency=A_BTC, notes='margin2', ), MarginPosition( # BTC/EUR: 1039.935 exchange='poloniex',
TEST_ACCOUNTS = [ # For mint/redeem '0x2B888954421b424C5D3D9Ce9bB67c9bD47537d12', # For borrowing/liquidations '0xC440f3C87DC4B6843CABc413916220D4f4FeD117', # For mint/redeem + comp '0xF59D4937BF1305856C3a267bB07791507a3377Ee', # For repay '0x65304d6aff5096472519ca86a6a1fea31cb47Ced', ] EXPECTED_EVENTS = [CompoundEvent( event_type='mint', address=string_to_ethereum_address('0x2B888954421b424C5D3D9Ce9bB67c9bD47537d12'), block_number=9443573, timestamp=Timestamp(1581184577), asset=A_DAI, value=Balance(amount=FVal('2988.4343'), usd_value=FVal('3012.3417744')), to_asset=A_CDAI, to_value=Balance(amount=FVal('148015.6966153'), usd_value=FVal('3012.3417744')), realized_pnl=None, tx_hash='0xacc2e21f911a4e438966694e9ad16747878a15dae52de62a09f1ebabc8b26c8d', log_index=130, ), CompoundEvent( event_type='redeem', address=string_to_ethereum_address('0x2B888954421b424C5D3D9Ce9bB67c9bD47537d12'), block_number=9533397, timestamp=Timestamp(1582378248), asset=A_CDAI, value=Balance(amount=FVal('148015.6966153'), usd_value=FVal('3075.319825609865034570156')), to_asset=A_DAI,
class StatisticsAssetBalanceSchema(Schema):
    """Query arguments for per-asset balance statistics over a time range."""
    # The asset to get statistics for -- mandatory
    asset = AssetField(required=True)
    # Defaults span all history. ts_now is a callable, so marshmallow
    # resolves "now" at deserialization time, not at import time.
    from_timestamp = TimestampField(missing=Timestamp(0))
    to_timestamp = TimestampField(missing=ts_now)
def ts_now() -> Timestamp:
    """Return the current epoch time, truncated to whole seconds, as a Timestamp."""
    epoch_seconds = int(time.time())
    return Timestamp(epoch_seconds)
def populate_db_and_check_for_asset_renaming(
        cursor: Cursor,
        data: DataHandler,
        data_dir: Path,
        msg_aggregator: MessagesAggregator,
        username: str,
        to_rename_asset: str,
        renamed_asset: Asset,
        target_version: int,
):
    """Seed an old-schema DB with `to_rename_asset` entries, run the DB
    upgrade to `target_version` by re-logging in, and assert the asset got
    renamed (and merged where both names coexisted) everywhere it appears.
    """
    # Manually input data to the affected tables.
    # timed_balances, multisettings and (external) trades

    # At this time point we only have occurence of the to_rename_asset
    cursor.execute(
        'INSERT INTO timed_balances('
        '    time, currency, amount, usd_value) '
        ' VALUES(?, ?, ?, ?)',
        ('1557499129', to_rename_asset, '10.1', '150'),
    )
    # But add a time point where we got both to_rename_asset and
    # renamed_asset. This is to test merging if renaming falls in time where
    # both new and old asset had entries
    cursor.execute(
        'INSERT INTO timed_balances('
        '    time, currency, amount, usd_value) '
        ' VALUES(?, ?, ?, ?)',
        ('1558499129', to_rename_asset, '1.1', '15'),
    )
    cursor.execute(
        'INSERT INTO timed_balances('
        '    time, currency, amount, usd_value) '
        ' VALUES(?, ?, ?, ?)',
        ('1558499129', renamed_asset.identifier, '2.2', '25'),
    )
    # Add one different asset for control test
    cursor.execute(
        'INSERT INTO timed_balances('
        '    time, currency, amount, usd_value) '
        ' VALUES(?, ?, ?, ?)',
        ('1556392121', 'ETH', '5.5', '245'),
    )
    # Also populate an ignored assets entry
    cursor.execute(
        'INSERT INTO multisettings(name, value) VALUES(?, ?)',
        ('ignored_asset', to_rename_asset),
    )
    cursor.execute(
        'INSERT INTO multisettings(name, value) VALUES(?, ?)',
        ('ignored_asset', 'RDN'),
    )
    # Finally include it in some trades
    cursor.execute(
        'INSERT INTO trades('
        '  time,'
        '  location,'
        '  pair,'
        '  type,'
        '  amount,'
        '  rate,'
        '  fee,'
        '  fee_currency,'
        '  link,'
        '  notes)'
        'VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)',
        (
            1543298883,
            'external',
            'ETH_EUR',
            'buy',
            '100',
            '0.5',
            '0.1',
            'EUR',
            '',
            '',
        ),
    )
    cursor.execute(
        'INSERT INTO trades('
        '  time,'
        '  location,'
        '  pair,'
        '  type,'
        '  amount,'
        '  rate,'
        '  fee,'
        '  fee_currency,'
        '  link,'
        '  notes)'
        'VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)',
        (
            1563298883,
            'kraken',
            f'{to_rename_asset}_EUR',
            'buy',
            '100',
            '0.5',
            '0.1',
            to_rename_asset,
            '',
            '',
        ),
    )
    cursor.execute(
        'INSERT INTO trades('
        '  time,'
        '  location,'
        '  pair,'
        '  type,'
        '  amount,'
        '  rate,'
        '  fee,'
        '  fee_currency,'
        '  link,'
        '  notes)'
        'VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)',
        (
            1564218181,
            'binance',
            f'{to_rename_asset}_EUR',
            'buy',
            '100',
            '0.5',
            '0.1',
            'BNB',
            '',
            '',
        ),
    )
    data.db.conn.commit()

    # now relogin and check that all tables have appropriate data
    # NOTE(review): creation_patch / target_patch are presumably module-level
    # patch helpers in this test module -- confirm against surrounding code.
    del data
    data = DataHandler(data_dir, msg_aggregator)
    with creation_patch, target_patch(target_version=target_version):
        data.unlock(username, '123', create_new=False)
    # Check that owned and ignored assets reflect the new state
    ignored_assets = data.db.get_ignored_assets()
    assert A_RDN in ignored_assets
    assert renamed_asset in ignored_assets
    owned_assets = data.db.query_owned_assets()
    assert A_ETH in owned_assets
    assert renamed_asset in owned_assets

    # Make sure that the merging of both new and old name entry in same timestamp works
    timed_balances = data.db.query_timed_balances(
        from_ts=Timestamp(0),
        to_ts=Timestamp(2556392121),
        asset=renamed_asset,
    )
    assert len(timed_balances) == 2
    assert timed_balances[0].time == 1557499129
    assert timed_balances[0].amount == '10.1'
    assert timed_balances[0].usd_value == '150'
    # 1.1 + 2.2 and 15 + 25: old and new asset rows at the same timestamp
    # must have been merged by the upgrade
    assert timed_balances[1].time == 1558499129
    assert timed_balances[1].amount == '3.3'
    assert timed_balances[1].usd_value == '40'

    # Assert that trades got renamed properly
    cursor = data.db.conn.cursor()
    query = (
        'SELECT id,'
        '  time,'
        '  location,'
        '  pair,'
        '  type,'
        '  amount,'
        '  rate,'
        '  fee,'
        '  fee_currency,'
        '  link,'
        '  notes FROM trades ORDER BY time ASC;'
    )
    results = cursor.execute(query)
    trades = []
    for result in results:
        trades.append({
            'id': result[0],
            'timestamp': result[1],
            'location': result[2],
            'pair': result[3],
            'trade_type': result[4],
            'amount': result[5],
            'rate': result[6],
            'fee': result[7],
            'fee_currency': result[8],
            'link': result[9],
            'notes': result[10],
        })
    assert len(trades) == 3
    # Control trade untouched; the two renamed-asset trades updated in both
    # pair and fee_currency columns
    assert trades[0]['fee_currency'] == 'EUR'
    assert trades[0]['pair'] == 'ETH_EUR'
    assert trades[1]['fee_currency'] == renamed_asset.identifier
    assert trades[1]['pair'] == f'{renamed_asset.identifier}_EUR'
    assert trades[2]['pair'] == f'{renamed_asset.identifier}_EUR'
    assert data.db.get_version() == target_version