def deserialize_transaction_from_etherscan(
        data: Dict[str, Any],
        internal: bool,
) -> EthereumTransaction:
    """Reads dict data of a transaction from etherscan and deserializes it

    Args:
        data: The raw etherscan transaction entry.
        internal: True if this entry comes from the internal transactions list,
            which contains neither a gas price nor a nonce.

    Can raise DeserializationError if something is wrong
    """
    try:
        # internal tx list contains no gasprice
        gas_price = FVal(-1) if internal else FVal(data['gasPrice'])
        tx_hash = read_hash(data, 'hash')
        input_data = read_hash(data, 'input')
        timestamp = deserialize_timestamp(data['timeStamp'])
        block_number = read_integer(data, 'blockNumber')
        # internal tx list contains no nonce either, so use -1 as sentinel
        nonce = -1 if internal else read_integer(data, 'nonce')
        return EthereumTransaction(
            timestamp=timestamp,
            block_number=block_number,
            tx_hash=tx_hash,
            from_address=data['from'],
            to_address=data['to'],
            value=deserialize_fval(data['value']),
            gas=deserialize_fval(data['gas']),
            gas_price=gas_price,
            gas_used=deserialize_fval(data['gasUsed']),
            input_data=input_data,
            nonce=nonce,
        )
    except KeyError as e:
        # chain the original KeyError so the missing key's traceback is kept
        raise DeserializationError(
            f'Etherscan ethereum transaction missing expected key {str(e)}') from e
def get_value_stats(
        self,
        query_filter: HistoryEventFilterQuery,
) -> Tuple[FVal, List[Tuple[Asset, FVal, FVal]]]:
    """Returns the sum of the USD value at the time of acquisition and the amount received by asset"""
    db_cursor = self.db.conn.cursor()
    filters_str, bindings = query_filter.prepare(with_pagination=False, with_order=False)

    # First query the grand total of USD value over all matched events
    total_usd_value = ZERO
    try:
        sum_query = 'SELECT SUM(CAST(usd_value AS REAL)) FROM history_events ' + filters_str
        summed = db_cursor.execute(sum_query, bindings).fetchone()[0]
        if summed is not None:
            total_usd_value = deserialize_fval(
                value=summed,
                name='usd value in history events stats',
                location='get_value_stats',
            )
    except DeserializationError as e:
        log.error(f'Didnt get correct valid usd_value for history_events query. {str(e)}')

    # Then query per-asset sums of amount and USD value
    by_asset_query = (
        'SELECT asset, SUM(CAST(amount AS REAL)), SUM(CAST(usd_value AS REAL)) ' +
        'FROM history_events ' + filters_str + ' GROUP BY asset;'
    )
    assets_amounts = []
    for asset_id, amount_sum, usd_sum in db_cursor.execute(by_asset_query, bindings):
        try:
            entry = (
                Asset(asset_id),
                deserialize_fval(
                    value=amount_sum,
                    name='total amount in history events stats',
                    location='get_value_stats',
                ),
                deserialize_fval(
                    value=usd_sum,
                    name='total usd value in history events stats',
                    location='get_value_stats',
                ),
            )
        except UnknownAsset as e:
            log.debug(f'Found unknown asset {asset_id} in staking event. {str(e)}')
        except DeserializationError as e:
            log.debug(f'Failed to deserialize amount {amount_sum}. {str(e)}')
        else:
            assets_amounts.append(entry)

    return total_usd_value, assets_amounts
def deserialize_from_db(cls: Type[T], timestamp: Timestamp, stringified_json: str) -> T:
    """May raise:
    - DeserializationError if something is wrong with reading this from the DB
    """
    try:
        data = json.loads(stringified_json)
    except json.decoder.JSONDecodeError as e:
        raise DeserializationError(
            f'Could not decode processed accounting event json from the DB due to {str(e)}',
        ) from e

    try:
        # Note: key access order matters for which missing key gets reported first
        pnl = PNL(
            taxable=deserialize_fval(data['pnl_taxable'], name='pnl_taxable', location='processed event decoding'),  # noqa: E501
            free=deserialize_fval(data['pnl_free'], name='pnl_free', location='processed event decoding'),  # noqa: E501
        )
        cost_basis = None if data['cost_basis'] is None else CostBasisInfo.deserialize(data['cost_basis'])  # noqa: E501
        event = cls(
            type=AccountingEventType.deserialize(data['type']),
            notes=data['notes'],
            location=Location.deserialize(data['location']),
            timestamp=timestamp,
            asset=Asset(data['asset']),
            free_amount=deserialize_fval(data['free_amount'], name='free_amount', location='processed event decoding'),  # noqa: E501
            taxable_amount=deserialize_fval(data['taxable_amount'], name='taxable_amount', location='processed event decoding'),  # noqa: E501
            price=deserialize_price(data['price']),
            pnl=pnl,
            cost_basis=cost_basis,
            index=data['index'],
            extra_data=data['extra_data'],
        )
        # These flags are not constructor arguments, so set them afterwards
        event.count_cost_basis_pnl = data['count_cost_basis_pnl']
        event.count_entire_amount_spend = data['count_entire_amount_spend']
        return event
    except KeyError as e:
        raise DeserializationError(f'Could not decode processed accounting event json from the DB due to missing key {str(e)}') from e  # noqa: E501
def rows_missing_prices_in_base_entries(
        self,
        filter_query: HistoryEventFilterQuery,
) -> List[Tuple[str, FVal, Asset, Timestamp]]:
    """
    Get missing prices for history base entries based on filter query
    """
    filters_str, bindings = filter_query.prepare()
    select_query = 'SELECT identifier, amount, asset, timestamp FROM history_events ' + filters_str  # noqa: E501
    cursor = self.db.conn.cursor()
    cursor.execute(select_query, bindings)

    entries = []
    for identifier, amount_raw, asset_name, timestamp in cursor:
        try:
            # tuple elements are evaluated left-to-right: amount first, asset second
            entries.append((
                identifier,
                deserialize_fval(
                    value=amount_raw,
                    name='historic base entry usd_value query',
                    location='query_missing_prices',
                ),
                Asset(asset_name),
                ts_ms_to_sec(TimestampMS(timestamp)),
            ))
        except DeserializationError as e:
            log.error(
                f'Failed to read value from historic base entry {identifier} '
                f'with amount. {str(e)}',
            )
        except UnknownAsset as e:
            log.error(
                f'Failed to read asset from historic base entry {identifier} '
                f'with asset identifier {asset_name}. {str(e)}',
            )
    return entries
def history_event_from_kraken(
    events: List[Dict[str, Any]],
    name: str,
    msg_aggregator: MessagesAggregator,
) -> Tuple[List[HistoryBaseEntry], bool]:
    """
    This function gets raw data from kraken and creates a list of related history events
    to be used in the app. It returns a list of events and a boolean in the case that an unknown
    type is found.

    Args:
        events: Raw kraken ledger entries that share the same refid.
        name: The user-given name of this kraken exchange instance.
        msg_aggregator: Used to report errors to the user.
    """
    group_events = []
    found_unknown_event = False
    # Fee events get sequence indices past the raw events to avoid collisions
    current_fee_index = len(events)
    for idx, raw_event in enumerate(events):
        try:
            timestamp = TimestampMS((deserialize_fval(
                value=raw_event['time'], name='time', location='kraken ledger processing',
            ) * 1000).to_int(exact=False))
            identifier = raw_event['refid']
            event_type = kraken_ledger_entry_type_to_ours(raw_event['type'])
            asset = asset_from_kraken(raw_event['asset'])
            event_subtype = HistoryEventSubType.NONE
            notes = None
            raw_amount = deserialize_asset_amount(raw_event['amount'])

            # If we don't know how to handle an event atm or we find an unsupported
            # event type the logic will be to store it as unknown and if in the future
            # we need some information from it we can take actions to process them
            if event_type == HistoryEventType.TRANSFER:
                if raw_event['subtype'] == 'spottostaking':
                    event_type = HistoryEventType.STAKING
                    event_subtype = HistoryEventSubType.DEPOSIT_ASSET
                elif raw_event['subtype'] == 'stakingfromspot':
                    event_type = HistoryEventType.STAKING
                    event_subtype = HistoryEventSubType.RECEIVE_WRAPPED
                elif raw_event['subtype'] == 'stakingtospot':
                    event_type = HistoryEventType.STAKING
                    event_subtype = HistoryEventSubType.REMOVE_ASSET
                elif raw_event['subtype'] == 'spotfromstaking':
                    event_type = HistoryEventType.STAKING
                    event_subtype = HistoryEventSubType.RETURN_WRAPPED
            elif event_type == HistoryEventType.ADJUSTMENT:
                # sign of the amount decides whether the adjustment spends or receives
                if raw_amount < ZERO:
                    event_subtype = HistoryEventSubType.SPEND
                else:
                    event_subtype = HistoryEventSubType.RECEIVE
            elif event_type == HistoryEventType.STAKING:
                event_subtype = HistoryEventSubType.REWARD
            elif event_type == HistoryEventType.INFORMATIONAL:
                found_unknown_event = True
                notes = raw_event['type']
                log.warning(
                    f'Encountered kraken historic event type we do not process. {raw_event}',
                )
            fee_amount = deserialize_asset_amount(raw_event['fee'])

            # Make sure to not generate an event for KFEES that is not of type FEE
            if asset != A_KFEE:
                group_events.append(HistoryBaseEntry(
                    event_identifier=identifier,
                    sequence_index=idx,
                    timestamp=timestamp,
                    location=Location.KRAKEN,
                    location_label=name,
                    asset=asset,
                    balance=Balance(
                        amount=raw_amount,
                        usd_value=ZERO,
                    ),
                    notes=notes,
                    event_type=event_type,
                    event_subtype=event_subtype,
                ))
            if fee_amount != ZERO:
                group_events.append(HistoryBaseEntry(
                    event_identifier=identifier,
                    sequence_index=current_fee_index,
                    timestamp=timestamp,
                    location=Location.KRAKEN,
                    location_label=name,
                    asset=asset,
                    balance=Balance(
                        amount=fee_amount,
                        usd_value=ZERO,
                    ),
                    notes=notes,
                    event_type=event_type,
                    event_subtype=HistoryEventSubType.FEE,
                ))
                # Increase the fee index to not have duplicates in the case of having a normal
                # fee and KFEE
                current_fee_index += 1
        except (DeserializationError, KeyError, UnknownAsset) as e:
            msg = str(e)
            if isinstance(e, KeyError):
                # fixed message typo ('Keyrror') and aligned phrasing with the
                # iconomi balance handler's KeyError reporting
                msg = f'missing key entry for {msg}.'
            msg_aggregator.add_error(
                f'Failed to read ledger event from kraken {raw_event} due to {msg}',
            )
            return [], False

    return group_events, found_unknown_event
def query_kraken_ledgers(self, start_ts: Timestamp, end_ts: Timestamp) -> bool:
    """
    Query Kraken's ledger to retrieve events and transform them to our internal representation
    of history events. Internally we look for the query range that needs to be queried in the
    range (start_ts, end_ts) to avoid double quering the kraken API when this method is called
    for deposits/withdrawals and trades. The events queried are then stored in the database.

    Returns true if any query to the kraken API was not successful
    """
    ranges = DBQueryRanges(self.db)
    range_query_name = f'{self.location}_history_events_{self.name}'
    ranges_to_query = ranges.get_location_query_ranges(
        location_string=range_query_name,
        start_ts=start_ts,
        end_ts=end_ts,
    )

    def _sort_key(entry: Dict[str, Any]) -> FVal:
        """Sort key: event time deserialized and scaled to milliseconds."""
        return deserialize_fval(entry['time'], 'time', 'kraken ledgers') * 1000

    had_errors = False
    for query_start_ts, query_end_ts in ranges_to_query:
        log.debug(
            f'Querying kraken ledger entries from {query_start_ts} to {query_end_ts}',
        )
        try:
            response, had_errors = self.query_until_finished(
                endpoint='Ledgers',
                keyname='ledger',
                start_ts=query_start_ts,
                end_ts=query_end_ts,
                extra_dict={},
            )
        except RemoteError as e:
            self.msg_aggregator.add_error(
                f'Failed to query kraken ledger between {query_start_ts} and '
                f'{query_end_ts}. {str(e)}',
            )
            return True

        # Group related events by their reference id
        grouped_raw_events = defaultdict(list)
        for raw_event in response:
            grouped_raw_events[raw_event['refid']].append(raw_event)

        new_events = []
        for events in grouped_raw_events.values():
            try:
                events = sorted(events, key=_sort_key)
            except DeserializationError as e:
                self.msg_aggregator.add_error(
                    f'Failed to read timestamp in kraken event group '
                    f'due to {str(e)}. For more information read the logs. Skipping event',
                )
                log.error(f'Failed to read timestamp for {events}')
                continue

            group_events, found_unknown_event = history_event_from_kraken(
                events=events,
                name=self.name,
                msg_aggregator=self.msg_aggregator,
            )
            # Demote every event of a group containing an unknown type to informational
            if found_unknown_event:
                for event in group_events:
                    event.event_type = HistoryEventType.INFORMATIONAL
            new_events.extend(group_events)

        if len(new_events) != 0:
            try:
                self.history_events_db.add_history_events(new_events)
            except InputError as e:
                self.msg_aggregator.add_error(
                    f'Failed to save kraken events from {query_start_ts} to {query_end_ts} '
                    f'in database. {str(e)}',
                )

    ranges.update_used_query_range(
        location_string=range_query_name,
        queried_ranges=[(start_ts, end_ts)] + ranges_to_query,
    )
    if had_errors is True:
        return True  # we had errors so stop any further queries and quit
    return False  # no errors
def query_balances(self, **kwargs: Any) -> ExchangeQueryBalances:
    """Query all ICONOMI asset and strategy balances.

    Returns a (balances, message) tuple. On API failure balances is None and
    the message explains the error; on success the message is empty.
    May raise RemoteError if the API reports values in a currency other than USD.
    """
    def _read_usd_value(balance_info: Dict[str, Any]) -> 'Optional[FVal]':
        # Deduplicated helper: deserialize the entry's USD value, warning the
        # user and returning None on bad/missing data so the caller can skip it.
        try:
            return deserialize_fval(balance_info['value'], 'usd_value', 'iconomi')
        except (DeserializationError, KeyError) as e:
            msg = str(e)
            if isinstance(e, KeyError):
                msg = f'missing key entry for {msg}.'
            self.msg_aggregator.add_warning(
                f'Skipping iconomi balance entry {balance_info} due to {msg}',
            )
            return None

    assets_balance: Dict[Asset, Balance] = {}
    try:
        resp_info = self._api_query('get', 'user/balance')
    except RemoteError as e:
        msg = ('ICONOMI API request failed. Could not reach ICONOMI due '
               'to {}'.format(e))
        log.error(msg)
        return None, msg

    if resp_info['currency'] != 'USD':
        raise RemoteError('Iconomi API did not return values in USD')

    for balance_info in resp_info['assetList']:
        ticker = balance_info['ticker']
        try:
            asset = asset_from_iconomi(ticker)
        except (UnknownAsset, UnsupportedAsset) as e:
            asset_tag = 'unknown' if isinstance(e, UnknownAsset) else 'unsupported'
            self.msg_aggregator.add_warning(
                f'Found {asset_tag} ICONOMI asset {ticker}. '
                f' Ignoring its balance query.',
            )
            continue

        usd_value = _read_usd_value(balance_info)
        if usd_value is None:
            continue
        try:
            amount = deserialize_asset_amount(balance_info['balance'])
        except (DeserializationError, KeyError) as e:
            msg = str(e)
            if isinstance(e, KeyError):
                msg = f'missing key entry for {msg}.'
            self.msg_aggregator.add_warning(
                f'Skipping iconomi balance entry {balance_info} due to {msg}',
            )
            continue

        assets_balance[asset] = Balance(
            amount=amount,
            usd_value=usd_value,
        )

    for balance_info in resp_info['daaList']:
        ticker = balance_info['ticker']
        if ticker != 'AUSTS':
            self.msg_aggregator.add_warning(
                f'Found unsupported ICONOMI strategy {ticker}. '
                f' Ignoring its balance query.',
            )
            continue

        # The AUSTS strategy is 'ICONOMI Earn'. We know that this strategy holds its
        # value in Anchor UST (AUST). That's why we report the user balance for this
        # strategy as usd_value / AUST price.
        try:
            aust_usd_price = Inquirer().find_usd_price(asset=A_AUST)
        except RemoteError as e:
            self.msg_aggregator.add_error(
                f'Error processing ICONOMI balance entry due to inability to '
                f'query USD price: {str(e)}. Skipping balance entry',
            )
            continue

        if aust_usd_price == ZERO:
            self.msg_aggregator.add_error(
                'Error processing ICONOMI balance entry because the USD price '
                'for AUST was reported as 0. Skipping balance entry',
            )
            continue

        usd_value = _read_usd_value(balance_info)
        if usd_value is None:
            continue
        assets_balance[A_AUST] = Balance(
            amount=usd_value / aust_usd_price,
            usd_value=usd_value,
        )

    return assets_balance, ''