def deserialize_timestamp_from_date(date: str, formatstr: str, location: str) -> Timestamp:
    """Deserializes a timestamp from a date entry depending on the format str

    formatstr can also have a special value of 'iso8601' in which case the
    iso8601 function will be used.

    Can throw DeserializationError if the data is not as expected
    """
    if not date:
        raise DeserializationError(
            f'Failed to deserialize a timestamp from a null entry in {location}',
        )
    if not isinstance(date, str):
        raise DeserializationError(
            f'Failed to deserialize a timestamp from a {type(date)} entry in {location}',
        )

    if formatstr == 'iso8601':
        return iso8601ts_to_timestamp(date)

    try:
        return Timestamp(create_timestamp(datestr=date, formatstr=formatstr))
    except ValueError as e:
        # Chain the original ValueError so the root cause is preserved
        raise DeserializationError(
            f'Failed to deserialize {date} {location} timestamp entry') from e
def convert_transaction_from_covalent(
        data: Dict[str, Any],
) -> CovalentTransaction:
    """Reads dict data of a transaction from Covalent and deserializes it

    Can raise DeserializationError if something is wrong
    """
    try:
        signed_at = create_timestamp(
            datestr=data['block_signed_at'],
            formatstr=DATE_FORMAT_COVALENT,
        )
        # TODO input and nonce is decoded in Covalent api, encoded in future
        return CovalentTransaction(
            timestamp=signed_at,
            block_number=data['block_height'],
            tx_hash=data['tx_hash'],
            from_address=data['from_address'],
            to_address=data['to_address'],
            value=read_integer(data, 'value', DEFAULT_API),
            gas=read_integer(data, 'gas_offered', DEFAULT_API),
            gas_price=read_integer(data, 'gas_price', DEFAULT_API),
            gas_used=read_integer(data, 'gas_spent', DEFAULT_API),
            input_data='0x',
            nonce=0,
        )
    except KeyError as e:
        raise DeserializationError(
            f'Covalent avalanche transaction missing expected key {str(e)}',
        ) from e
def between_date(value: Dict) -> bool:
    """Return True if the entry's signing time falls within [from_ts, to_ts]."""
    try:
        entry_ts = create_timestamp(
            datestr=value['block_signed_at'],
            formatstr=DATE_FORMAT_COVALENT,
        )
    except KeyError:
        return False
    return from_ts <= entry_ts <= to_ts  # type: ignore
def _post_process(before: Dict) -> Dict: """Poloniex uses datetimes so turn them into timestamps here""" after = before if ('return' in after): if (isinstance(after['return'], list)): for x in range(0, len(after['return'])): if (isinstance(after['return'][x], dict)): if ('datetime' in after['return'][x] and 'timestamp' not in after['return'][x]): after['return'][x]['timestamp'] = float( create_timestamp(after['return'][x]['datetime']), ) return after
def assert_csv_export_response(response, csv_dir, is_download=False):
    """Verify a CSV-export API response and the generated all-events CSV.

    Only checks that the CSV is well formed; value correctness is covered by
    unit/test_accounting.py.
    """
    if is_download:
        assert response.status_code == HTTPStatus.OK
    else:
        assert_proper_response(response)
        data = response.json()
        assert data['message'] == ''
        assert data['result'] is True

    allowed_locations = (
        'kraken', 'bittrex', 'binance', 'poloniex', 'blockchain', 'bitmex',
    )
    allowed_types = {
        str(AccountingEventType.TRADE),
        str(AccountingEventType.FEE),
        str(AccountingEventType.ASSET_MOVEMENT),
        str(AccountingEventType.PREFORK_ACQUISITION),
        str(AccountingEventType.TRANSACTION_EVENT),
        str(AccountingEventType.MARGIN_POSITION),
        str(AccountingEventType.LOAN),
    }
    required_fields = (
        'notes', 'asset', 'taxable_amount', 'free_amount', 'price',
        'pnl_taxable', 'cost_basis_taxable', 'pnl_free', 'cost_basis_free',
    )
    with open(os.path.join(csv_dir, FILENAME_ALL_CSV), newline='') as csvfile:
        rows = list(csv.DictReader(csvfile))

    for row in rows:
        assert len(row) == 12
        assert row['location'] in allowed_locations
        assert row['type'] in allowed_types
        assert create_timestamp(row['timestamp'], '%d/%m/%Y %H:%M:%S') > 0
        for field in required_fields:
            assert row[field] is not None
    assert len(rows) == 47
def from_otc_trade(trade: Dict[str, Any]) -> Dict[str, Any]:
    """Convert an OTC-prefixed trade dict into a plain external-trade dict."""
    parsed_ts = create_timestamp(trade['otc_timestamp'], formatstr='%d/%m/%Y %H:%M')
    result = {
        'timestamp': parsed_ts,
        'location': 'external',
        'pair': trade['otc_pair'],
        'trade_type': trade['otc_type'],
        'amount': str(trade['otc_amount']),
        'rate': str(trade['otc_rate']),
        'fee': str(trade['otc_fee']),
        'fee_currency': trade['otc_fee_currency'],
        'link': trade['otc_link'],
        'notes': trade['otc_notes'],
    }
    # Presence check (not truthiness): an explicit None id is still copied over
    if 'otc_id' in trade:
        result['id'] = trade['otc_id']
    return result
def deserialize_timestamp_from_date(date: str, formatstr: str, location: str) -> Timestamp:
    """Deserializes a timestamp from a date entry depending on the format str

    Can throw DeserializationError if the data is not as expected
    """
    if not date:
        raise DeserializationError(
            f'Failed to deserialize a timestamp from a null entry in {location}',
        )
    if not isinstance(date, str):
        raise DeserializationError(
            f'Failed to deserialize a timestamp from a {type(date)} entry in {location}',
        )

    try:
        return Timestamp(create_timestamp(datestr=date, formatstr=formatstr))
    except ValueError as e:
        # Chain the original ValueError so the root cause is preserved
        raise DeserializationError(f'Failed to deserialize {date} {location} timestamp entry') from e
def deserialize_timestamp_from_date(
        date: Optional[str],
        formatstr: str,
        location: str,
        skip_milliseconds: bool = False,
) -> Timestamp:
    """Deserializes a timestamp from a date entry depending on the format str

    formatstr can also have a special value of 'iso8601' in which case the
    iso8601 function will be used.

    Can throw DeserializationError if the data is not as expected
    """
    if not date:
        raise DeserializationError(
            f'Failed to deserialize a timestamp from a null entry in {location}',
        )
    if not isinstance(date, str):
        raise DeserializationError(
            f'Failed to deserialize a timestamp from a {type(date)} entry in {location}',
        )

    if skip_milliseconds:
        # Seems that poloniex added milliseconds in their timestamps.
        # https://github.com/rotki/rotki/issues/1631
        # We don't deal with milliseconds in rotki times so we can safely remove it
        date = date.split('.', 1)[0]

    if formatstr == 'iso8601':
        return iso8601ts_to_timestamp(date)

    stripped = date.rstrip('Z')
    try:
        return Timestamp(create_timestamp(datestr=stripped, formatstr=formatstr))
    except ValueError as e:
        raise DeserializationError(
            f'Failed to deserialize {stripped} {location} timestamp entry',
        ) from e
def get_transaction_receipt(self, tx_hash: str) -> Optional[Dict[str, Any]]:
    """Gets the receipt for the given transaction hash

    May raise:
    - RemoteError due to self._query().
    """
    result = self._query(module=tx_hash, action='transaction_v2')
    if result is None:
        return None

    try:
        # Covalent returns a list of items; the receipt is the first one
        receipt = result['data']['items'][0]
        receipt['timestamp'] = create_timestamp(
            datestr=receipt['block_signed_at'],
            formatstr=DATE_FORMAT_COVALENT,
        )
    except KeyError:
        return None
    return receipt
def __new__(
        cls,
        data_directory: Path = None,
        history_date_start: str = None,
        cryptocompare: 'Cryptocompare' = None,
) -> 'PriceHistorian':
    """Singleton constructor for PriceHistorian.

    The first call must provide all arguments (enforced via asserts);
    any later call ignores its arguments and returns the existing instance.
    """
    if PriceHistorian.__instance is not None:
        return PriceHistorian.__instance

    # All three arguments are mandatory on first instantiation only
    assert data_directory, 'arguments should be given at the first instantiation'
    assert history_date_start, 'arguments should be given at the first instantiation'
    assert cryptocompare, 'arguments should be given at the first instantiation'

    PriceHistorian.__instance = object.__new__(cls)

    # get the start date for historical data
    PriceHistorian._historical_data_start = create_timestamp(
        datestr=history_date_start,
        formatstr="%d/%m/%Y",
    )
    PriceHistorian._cryptocompare = cryptocompare

    return PriceHistorian.__instance
def timerange_check(
        asset_symbol: str,
        our_asset: Dict[str, Any],
        our_data: Dict[str, Any],
        paprika_data: Dict[str, Any],
        cmc_data: Dict[str, Any],
        always_keep_our_time: bool,
        token_address: EthAddress = None,
) -> Dict[str, Any]:
    """Process the started timestamps from coin paprika and coinmarketcap.

    Then compare to our data and provide choices to clean up the data.
    Interactive: when sources disagree the user picks which timestamp
    to keep via stdin; bails out with sys.exit(1) on an empty choice.
    Returns the (possibly updated) our_data mapping.
    """
    if Asset(asset_symbol).is_fiat():
        # Fiat does not have started date (or we don't care about it)
        return our_data

    # Pull raw 'started' values from each external source, if present
    paprika_started = None
    if paprika_data:
        paprika_started = paprika_data['started_at']
    cmc_started = None
    if cmc_data:
        cmc_started = cmc_data['first_historical_data']

    if not cmc_started and not paprika_started and not token_address:
        print(f'Did not find a started date for asset {asset_symbol} in any of the external APIs')
        return our_data

    paprika_started_ts = None
    if paprika_started:
        paprika_started_ts = create_timestamp(paprika_started, formatstr='%Y-%m-%dT%H:%M:%SZ')

    cmc_started_ts = None
    if cmc_data:
        # NOTE(review): guard checks cmc_data rather than cmc_started, so a
        # null 'first_historical_data' would be passed to iso8601ts_to_timestamp.
        # Confirm that helper tolerates None.
        cmc_started_ts = iso8601ts_to_timestamp(cmc_started)

    if asset_symbol in PREFER_OUR_STARTED:
        # Already manually checked
        assert 'started' in our_asset
        return our_data

    our_started = our_asset.get('started', None)

    # if it's an eth token entry, get the contract creation time too
    if token_address:
        contract_creation_ts = get_token_contract_creation_time(token_address)

    if not our_started:
        # If we don't have any data and CMC and paprika agree just use their timestamp
        if cmc_started == paprika_started and cmc_started is not None:
            our_data[asset_symbol]['started'] = cmc_started
            return our_data

    if our_started and always_keep_our_time:
        return our_data

    # Any disagreement (or missing local value) requires a manual choice
    if our_started is None or our_started != cmc_started or our_started != paprika_started:
        choices = (1, 2, 3)
        msg = (
            f'For asset {asset_symbol} the started times are: \n'
            f'(1) Our data: {our_started} -- {timestamp_to_date(our_started) if our_started else ""}\n'
            f'(2) Coinpaprika: {paprika_started_ts} -- '
            f'{timestamp_to_date(paprika_started_ts) if paprika_started_ts else ""}\n'
            f'(3) Coinmarketcap: {cmc_started_ts} -- '
            f'{timestamp_to_date(cmc_started_ts) if cmc_started_ts else ""} \n'
        )
        if token_address:
            # Contract creation time becomes a 4th selectable option
            msg += (
                f'(4) Contract creation: {contract_creation_ts} -- '
                f'{timestamp_to_date(contract_creation_ts) if contract_creation_ts else ""}\n'
            )
            choices = (1, 2, 3, 4)

        msg += f'Choose a number (1)-({choices[-1]}) to choose which timestamp to use: '
        choice = choose_multiple(msg, choices)
        if choice == 1:
            if not our_started:
                print('Chose our timestamp but we got no timestamp. Bailing ...')
                sys.exit(1)
            timestamp = our_started
        elif choice == 2:
            if not paprika_started_ts:
                print("Chose coin paprika's timestamp but it's empty. Bailing ...")
                sys.exit(1)
            timestamp = paprika_started_ts
        elif choice == 3:
            if not cmc_started_ts:
                print("Chose coinmarketcap's timestamp but it's empty. Bailing ...")
                sys.exit(1)
            timestamp = cmc_started_ts
        elif choice == 4:
            if not contract_creation_ts:
                print("Chose contract creation timestamp but it's empty. Bailing ...")
                sys.exit(1)
            timestamp = contract_creation_ts

        our_data[asset_symbol]['started'] = timestamp

    return our_data
def query_historical_price(
        self,
        from_asset: Asset,
        to_asset: Asset,
        timestamp: Timestamp,
) -> Price:
    """Return the historical daily price of from_asset in to_asset at timestamp.

    Consults the global DB price cache first and otherwise queries the
    coingecko daily-history endpoint, caching any price obtained. Returns
    Price(ZERO) when the pair/asset is unsupported or the response lacks
    the price data.
    """
    vs_currency = Coingecko.check_vs_currencies(
        from_asset=from_asset,
        to_asset=to_asset,
        location='historical price',
    )
    if not vs_currency:
        return Price(ZERO)

    try:
        from_coingecko_id = from_asset.to_coingecko()
    except UnsupportedAsset:
        log.warning(
            f'Tried to query coingecko historical price from {from_asset.identifier} '
            f'to {to_asset.identifier}. But from_asset is not supported in coingecko',
        )
        return Price(ZERO)

    # check DB cache
    cached = GlobalDBHandler().get_historical_price(
        from_asset=from_asset,
        to_asset=to_asset,
        timestamp=timestamp,
        max_seconds_distance=DAY_IN_SECONDS,
        source=HistoricalPriceOracle.COINGECKO,
    )
    if cached:
        return cached.price

    # no cache, query coingecko for daily price
    date = timestamp_to_date(timestamp, formatstr='%d-%m-%Y')
    result = self._query(
        module='coins',
        subpath=f'{from_coingecko_id}/history',
        options={'date': date, 'localization': False},
    )
    try:
        price = Price(FVal(result['market_data']['current_price'][vs_currency]))
    except KeyError as e:
        log.warning(
            f'Queried coingecko historical price from {from_asset.identifier} '
            f'to {to_asset.identifier}. But got key error for {str(e)} when '
            f'processing the result.',
        )
        return Price(ZERO)

    # save result in the DB and return
    GlobalDBHandler().add_historical_prices(entries=[
        HistoricalPrice(
            from_asset=from_asset,
            to_asset=to_asset,
            source=HistoricalPriceOracle.COINGECKO,
            timestamp=create_timestamp(date, formatstr='%d-%m-%Y'),
            price=price,
        )
    ])
    return price
def assert_csv_export_response(response, profit_currency, csv_dir, is_download=False):
    """Verify a CSV-export API response and the generated per-category CSV files.

    Checks each file (trades, loans, asset movements, gas, margin, all-events)
    for well-formed rows and expected row counts.
    """
    if is_download:
        assert response.status_code == HTTPStatus.OK
    else:
        assert_proper_response(response)
        data = response.json()
        assert data['message'] == ''
        assert data['result'] is True

    # and check the csv files were generated succesfully. Here we are only checking
    # for valid CSV and not for the values to be valid.
    # TODO: In the future make a test that checks the values are also valid
    with open(os.path.join(csv_dir, FILENAME_TRADES_CSV), newline='') as csvfile:
        reader = csv.DictReader(csvfile)
        count = 0
        for row in reader:
            assert_csv_formulas_trades(row, profit_currency)
            assert len(row) == 19
            assert row['location'] in ('kraken', 'bittrex', 'binance', 'poloniex')
            assert row['type'] in ('buy', 'sell')
            assert row['asset'] is not None
            assert FVal(row[f'fee_in_{profit_currency.identifier}']) >= ZERO
            assert FVal(row[f'price_in_{profit_currency.identifier}']) >= ZERO
            assert FVal(row[f'fee_in_{profit_currency.identifier}']) is not None
            assert FVal(row[f'gained_or_invested_{profit_currency.identifier}']) is not None
            assert FVal(row['amount']) > ZERO
            assert row['taxable_amount'] is not None
            assert row['exchanged_for'] is not None
            key = f'exchanged_asset_{profit_currency.identifier}_exchange_rate'
            assert FVal(row[key]) >= ZERO
            key = f'taxable_bought_cost_in_{profit_currency.identifier}'
            assert row[key] is not None
            assert FVal(row[f'taxable_gain_in_{profit_currency.identifier}']) >= ZERO
            assert row[f'taxable_profit_loss_in_{profit_currency.identifier}'] is not None
            assert create_timestamp(row['time'], '%d/%m/%Y %H:%M:%S') > 0
            assert row['cost_basis'] is not None
            assert row['is_virtual'] in ('True', 'False')
            assert row[f'total_bought_cost_in_{profit_currency.identifier}'] is not None
            assert 'link' in row and 'notes' in row
            count += 1
    num_trades = 18
    assert count == num_trades, f'Incorrect amount of trade CSV entries found {count}'

    with open(os.path.join(csv_dir, FILENAME_LOAN_PROFITS_CSV), newline='') as csvfile:
        reader = csv.DictReader(csvfile)
        count = 0
        for row in reader:
            assert len(row) == 9
            assert row['location'] == 'poloniex'
            assert create_timestamp(row['open_time'], '%d/%m/%Y %H:%M:%S') > 0
            assert create_timestamp(row['close_time'], '%d/%m/%Y %H:%M:%S') > 0
            assert row['gained_asset'] is not None
            assert FVal(row['gained_amount']) > ZERO
            assert FVal(row['lent_amount']) > ZERO
            assert FVal(row[f'profit_in_{profit_currency.identifier}']) > ZERO
            count += 1
    num_loans = 2
    assert count == num_loans, 'Incorrect amount of loans CSV entries found'

    with open(os.path.join(csv_dir, FILENAME_ASSET_MOVEMENTS_CSV), newline='') as csvfile:
        reader = csv.DictReader(csvfile)
        count = 0
        for row in reader:
            assert len(row) == 8
            assert create_timestamp(row['time'], '%d/%m/%Y %H:%M:%S') > 0
            assert row['exchange'] in [str(x) for x in SUPPORTED_EXCHANGES]
            assert row['type'] in ('deposit', 'withdrawal')
            assert row['moving_asset'] is not None
            assert FVal(row['fee_in_asset']) >= ZERO
            assert FVal(row[f'fee_in_{profit_currency.identifier}']) >= ZERO
            assert row['link'] != ''
            assert row['notes'] == ''
            count += 1
    num_asset_movements = 13
    assert count == num_asset_movements, f'Incorrect amount of asset movement CSV entries found {count}'  # noqa: E501

    with open(os.path.join(csv_dir, FILENAME_GAS_CSV), newline='') as csvfile:
        reader = csv.DictReader(csvfile)
        count = 0
        for row in reader:
            assert len(row) == 4
            assert create_timestamp(row['time'], '%d/%m/%Y %H:%M:%S') > 0
            assert row['transaction_hash'] is not None
            assert FVal(row['eth_burned_as_gas']) > ZERO
            assert FVal(row[f'cost_in_{profit_currency.identifier}']) > ZERO
            count += 1
    num_transactions = 3
    assert count == num_transactions, 'Incorrect amount of transaction costs CSV entries found'

    with open(os.path.join(csv_dir, FILENAME_MARGIN_CSV), newline='') as csvfile:
        reader = csv.DictReader(csvfile)
        count = 0
        for row in reader:
            assert len(row) == 8
            assert row['location'] == 'bitmex'
            assert row['name'] is not None
            assert create_timestamp(row['time'], '%d/%m/%Y %H:%M:%S') > 0
            assert row['gain_loss_asset'] is not None
            assert FVal(row['gain_loss_amount']) is not None
            assert FVal(row[f'profit_loss_in_{profit_currency.identifier}']) is not None
            assert 'link' in row and 'notes' in row
            count += 1
    num_margins = 2
    assert count == num_margins, 'Incorrect amount of margin CSV entries found'

    # None of this in the current history. TODO: add and also test formula
    # with open(os.path.join(csv_dir, FILENAME_LOAN_SETTLEMENTS_CSV), newline='') as csvfile:
    #     reader = csv.DictReader(csvfile)
    #     count = 0
    #     for row in reader:
    #         assert len(row) == 6
    #         assert Asset(row['asset']).identifier is not None
    #         assert FVal(row['amount']) is not None
    #         assert FVal(row[f'price_in_{profit_currency.identifier}']) > ZERO
    #         assert FVal(row[f'fee_in_{profit_currency.identifier}']) >= ZERO
    #         assert row[f'loss_in_{profit_currency.identifier}'] is not None
    #         assert create_timestamp(row['time'], '%d/%m/%Y %H:%M:%S') > 0
    #         count += 1
    # num_loan_settlements = 2
    # assert count == num_loan_settlements, 'Incorrect amount of loan settlement CSV entries found'

    # The all-events CSV should contain every entry from the other files
    with open(os.path.join(csv_dir, FILENAME_ALL_CSV), newline='') as csvfile:
        reader = csv.DictReader(csvfile)
        count = 0
        for row in reader:
            assert_csv_formulas_all_events(row, profit_currency)
            assert len(row) == 18
            assert row['location'] in (
                'kraken', 'bittrex', 'binance', 'poloniex', 'blockchain', 'bitmex',
            )
            assert row['type'] in (
                'buy',
                'sell',
                'asset_movement',
                'tx_gas_cost',
                'interest_rate_payment',
                'margin_position_close',
            )
            assert row['paid_asset'] is not None
            assert FVal(row['paid_in_asset']) >= ZERO
            assert row['taxable_amount'] is not None
            assert row['received_asset'] is not None
            assert FVal(row['received_in_asset']) >= ZERO
            assert row['net_profit_or_loss'] is not None
            assert create_timestamp(row['time'], '%d/%m/%Y %H:%M:%S') > 0
            assert row['is_virtual'] in ('True', 'False')
            assert FVal(row[f'paid_in_{profit_currency.identifier}']) >= ZERO
            assert row[f'taxable_received_in_{profit_currency.identifier}'] is not None
            assert row[f'taxable_bought_cost_in_{profit_currency.identifier}'] is not None
            assert row['cost_basis'] is not None
            assert row[f'total_bought_cost_in_{profit_currency.identifier}'] is not None
            assert row[f'total_received_in_{profit_currency.identifier}'] is not None
            assert 'link' in row and 'notes' in row
            count += 1
    assert count == (num_trades + num_loans + num_asset_movements + num_transactions + num_margins)
def assert_csv_export_response(response, profit_currency, csv_dir):
    """Verify a CSV-export API response and the generated per-category CSV files.

    Older variant: fewer columns per file than the download-capable version.
    """
    assert_proper_response(response)
    data = response.json()
    assert data['message'] == ''
    assert data['result'] is True

    # and check the csv files were generated succesfully. Here we are only checking
    # for valid CSV and not for the values to be valid.
    # TODO: In the future make a test that checks the values are also valid
    with open(os.path.join(csv_dir, FILENAME_TRADES_CSV), newline='') as csvfile:
        reader = csv.DictReader(csvfile)
        count = 0
        for row in reader:
            assert len(row) == 14
            assert row['type'] in ('buy', 'sell')
            assert Asset(row['asset']).identifier is not None
            assert FVal(row[f'fee_in_{profit_currency.identifier}']) >= ZERO
            assert FVal(row[f'price_in_{profit_currency.identifier}']) >= ZERO
            assert FVal(row[f'fee_in_{profit_currency.identifier}']) is not None
            assert FVal(row[f'gained_or_invested_{profit_currency.identifier}']) is not None
            assert FVal(row['amount']) > ZERO
            assert row['taxable_amount'] is not None
            assert Asset(row['exchanged_for']).identifier is not None
            key = f'exchanged_asset_{profit_currency.identifier}_exchange_rate'
            assert FVal(row[key]) >= ZERO
            key = f'taxable_bought_cost_in_{profit_currency.identifier}'
            assert row[key] is not None
            assert FVal(row[f'taxable_gain_in_{profit_currency.identifier}']) >= ZERO
            assert row[f'taxable_profit_loss_in_{profit_currency.identifier}'] is not None
            assert create_timestamp(row['time'], '%d/%m/%Y %H:%M:%S') > 0
            assert row['is_virtual'] in ('True', 'False')
            count += 1
    num_trades = 18
    assert count == num_trades, 'Incorrect amount of trade CSV entries found'

    with open(os.path.join(csv_dir, FILENAME_LOAN_PROFITS_CSV), newline='') as csvfile:
        reader = csv.DictReader(csvfile)
        count = 0
        for row in reader:
            assert len(row) == 6
            assert create_timestamp(row['open_time'], '%d/%m/%Y %H:%M:%S') > 0
            assert create_timestamp(row['close_time'], '%d/%m/%Y %H:%M:%S') > 0
            assert Asset(row['gained_asset']).identifier is not None
            assert FVal(row['gained_amount']) > ZERO
            assert FVal(row['lent_amount']) > ZERO
            assert FVal(row[f'profit_in_{profit_currency.identifier}']) > ZERO
            count += 1
    num_loans = 2
    assert count == num_loans, 'Incorrect amount of loans CSV entries found'

    with open(os.path.join(csv_dir, FILENAME_ASSET_MOVEMENTS_CSV), newline='') as csvfile:
        reader = csv.DictReader(csvfile)
        count = 0
        for row in reader:
            assert len(row) == 6
            assert create_timestamp(row['time'], '%d/%m/%Y %H:%M:%S') > 0
            assert row['exchange'] in SUPPORTED_EXCHANGES
            assert row['type'] in ('deposit', 'withdrawal')
            assert Asset(row['moving_asset']).identifier is not None
            assert FVal(row['fee_in_asset']) >= ZERO
            assert FVal(row[f'fee_in_{profit_currency.identifier}']) >= ZERO
            count += 1
    num_asset_movements = 11
    assert count == num_asset_movements, 'Incorrect amount of asset movement CSV entries found'

    with open(os.path.join(csv_dir, FILENAME_GAS_CSV), newline='') as csvfile:
        reader = csv.DictReader(csvfile)
        count = 0
        for row in reader:
            assert len(row) == 4
            assert create_timestamp(row['time'], '%d/%m/%Y %H:%M:%S') > 0
            assert row['transaction_hash'] is not None
            assert FVal(row['eth_burned_as_gas']) > ZERO
            assert FVal(row[f'cost_in_{profit_currency.identifier}']) > ZERO
            count += 1
    num_transactions = 3
    assert count == num_transactions, 'Incorrect amount of transaction costs CSV entries found'

    with open(os.path.join(csv_dir, FILENAME_MARGIN_CSV), newline='') as csvfile:
        reader = csv.DictReader(csvfile)
        count = 0
        for row in reader:
            assert len(row) == 5
            assert row['name'] is not None
            assert create_timestamp(row['time'], '%d/%m/%Y %H:%M:%S') > 0
            assert Asset(row['gain_loss_asset']).identifier is not None
            assert FVal(row['gain_loss_amount']) is not None
            assert FVal(row[f'profit_loss_in_{profit_currency.identifier}']) is not None
            count += 1
    num_margins = 2
    assert count == num_margins, 'Incorrect amount of margin CSV entries found'

    # None of this in the current history. TODO: add
    # with open(os.path.join(csv_dir, FILENAME_LOAN_SETTLEMENTS_CSV), newline='') as csvfile:
    #     reader = csv.DictReader(csvfile)
    #     count = 0
    #     for row in reader:
    #         assert len(row) == 6
    #         assert Asset(row['asset']).identifier is not None
    #         assert FVal(row['amount']) is not None
    #         assert FVal(row[f'price_in_{profit_currency.identifier}']) > ZERO
    #         assert FVal(row[f'fee_in_{profit_currency.identifier}']) >= ZERO
    #         assert row[f'loss_in_{profit_currency.identifier}'] is not None
    #         assert create_timestamp(row['time'], '%d/%m/%Y %H:%M:%S') > 0
    #         count += 1
    # num_loan_settlements = 2
    # assert count == num_loan_settlements, 'Incorrect amount of loan settlement CSV entries found'

    # The all-events CSV should contain every entry from the other files
    with open(os.path.join(csv_dir, FILENAME_ALL_CSV), newline='') as csvfile:
        reader = csv.DictReader(csvfile)
        count = 0
        for row in reader:
            assert len(row) == 12
            assert row['type'] in (
                'buy',
                'sell',
                'asset_movement',
                'tx_gas_cost',
                'interest_rate_payment',
                'margin_position_close',
            )
            # paid/received asset columns may be empty strings for some event types
            paid_asset = row['paid_asset']
            if paid_asset != '':
                assert Asset(paid_asset).identifier is not None
            assert FVal(row['paid_in_asset']) >= ZERO
            assert row['taxable_amount'] is not None
            received_asset = row['received_asset']
            if received_asset != '':
                assert Asset(received_asset).identifier is not None
            assert FVal(row['received_in_asset']) >= ZERO
            assert row['net_profit_or_loss'] is not None
            assert create_timestamp(row['time'], '%d/%m/%Y %H:%M:%S') > 0
            assert row['is_virtual'] in ('True', 'False')
            assert FVal(row[f'paid_in_{profit_currency.identifier}']) >= ZERO
            assert row[f'taxable_received_in_{profit_currency.identifier}'] is not None
            assert row[f'taxable_bought_cost_in_{profit_currency.identifier}'] is not None
            count += 1
    assert count == (num_trades + num_loans + num_asset_movements + num_transactions + num_margins)
def get_transactions(
        self,
        account: ChecksumEthAddress,
        from_ts: Optional[Timestamp] = None,
        to_ts: Optional[Timestamp] = None,
) -> Optional[List[CovalentTransaction]]:
    """Gets a list of transactions for account.
    - account is address for wallet.
    - to_ts is latest date.
    - from_ts is oldest date.

    May raise:
    - RemoteError due to self._query(). Also if the returned result
    is not in the expected format
    """
    # Default to the full available range
    if from_ts is None:
        from_ts = Timestamp(0)
    if to_ts is None:
        to_ts = ts_now()

    result_master = []
    i = 0
    options = {'page-number': i, 'page-size': PAGESIZE}
    # Page through the Covalent API newest-first until either the API says
    # there is no more data, a short page is returned, or we have gone past from_ts
    while True:
        result = self._query(
            module='transactions_v2',
            address=account,
            action='address',
            options=options,
        )
        if result is None:
            return result
        try:
            result_master += result['data']['items']
            if result['data']['pagination']['has_more'] is False:
                break
            if len(result['data']['items']) < PAGESIZE:
                break
            # Oldest entry of this page decides whether to fetch another page
            date_str = result['data']['items'][-1]['block_signed_at']
            last_timestamp = create_timestamp(
                datestr=date_str,
                formatstr=DATE_FORMAT_COVALENT,
            )
        except KeyError:
            return None
        if last_timestamp <= from_ts:
            break
        i += 1
        options = {'page-number': i, 'page-size': PAGESIZE}

    def between_date(value: Dict) -> bool:
        # Keep only entries whose signing time falls within [from_ts, to_ts]
        try:
            v_timestamp = create_timestamp(
                datestr=value['block_signed_at'],
                formatstr=DATE_FORMAT_COVALENT,
            )
            return from_ts <= v_timestamp <= to_ts  # type: ignore
        except KeyError:
            return False

    list_transactions = list(filter(between_date, result_master))
    transactions = []
    for transaction in list_transactions:
        try:
            tx = convert_transaction_from_covalent(transaction)
        except DeserializationError as e:
            # Best effort: a single malformed entry should not drop the batch
            self.msg_aggregator.add_warning(f'{str(e)}. Skipping transaction')
            continue
        transactions.append(tx)

    return transactions
def _scrape_validator_daily_stats(
        validator_index: int,
        last_known_timestamp: Timestamp,
        msg_aggregator: MessagesAggregator,
) -> List[ValidatorDailyStats]:
    """Scrapes the website of beaconcha.in and parses the data directly out of the data table.

    The parser is very simple. And can break if they change stuff in the way
    it's displayed in https://beaconcha.in/validator/33710/stats. If that happpens
    we need to adjust here. If we also somehow programatically get the data in a CSV
    that would be swell.

    May raise:
    - RemoteError if we can't query beaconcha.in or if the data is not in the expected format
    """
    url = f'https://beaconcha.in/validator/{validator_index}/stats'
    log.debug(f'Querying beaconchain stats: {url}')
    try:
        response = requests.get(url)
    except requests.exceptions.RequestException as e:
        raise RemoteError(f'Beaconcha.in api request {url} failed due to {str(e)}') from e

    if response.status_code != 200:
        raise RemoteError(
            f'Beaconcha.in api request {url} failed with code: {response.status_code}'
            f' and response: {response.text}',
        )

    # Only parse the table body to keep the parse cheap
    soup = BeautifulSoup(response.text, 'html.parser', parse_only=SoupStrainer('tbod'))
    if soup is None:
        raise RemoteError('Could not find <tbod> while parsing beaconcha.in stats page')
    try:
        tr = soup.tbod.tr
    except AttributeError as e:
        raise RemoteError('Could not find first <tr> while parsing beaconcha.in stats page') from e

    # Accumulators for the current row; column_pos tracks which <td> we are in
    timestamp = Timestamp(0)
    pnl = ZERO
    start_amount = ZERO
    end_amount = ZERO
    missed_attestations = 0
    orphaned_attestations = 0
    proposed_blocks = 0
    missed_blocks = 0
    orphaned_blocks = 0
    included_attester_slashings = 0
    proposer_attester_slashings = 0
    deposits_number = 0
    amount_deposited = ZERO
    column_pos = 1
    stats: List[ValidatorDailyStats] = []
    while tr is not None:
        for column in tr.children:
            if column.name != 'td':
                continue
            if column_pos == 1:  # date
                date = column.string
                try:
                    timestamp = create_timestamp(date, formatstr='%d %b %Y')
                except ValueError as e:
                    raise RemoteError(f'Failed to parse {date} to timestamp') from e

                if timestamp <= last_known_timestamp:
                    return stats  # we are done

                column_pos += 1
            elif column_pos == 2:
                pnl = _parse_fval(column.string, 'income')
                column_pos += 1
            elif column_pos == 3:
                start_amount = _parse_fval(column.string, 'start amount')
                column_pos += 1
            elif column_pos == 4:
                end_amount = _parse_fval(column.string, 'end amount')
                column_pos += 1
            elif column_pos == 5:
                missed_attestations = _parse_int(column.string, 'missed attestations')
                column_pos += 1
            elif column_pos == 6:
                orphaned_attestations = _parse_int(column.string, 'orphaned attestations')
                column_pos += 1
            elif column_pos == 7:
                proposed_blocks = _parse_int(column.string, 'proposed blocks')
                column_pos += 1
            elif column_pos == 8:
                missed_blocks = _parse_int(column.string, 'missed blocks')
                column_pos += 1
            elif column_pos == 9:
                orphaned_blocks = _parse_int(column.string, 'orphaned blocks')
                column_pos += 1
            elif column_pos == 10:
                included_attester_slashings = _parse_int(column.string, 'included attester slashings')  # noqa: E501
                column_pos += 1
            elif column_pos == 11:
                proposer_attester_slashings = _parse_int(column.string, 'proposer attester slashings')  # noqa: E501
                column_pos += 1
            elif column_pos == 12:
                deposits_number = _parse_int(column.string, 'deposits number')
                column_pos += 1
            elif column_pos == 13:
                amount_deposited = _parse_fval(column.string, 'amount deposited')
                column_pos += 1

        # Row complete: reset column counter and look up USD prices for
        # the start and end of the day
        column_pos = 1
        prices = [
            query_usd_price_zero_if_error(
                A_ETH,
                time=time,
                location='eth2 staking daily stats',
                msg_aggregator=msg_aggregator,
            )
            for time in (timestamp, Timestamp(timestamp + DAY_IN_SECONDS))
        ]
        stats.append(ValidatorDailyStats(
            validator_index=validator_index,
            timestamp=timestamp,
            start_usd_price=prices[0],
            end_usd_price=prices[1],
            pnl=pnl,
            start_amount=start_amount,
            end_amount=end_amount,
            missed_attestations=missed_attestations,
            orphaned_attestations=orphaned_attestations,
            proposed_blocks=proposed_blocks,
            missed_blocks=missed_blocks,
            orphaned_blocks=orphaned_blocks,
            included_attester_slashings=included_attester_slashings,
            proposer_attester_slashings=proposer_attester_slashings,
            deposits_number=deposits_number,
            amount_deposited=amount_deposited,
        ))
        tr = tr.find_next_sibling()

    return stats
def verify_otctrade_data(
        data: ExternalTrade,
) -> Tuple[Optional[Trade], str]:
    """
    Takes in the trade data dictionary, validates it and returns a trade instance

    If there is an error it returns an error message in the second part of the tuple
    """
    # Field-level validation: presence, non-emptiness, numeric shape
    for field in otc_fields:
        if field not in data:
            return None, f'{field} was not provided'

        if data[field] in ('', None) and field not in otc_optional_fields:
            return None, f'{field} was empty'

        if field in otc_numerical_fields and not is_number(data[field]):
            return None, f'{field} should be a number'

    # Satisfy mypy typing
    assert isinstance(data['otc_pair'], str)
    assert isinstance(data['otc_fee_currency'], str)
    assert isinstance(data['otc_fee'], str)

    pair = TradePair(data['otc_pair'])
    try:
        first = get_pair_position_asset(pair, 'first')
        second = get_pair_position_asset(pair, 'second')
        fee_currency = Asset(data['otc_fee_currency'])
    except UnknownAsset as e:
        return None, f'Provided asset {e.asset_name} is not known to Rotkehlchen'

    try:
        trade_type = deserialize_trade_type(str(data['otc_type']))
        amount = deserialize_asset_amount(data['otc_amount'])
        rate = deserialize_price(data['otc_rate'])
        fee = deserialize_fee(data['otc_fee'])
    except DeserializationError as e:
        return None, f'Deserialization Error: {str(e)}'
    try:
        assert isinstance(data['otc_timestamp'], str)
        timestamp = create_timestamp(data['otc_timestamp'], formatstr='%d/%m/%Y %H:%M')
    except ValueError as e:
        return None, f'Could not process the given datetime: {e}'

    log.debug(
        'Creating OTC trade data',
        sensitive_log=True,
        pair=pair,
        trade_type=trade_type,
        amount=amount,
        rate=rate,
        fee=fee,
        fee_currency=fee_currency,
    )

    # Semantic checks after all fields deserialized cleanly
    if data['otc_fee_currency'] not in (first, second):
        return None, 'Trade fee currency should be one of the two in the currency pair'

    if data['otc_type'] not in ('buy', 'sell'):
        return None, 'Trade type can only be buy or sell'

    trade = Trade(
        timestamp=timestamp,
        location='external',
        pair=pair,
        trade_type=trade_type,
        amount=amount,
        rate=rate,
        fee=fee,
        fee_currency=fee_currency,
        link=str(data['otc_link']),
        notes=str(data['otc_notes']),
    )

    return trade, ''