def query_missing_prices_of_base_entries(
        self,
        entries_missing_prices: List[Tuple[str, FVal, Asset, Timestamp]],
) -> None:
    """Queries missing prices for HistoryBaseEntry in database updating
    the price if it is found. Otherwise we add the id to the ignore list
    for this session.
    """
    price_historian = PriceHistorian()
    # Collect (usd_value, rowid) pairs first, then persist them in one batch.
    price_updates: List[Tuple[str, str]] = []
    for identifier, amount, asset, timestamp in entries_missing_prices:
        try:
            found_price = price_historian.query_historical_price(
                from_asset=asset,
                to_asset=A_USD,
                timestamp=timestamp,
            )
        except (NoPriceForGivenTimestamp, RemoteError) as e:
            log.error(
                f'Failed to find price for {asset} at {timestamp} in base '
                f'entry {identifier}. {str(e)}.',
            )
            # Remember the failure so we don't retry this entry in this session
            self.base_entries_ignore_set.add(identifier)
            continue

        price_updates.append((str(amount * found_price), identifier))

    cursor = self.database.conn.cursor()
    cursor.executemany(
        'UPDATE history_events SET usd_value=? WHERE rowid=?',
        price_updates,
    )
    self.database.update_last_write()
def quote(
        asset: Tuple[str, ...],  # BUGFIX: was Tuple[str] (a 1-tuple); we iterate a variadic tuple of symbols
        amount: float,
        base_asset_: Optional[str],
        timestamp: Optional[str],
):
    """Echo the value of `amount` units of each asset in `asset`, expressed
    in `base_asset_` (or the configured main currency when not given).

    When `timestamp` is provided, asset prices are looked up historically at
    that time; the base asset is always priced at the current USD rate.
    """
    buchfink_db = BuchfinkDB()
    buchfink_db.perform_assets_updates()

    base_asset = buchfink_db.get_asset_by_symbol(base_asset_) \
        if base_asset_ \
        else buchfink_db.get_main_currency()
    # Current USD price of the base asset; used as the conversion denominator
    base_in_usd = FVal(buchfink_db.inquirer.find_usd_price(base_asset))

    a_usd = buchfink_db.get_asset_by_symbol('USD')
    ds_timestamp = deserialize_timestamp(timestamp) if timestamp else None
    historian = PriceHistorian()

    for symbol in asset:
        asset_ = buchfink_db.get_asset_by_symbol(symbol)
        if ds_timestamp:
            asset_usd = historian.query_historical_price(
                from_asset=asset_,
                to_asset=a_usd,
                timestamp=ds_timestamp
            )
        else:
            asset_usd = FVal(buchfink_db.inquirer.find_usd_price(asset_))

        click.echo('{} {} = {} {}'.format(
            click.style(f'{amount}', fg='white'),
            click.style(asset_.symbol, fg='green'),
            click.style(f'{FVal(amount) * asset_usd / base_in_usd}', fg='white'),
            click.style(base_asset.symbol, fg='green')
        ))
def get_main_currency_price(self, timestamp: Timestamp) -> Tuple[Asset, Price]:
    """Gets the main currency and its equivalent price at a particular timestamp."""
    main_currency = self.db.get_main_currency()
    try:
        price = PriceHistorian.query_historical_price(
            from_asset=A_USD,
            to_asset=main_currency,
            timestamp=timestamp,
        )
    except NoPriceForGivenTimestamp:
        # Fall back to a 1:1 USD rate and surface the problem to the user
        price = Price(ONE)
        self.msg_aggregator.add_error(
            f'Could not find price for timestamp {timestamp}. Using USD for export. '
            f'Please add manual price from USD to your main currency {main_currency}',
        )
    return main_currency, price
def process_trades(
        db: DBHandler,
        timestamp: Timestamp,
        data: List[BinanceCsvRow],
) -> List[Trade]:
    """Processes multiple rows data and stores it into rotki's trades
    Each row has format: {'Operation': ..., 'Change': ..., 'Coin': ...}
    Change is amount, Coin is asset
    If amount is negative then this asset is sold, otherwise it's bought

    Returns the list of deduplicated trades built from the rows, or an empty
    list if a required historical price could not be found.
    """
    # Because we can get mixed data (e.g. multiple Buys or Sells on a single timestamp) we need
    # to group it somehow. We are doing it by grouping the highest bought with the highest
    # sold value. We query usd equivalent for each amount because different Sells / Buys
    # may use different assets. If we query price for the first time it can take long, so we
    # would like to avoid it, and therefore we check if all Buys / Sells use the same asset.
    # If so, we can group by original amount.

    # Checking assets
    same_assets = True
    assets: Dict[str, Optional[Asset]] = defaultdict(lambda: None)
    for row in data:
        if row['Operation'] == 'Fee':
            cur_operation = 'Fee'
        elif row['Change'] < 0:
            cur_operation = 'Sold'
        else:
            cur_operation = 'Bought'
        assets[cur_operation] = assets[cur_operation] or row['Coin']
        if assets[cur_operation] != row['Coin']:
            same_assets = False
            break

    # Querying usd value if needed
    if same_assets is False:
        for row in data:
            try:
                price = PriceHistorian.query_historical_price(
                    from_asset=row['Coin'],
                    to_asset=A_USD,
                    timestamp=timestamp,
                )
            except NoPriceForGivenTimestamp:
                # If we can't find price we can't group, so we quit the method
                log.warning(f'Couldn\'t find price of {row["Coin"]} on {timestamp}')
                return []
            row['usd_value'] = row['Change'] * price

    # Group rows depending on whether they are fee or not and then sort them by amount
    rows_grouped_by_fee: Dict[bool, List[BinanceCsvRow]] = defaultdict(list)
    for row in data:
        is_fee = row['Operation'] == 'Fee'
        rows_grouped_by_fee[is_fee].append(row)

    for rows_group in rows_grouped_by_fee.values():
        rows_group.sort(key=lambda x: x['Change'] if same_assets else x['usd_value'], reverse=True)  # noqa: E501

    # Grouping by combining the highest sold with the highest bought and the highest fee
    grouped_trade_rows = []
    while len(rows_grouped_by_fee[False]) > 0:
        # BUGFIX: the original unconditionally popped a second trade row and a
        # fee row, raising IndexError when the non-fee rows were odd in number
        # or when the fee rows ran out before the trade batches did (the
        # `True in rows_grouped_by_fee` check stays true even after the fee
        # list empties, because the defaultdict key persists). Guard each pop.
        cur_batch = [rows_grouped_by_fee[False].pop()]
        if len(rows_grouped_by_fee[False]) > 0:
            cur_batch.append(rows_grouped_by_fee[False].pop(0))
        if len(rows_grouped_by_fee[True]) > 0:
            cur_batch.append(rows_grouped_by_fee[True].pop())
        grouped_trade_rows.append(cur_batch)

    # Creating trades structures based on grouped rows data
    raw_trades: List[Trade] = []
    for trade_rows in grouped_trade_rows:
        to_asset: Optional[Asset] = None
        to_amount: Optional[AssetAmount] = None
        from_asset: Optional[Asset] = None
        from_amount: Optional[AssetAmount] = None
        fee_asset: Optional[Asset] = None
        fee_amount: Optional[Fee] = None
        trade_type: Optional[TradeType] = None

        for row in trade_rows:
            cur_asset = row['Coin']
            amount = row['Change']
            if row['Operation'] == 'Fee':
                fee_asset = cur_asset
                fee_amount = Fee(amount)
            else:
                trade_type = TradeType.SELL if row['Operation'] == 'Sell' else TradeType.BUY  # noqa: E501
                if amount < 0:
                    from_asset = cur_asset
                    from_amount = AssetAmount(-amount)
                else:
                    to_asset = cur_asset
                    # BUGFIX: wrap in AssetAmount for consistency with from_amount
                    to_amount = AssetAmount(amount)

        # Validate that we have received proper assets and amounts.
        # There can be no fee, so we don't validate it
        if (
            to_asset is None or from_asset is None or trade_type is None or
            to_amount is None or to_amount == ZERO or
            from_amount is None or from_amount == ZERO
        ):
            log.warning(
                f'Skipped binance rows {data} because '
                f'it didn\'t have enough data',
            )
            db.msg_aggregator.add_warning('Skipped some rows because couldn\'t find amounts or it was zero')  # noqa: E501
            continue

        rate = to_amount / from_amount
        trade = Trade(
            timestamp=timestamp,
            location=Location.BINANCE,
            trade_type=trade_type,
            base_asset=to_asset,
            quote_asset=from_asset,
            amount=to_amount,
            rate=Price(rate),
            fee_currency=fee_asset,
            fee=fee_amount,
            link='',
            notes='Imported from binance CSV file. Binance operation: Buy / Sell',
        )
        raw_trades.append(trade)

    # Sometimes we can get absolutely identical trades (including timestamp) but the database
    # allows us to add only one of them. So we combine these trades into a huge single trade
    # First step: group trades
    grouped_trades: Dict[TradeID, List[Trade]] = defaultdict(list)
    for trade in raw_trades:
        grouped_trades[trade.identifier].append(trade)

    # Second step: combine them
    unique_trades = []
    for trades_group in grouped_trades.values():
        result_trade = trades_group[0]
        for trade in trades_group[1:]:
            result_trade.amount = AssetAmount(result_trade.amount + trade.amount)  # noqa: E501
            if result_trade.fee is not None and trade.fee is not None:
                result_trade.fee = Fee(result_trade.fee + trade.fee)
        unique_trades.append(result_trade)

    return unique_trades