def Price(price):
    """Convert a GnuCash price record into a beancount Price directive.

    The commodity and its quote currency are both mapped through
    commodity_name() before being placed on the directive.
    """
    quote_currency = commodity_name(price.currency)
    priced_commodity = commodity_name(price.commodity)
    quote = data.Amount(price.value, quote_currency)
    return data.Price({}, price.date, priced_commodity, quote)
def Price(price):
    """Translate a price row (with .commodity/.currency mnemonics) into a
    beancount Price directive quoted in the row's currency."""
    quote = data.Amount(price.value, price.currency.mnemonic)
    return data.Price({}, price.date, price.commodity.mnemonic, quote)
def read_price_stream(stream: typing.Iterable, price_db: typing.Dict[str, dict],
                      quote_currency: str) -> data.Entries:
    """Reads an iterable of tuples and compares against existing price_db

    Returns: list of beancount Entries
    """
    new_entries = []
    for row in stream:
        symbol, raw_date, raw_amount = row[0:3]
        # Only emit prices for commodities we already track.
        if symbol not in price_db:
            continue
        parsed: datetime.datetime = dateparser.parse(raw_date)
        assert parsed, f"Unable to parse date {raw_date}"
        day = datetime.date(parsed.year, parsed.month, parsed.day)
        quote = data.Amount(decimal.Decimal(raw_amount), quote_currency)
        # Skip dates for which a price is already on file.
        if day in price_db[symbol]:
            continue
        new_entries.append(
            data.Price(date=day, currency=symbol, amount=quote,
                       meta=data.new_metadata('', 0)))
    return new_entries
def convert_price(name, currency, gnc_price):
    """Build a beancount Price directive from a raw GnuCash price record."""
    numeric = gnucash.gnucash_business.GncNumeric(instance=gnc_price.get_value())
    rate = Converter.normalize_numeric(numeric)
    when = gnc_price.get_time64().strftime('%Y-%m-%d')
    return data.Price({}, when, Converter.normalize_commodity(name),
                      data.Amount(rate, currency))
def fetch_price(dprice: DatedPrice, swap_inverted: bool = False) -> Optional[data.Price]:
    """Fetch a price for the DatedPrice job.

    Args:
      dprice: A DatedPrice instances.
      swap_inverted: A boolean, true if we should invert currencies instead of
        rate for an inverted price source.
    Returns:
      A Price entry corresponding to the output of the jobs processed.
    """
    # Try each candidate source in turn; stop at the first one that yields a
    # price. A source module without a Source class is simply skipped.
    for psource in dprice.sources:
        try:
            source = psource.module.Source()
        except AttributeError:
            continue
        srcprice = fetch_cached_price(source, psource.symbol, dprice.date)
        if srcprice is not None:
            break
    else:
        # for/else: no source produced a price (or there were none to try).
        if dprice.sources:
            logging.error("Could not fetch for job: %s", dprice)
        return None

    base = dprice.base
    quote = dprice.quote or srcprice.quote_currency
    price = srcprice.price

    # Invert the rate if requested.
    if psource.invert:
        if swap_inverted:
            base, quote = quote, base
        else:
            price = ONE / price

    assert base is not None
    fileloc = data.new_metadata('<{}>'.format(type(psource.module).__name__), 0)

    # The datetime instance is required to be aware. We always convert to the
    # user's timezone before extracting the date. This means that if the market
    # returns a timestamp for a particular date, once we convert to the user's
    # timezone the returned date may be different by a day. The intent is that
    # whatever we print is assumed coherent with the user's timezone. See
    # discussion at
    # https://groups.google.com/d/msg/beancount/9j1E_HLEMBQ/fYRuCQK_BwAJ
    srctime = srcprice.time
    if srctime.tzinfo is None:
        raise ValueError(
            "Time returned by the price source is not timezone aware.")
    date = srctime.astimezone(tz.tzlocal()).date()
    return data.Price(fileloc, date, base,
                      amount.Amount(price, quote or UNKNOWN_CURRENCY))
def render_beancount(self, entries, errors, options_map, file):
    """Render every forward price pair as Price directives, one blank line
    between pairs."""
    display_ctx = options_map['dcontext']
    price_map = prices.build_price_map(entries)
    meta = data.new_metadata('<report_prices_db>', 0)
    for base_quote in price_map.forward_pairs:
        base, quote = base_quote
        for date, rate in price_map[base_quote]:
            directive = data.Price(meta, date, base, amount.Amount(rate, quote))
            file.write(printer.format_entry(directive, display_ctx))
        file.write('\n')
def build_price_map_util(date_currency_price_tuples):
    """Build a partial price-map just for testing.

    Args:
      date_currency_price_tuples: A list of (datetime.date, currency-string,
        price-Amount) tuples to fill in the database with.
    Returns:
      A price_map, as per build_price_map().
    """
    synthetic_entries = []
    for date, currency, price in date_currency_price_tuples:
        synthetic_entries.append(data.Price(None, date, currency, price))
    return prices.build_price_map(synthetic_entries)
def post(self):
    """Persist the Price entries carried in the request's JSON body."""
    payload = request.json
    entries = []
    for item in payload['prices']:
        quote = amount.Amount(D(str(item['price'])), item['baseCurrency'])
        entries.append(data.Price(None, item['date'], item['quoteCurrency'], quote))
    storage.setPrices(entries, payload['filename'])
    return 'ok'
def extract_balances_and_prices(self, file, counter):
    """Generate Balance assertions (and Price entries where available) from
    the statement's balance positions, plus a cash-balance assertion.

    Args:
      file: the source statement file (has .name).
      counter: an iterator of line numbers used for entry metadata.
    Returns:
      A list of data.Balance and data.Price entries.
    """
    new_entries = []
    date = self.get_max_transaction_date()
    if date:
        # balance assertions are evaluated at the beginning of the date, so
        # move it to the following day
        date += datetime.timedelta(days=1)
    else:
        print("Warning: no transactions, using statement date for balance assertions.")

    # Tracks the units of the money-market (settlement) fund, which are
    # subtracted from available cash below.
    settlement_fund_balance = 0
    for pos in self.get_balance_positions():
        ticker, ticker_long_name = self.get_ticker_info(pos.security)
        meta = data.new_metadata(file.name, next(counter))
        # if there are no transactions, use the date in the source file for the
        # balance. This gives us the bonus of an updated, recent balance
        # assertion
        bal_date = date if date else pos.date.date()
        balance_entry = data.Balance(meta, bal_date,
                                     self.commodity_leaf(self.config['main_account'], ticker),
                                     amount.Amount(pos.units, ticker),
                                     None, None)
        new_entries.append(balance_entry)
        if ticker in self.money_market_funds:
            settlement_fund_balance = pos.units

        # extract price info if available
        if hasattr(pos, 'unit_price') and hasattr(pos, 'date'):
            meta = data.new_metadata(file.name, next(counter))
            price_entry = data.Price(meta, pos.date.date(), ticker,
                                     amount.Amount(pos.unit_price, self.currency))
            new_entries.append(price_entry)

    # ----------------- available cash
    available_cash = self.get_available_cash()
    if available_cash is not False:
        try:
            # Exclude the settlement fund from the cash-account balance, since
            # it was asserted above under its own ticker.
            balance = self.get_available_cash() - settlement_fund_balance
            meta = data.new_metadata(file.name, next(counter))
            bal_date = date if date else self.file_date(file).date()
            balance_entry = data.Balance(meta, bal_date, self.cash_account,
                                         amount.Amount(balance, self.currency),
                                         None, None)
            new_entries.append(balance_entry)
        except AttributeError:  # self.get_available_cash()
            # Best-effort: silently skip the cash assertion if the importer
            # cannot provide it.
            pass

    return new_entries
def write_price_directives(filename: str, pricer: Pricer, days_price_threshold: int):
    """Write a list of required price directives as a Beancount file."""
    directives = []
    for (currency, required_date), found_dates in sorted(pricer.required_prices.items()):
        assert len(found_dates) == 1
        cost_currency, actual_date, rate = found_dates.pop()
        # Only emit prices that were found too far before the required date.
        if (required_date - actual_date).days < days_price_threshold:
            continue
        directives.append(
            data.Price({}, required_date, currency, Amount(rate, cost_currency)))

    with open_with_mkdir(filename) as prfile:
        printer.print_entries(directives, file=prfile)
def main():
    """Top-level function: fetch daily prices for one instrument over a date
    interval from Yahoo!Finance and print them as Price directives."""
    today = datetime.date.today()
    parser = argparse.ArgumentParser(description=__doc__.strip())
    parser.add_argument('instrument',
                        help="Yahoo!Finance code for financial instrument.")
    # BUGFIX: without nargs='?' a positional argument is mandatory and its
    # 'default' is ignored, so the documented defaults never applied.
    parser.add_argument('start', action='store', nargs='?',
                        type=lambda x: dateutil.parser.parse(x).date(),
                        default=today.replace(year=today.year - 1),
                        help="Start date of interval. Default is one year ago.")
    parser.add_argument('end', action='store', nargs='?',
                        type=lambda x: dateutil.parser.parse(x).date(),
                        default=today,
                        help="End date of interval. Default is today.")
    args = parser.parse_args()

    # Get the data.
    source = yahoo.Source()
    sprices = source.get_daily_prices(
        args.instrument,
        datetime.datetime.combine(args.start, datetime.time()),
        datetime.datetime.combine(args.end, datetime.time()))
    if sprices is None:
        raise RuntimeError("Could not fetch from {}".format(source))

    # Attempt to infer the right quantization and quantize if successful.
    quant = number.infer_quantization_from_numbers([s.price for s in sprices])
    if quant:
        sprices = [sprice._replace(price=sprice.price.quantize(quant))
                   for sprice in sprices]

    # Convert to Price entries and output.
    price_entries = []
    for sprice in sprices:
        price_entries.append(
            data.Price({}, sprice.time.date(), args.instrument,
                       amount.Amount(sprice.price, sprice.quote_currency)))
    printer.print_entries(price_entries)
def main():
    import argparse, logging
    logging.basicConfig(level=logging.INFO, format='%(levelname)-8s: %(message)s')
    parser = argparse.ArgumentParser(description=__doc__.strip())
    parser.add_argument('filename', help='Beancount input filename')
    # Note: Historical data is not supported yet because Google Finance will not
    # serve us historical currency rates.
    #
    # parser.add_argument('-d', '--date', action='store', type=parse_date,
    #                     help="The date at which to extract the holdings")
    args = parser.parse_args()

    entries, errors, options_map = loader.load_file(args.filename)

    price_entries = []
    commodities_list = holdings.get_commodities_at_date(entries, options_map)
    for currency, cost_currency, quote_currency, ticker in commodities_list:
        # Commodities without a ticker cannot be fetched; skip them.
        if ticker is None:
            continue

        # Prefer an explicit quote currency; fall back to the cost currency.
        quote_currency = quote_currency or cost_currency

        # A '1/...' ticker means the fetched rate must be inverted, which we
        # achieve by swapping the currency pair.
        if ticker.startswith('1/'):
            ticker = ticker[2:]
            currency, quote_currency = quote_currency, currency

        price, price_time = get_google_finance_latest_price(ticker)

        fileloc = data.new_metadata('<fetch-prices>', 0)
        price_entries.append(
            data.Price(fileloc, price_time.date(), currency,
                       amount.Amount(price, quote_currency)))
    printer.print_entries(price_entries)
def generate(entries, options_map, baseCcy):
    """Synthesize additional Price entries re-quoted in baseCcy.

    For every Price directive not already quoted in baseCcy, look up the FX
    rate to baseCcy on that date and append a converted Price entry.

    Args:
      entries: The list of directives (mutated in place via extend).
      options_map: A parser options dict (unused).
      baseCcy: The currency to re-quote prices into.
    Returns:
      A (entries, errors) tuple; errors is always empty.
    """
    errors = []
    priceMap = prices.build_price_map(entries)
    additionalEntries = []
    for entry in entries:
        if isinstance(entry, data.Price) and entry.amount.currency != baseCcy:
            # get_price returns a (date, rate) pair; the rate is None when no
            # conversion to baseCcy is known at that date. Guard against it —
            # multiplying by None would raise a TypeError.
            _, fxRate = prices.get_price(
                priceMap, (entry.amount.currency, baseCcy), entry.date)
            if fxRate is None:
                continue
            priceInBaseCcy = amount.Amount(entry.amount.number * fxRate, baseCcy)
            additionalEntries.append(data.Price(
                entry.meta, entry.date, entry.currency, priceInBaseCcy))

    entries.extend(additionalEntries)
    return entries, errors
def extract_prices(self, statement: FlexStatement, existing_entries: list = None):
    """
    IBFlex XML Files can contain an object called 'OpenPositions', this is
    very useful because it lets us create
    - Balance assertions
    - Price entries from the Mark
    """
    entries = []
    for position in statement.OpenPositions:
        mark = position.markPrice
        symbol = self.clean_symbol(position.symbol)
        # Dates are 12 Midnight, let's make it the next day
        next_day = statement.toDate + timedelta(days=1)
        entries.append(
            # TODO De-Duplicate
            data.Price(currency=symbol,
                       amount=data.Amount(mark, "USD"),
                       date=next_day,
                       meta={'lineno': 0, 'filename': ''}))
        entries.append(
            data.Balance(
                account=self.account_for_symbol(statement, position.symbol),
                amount=data.Amount(position.position * position.multiplier,
                                   symbol),
                date=statement.toDate + timedelta(days=1),
                meta={'lineno': 0, 'filename': ''},
                tolerance=0.5,
                diff_amount=0,
            ))
    return entries
def fetch_price(dprice, swap_inverted=False):
    """Fetch a price for the DatePrice job.

    Args:
      dprice: A DatedPrice instances.
      swap_inverted: A boolean, true if we should invert currencies instead of
        rate for an inverted price source.
    Returns:
      A Price entry corresponding to the output of the jobs processed.
    """
    # Try each candidate source in turn; stop at the first that yields a price.
    for psource in dprice.sources:
        source = psource.module.Source()
        srcprice = fetch_cached_price(source, psource.symbol, dprice.date)
        if srcprice is not None:
            break
    else:
        # for/else: no source produced a price (or there were none to try).
        if dprice.sources:
            logging.error("Could not fetch for job: %s", dprice)
        return None

    base = dprice.base
    quote = dprice.quote or srcprice.quote_currency
    price = srcprice.price

    # Invert the rate if requested.
    if psource.invert:
        if swap_inverted:
            base, quote = quote, base
        else:
            price = ONE / price

    assert base is not None
    fileloc = data.new_metadata('<{}>'.format(type(psource.module).__name__), 0)
    return data.Price(fileloc, srcprice.time.date(), base,
                      amount.Amount(price, quote or UNKNOWN_CURRENCY))
def GetExpiredOptionsPrices(positions: JSON, balances: BalanceDict) -> List[data.Price]:
    """Produce zero prices for expired options, on the following day."""
    # Create an options positions map.
    position_map = {pos['instrument']['symbol']: pos
                    for pos in positions
                    if pos['instrument']['assetType'] == 'OPTION'}

    price_entries = []
    for currency, balance in balances.items():
        for position in balance:
            currency = position.units.currency
            if not options.IsOptionSymbol(currency):
                continue
            opt = options.ParseOptionSymbol(currency)
            fileloc = data.new_metadata('<ameritrade>', 0)

            # Record for the next day (we're going to do this on weekends).
            date = opt.expiration + datetime.timedelta(days=1)

            # If the position is still currently held, find the appropriate
            # price point from the list of positions.
            try:
                pos = position_map[currency]
            except KeyError:
                price = ZERO
            else:
                quantity = Decimal(pos['longQuantity'] - pos['shortQuantity'])
                price = Decimal(pos['marketValue']) / (quantity * CSIZE)

            price_entries.append(
                data.Price(fileloc, date, currency, Amount(price, USD)))
    return price_entries
def test_remove_account_postings(self):
    """remove_account_postings should drop postings on the given account and
    leave non-transaction directives untouched."""
    meta = data.new_metadata(".", 0)
    today = datetime.date.today()
    open_cc = data.Open(meta, today, 'Liabilities:US:CreditCard', None, None)
    open_round = data.Open(meta, today, 'Equity:Rounding', None, None)
    txn = data.Transaction(meta, today, FLAG, None, "Something",
                           None, None, [])
    data.create_simple_posting(txn, 'Liabilities:US:CreditCard', '-50', 'USD')
    data.create_simple_posting(txn, 'Equity:Rounding', '0.00123', 'USD')
    data.create_simple_posting(txn, 'Expenses:Food:Restaurant', '50', 'USD')
    price = data.Price(meta, today, 'HOOL', A('23 USD'))

    in_entries = [open_cc, open_round, txn, price]
    out_entries = data.remove_account_postings('Equity:Rounding', in_entries)

    # All four directives survive; only the rounding posting is removed.
    self.assertEqual(4, len(out_entries))
    self.assertEqual(
        ['Liabilities:US:CreditCard', 'Expenses:Food:Restaurant'],
        [posting.account for posting in out_entries[2].postings])
def main():
    parse_date = lambda s: parse_datetime(s).date()
    import argparse, logging
    logging.basicConfig(level=logging.INFO, format='%(levelname)-8s: %(message)s')
    parser = argparse.ArgumentParser(description=__doc__.strip())
    parser.add_argument('filename', help='Ledger filename')
    parser.add_argument('-c', '--compress-days', action='store',
                        type=int, default=60,
                        help="The number of unused days to ignore.")
    parser.add_argument('-m', '--min-date', action='store',
                        type=parse_date, default=None,
                        help="The minimum date to consider")
    args = parser.parse_args()

    # Load the ledger.
    entries, errors, options_map = loader.load_file(args.filename)

    # Build a map of existing price entries.
    price_map = {
        (entry.date, entry.currency, entry.amount.currency): entry
        for entry in entries
        if isinstance(entry, data.Price)
    }

    # Compute the lifetimes of currencies and compress them.
    lifetimes_map = lifetimes.compress_lifetimes_days(
        lifetimes.get_commodity_lifetimes(entries), args.compress_days)

    # Create price directives for missing prices.
    prices = []
    for key in lifetimes.required_weekly_prices(lifetimes_map, entries[-1].date):
        # If the price entry is already in the ledger, ignore it.
        if key in price_map:
            continue
        date, currency, cost_currency = key
        # Ignore entries too early.
        if args.min_date is not None and date < args.min_date:
            continue
        # Ignore entries with an empty cost currency.
        if cost_currency is None:
            continue
        # Create a price directive.
        prices.append(data.Price(data.new_metadata(__file__, 0),
                                 date, currency, Amount(ZERO, cost_currency)))

    # For now, just print those out.
    printer.print_entries(prices)
def unwrap_entry(data: dict) -> bean.Directive:
    """Rebuild a beancount directive from its serialized dict representation.

    Returns None for unrecognized directive types.
    """
    kind, e = itemgetter("type", "entry")(data)
    meta = e.get("meta")
    date = parse_date(e["date"])

    def tag_set(key):
        # Missing tag/link lists deserialize to an empty set.
        return set(e[key]) if key in e else set()

    if kind == "Open":
        return bean.Open(meta, date, account=e["account"],
                         currencies=e.get("currencies", []),
                         booking=e.get("booking"))
    if kind == "Close":
        return bean.Close(meta, date, account=e["account"])
    if kind == "Commodity":
        return bean.Commodity(meta, date, currency=e["currency"])
    if kind == "Pad":
        return bean.Pad(meta, date, account=e["account"],
                        source_account=e["source_account"])
    if kind == "Balance":
        return bean.Balance(meta, date, account=e["account"],
                            amount=parse_amount(e["amount"]),
                            tolerance=e.get("tolerance"),
                            diff_amount=e.get("diff_amount"))
    if kind == "Transaction":
        return bean.Transaction(meta, date, flag=e["flag"],
                                payee=e.get("payee"),
                                narration=e["narration"],
                                tags=tag_set("tags"),
                                links=tag_set("links"),
                                postings=[parse_posting(p)
                                          for p in e.get("postings", [])])
    if kind == "Note":
        return bean.Note(meta, date, account=e["account"],
                         comment=e.get("comment", ""))
    if kind == "Event":
        return bean.Event(meta, date, type=e["type"],
                          description=e["description"])
    if kind == "Query":
        return bean.Query(meta, date, name=e["name"],
                          query_string=e["query_string"])
    if kind == "Price":
        return bean.Price(meta, date, currency=e["currency"],
                          amount=parse_amount(e["amount"]))
    if kind == "Document":
        return bean.Document(meta, date, account=e["account"],
                             filename=e["filename"],
                             tags=tag_set("tags"),
                             links=tag_set("links"))
    if kind == "Custom":
        return bean.Custom(meta, date, type=e["type"], values=e["values"])
def add_implicit_prices(entries, unused_options_map):
    """Insert implicitly defined prices from Transactions.

    Explicit price entries are simply maintained in the output list. Prices from
    postings with costs or with prices from Transaction entries are synthesized
    as new Price entries in the list of entries output.

    Args:
      entries: A list of directives. We're interested only in the Transaction
        instances.
      unused_options_map: A parser options dict.
    Returns:
      A list of entries, possibly with more Price entries than before, and a
      list of errors.
    """
    new_entries = []
    errors = []

    # A dict of (date, currency, cost-currency) to price entry.
    new_price_entry_map = {}

    # Running per-account inventories, used to detect whether a posting
    # augments or reduces an existing position.
    balances = collections.defaultdict(inventory.Inventory)
    for entry in entries:
        # Always replicate the existing entries.
        new_entries.append(entry)

        if isinstance(entry, Transaction):
            # Inspect all the postings in the transaction.
            for posting in entry.postings:
                units = posting.units
                cost = posting.cost

                # Check if the position is matching against an existing
                # position.
                _, booking = balances[posting.account].add_position(posting)

                # Add prices when they're explicitly specified on a posting. An
                # explicitly specified price may occur in a conversion, e.g.
                #      Assets:Account    100 USD @ 1.10 CAD
                # or, if a cost is also specified, as the current price of the
                # underlying instrument, e.g.
                #      Assets:Account    100 HOOL {564.20} @ {581.97} USD
                if posting.price is not None:
                    meta = data.new_metadata(entry.meta["filename"],
                                             entry.meta["lineno"])
                    meta[METADATA_FIELD] = "from_price"
                    price_entry = data.Price(meta, entry.date,
                                             units.currency,
                                             posting.price)

                # Add costs, when we're not matching against an existing
                # position. This happens when we're just specifying the cost,
                # e.g.
                #      Assets:Account    100 HOOL {564.20}
                elif (cost is not None and
                      booking != inventory.Booking.REDUCED):
                    # TODO(blais): What happens here if the account has no
                    # booking strategy? Do we end up inserting a price for the
                    # reducing leg? Check.
                    meta = data.new_metadata(entry.meta["filename"],
                                             entry.meta["lineno"])
                    meta[METADATA_FIELD] = "from_cost"
                    price_entry = data.Price(meta, entry.date,
                                             units.currency,
                                             amount.Amount(cost.number,
                                                           cost.currency))
                else:
                    price_entry = None

                if price_entry is not None:
                    # De-duplicate synthesized prices on this key; a bare
                    # dict lookup raising KeyError means the price is new.
                    key = (price_entry.date,
                           price_entry.currency,
                           price_entry.amount.number,  # Ideally should be removed.
                           price_entry.amount.currency)
                    try:
                        new_price_entry_map[key]

                        ## Do not fail for now. We still have many valid use
                        ## cases of duplicate prices on the same date, for
                        ## example, stock splits, or trades on two dates with
                        ## two separate reported prices. We need to figure out a
                        ## more elegant solution for this in the long term.
                        ## Keeping both for now. We should ideally not use the
                        ## number in the de-dup key above.
                        #
                        # dup_entry = new_price_entry_map[key]
                        # if price_entry.amount.number == dup_entry.amount.number:
                        #     # Skip duplicates.
                        #     continue
                        # else:
                        #     errors.append(
                        #         ImplicitPriceError(
                        #             entry.meta,
                        #             "Duplicate prices for {} on {}".format(entry,
                        #                                                    dup_entry),
                        #             entry))
                    except KeyError:
                        new_price_entry_map[key] = price_entry
                        new_entries.append(price_entry)

    return new_entries, errors