def generate_table(self, entries, errors, options_map):
    """Render the per-currency net worth values as a two-column table."""
    net_worths = calculate_net_worths(entries, options_map)
    return table.create_table(net_worths,
                              [(0, 'Currency'),
                               (1, 'Net Worth', '{:,.2f}'.format)])
def __call__(self, parser, namespace, values, option_string=None):
    """Print a matrix of report names vs. supported output formats, then exit."""
    # Collect (report name, supported formats) for every report type.
    name_formats = [(report_class.names[0], report_class.get_supported_formats())
                    for report_class in get_all_reports()]

    # Compute a list of unique output formats, ordered by preference.
    all_formats = sorted(
        {fmt for _, formats in name_formats for fmt in formats},
        key=lambda fmt: self.format_order.get(fmt, self.format_order_last))

    # Build one row per report: its name, then an 'X' under each format it
    # supports.
    rows = [[name] + ['X' if fmt in formats else '' for fmt in all_formats]
            for name, formats in name_formats]

    # Build a field specification: a 'Name' column followed by one column
    # per output format.
    field_spec = list(enumerate(['Name'] + all_formats))

    # Create and render an ASCII table, then terminate the program.
    table_ = table.create_table(rows, field_spec)
    sys.stdout.write(table.table_to_text(table_, " "))
    sys.exit(0)
def render_real_text(self, real_root, price_map, price_date, options_map,
                     file):
    """Render a text table of asset and liability accounts with the date of
    their last activity, skipping closed accounts and those updated at or
    past the cutoff date."""
    account_types = options.get_account_types(options_map)
    rows = []
    for root_name in (account_types.assets, account_types.liabilities):
        subtree = realization.get(real_root, root_name)
        for _, _, real_account in realization.dump(subtree):
            last_posting = realization.find_last_active_posting(
                real_account.txn_postings)

            # Don't render updates to accounts that have been closed.
            # Note: this is O(N), maybe we can store this at realization.
            if last_posting is None or isinstance(last_posting, data.Close):
                continue

            last_date = data.get_entry(last_posting).date
            # Skip this account if a cutoff date has been specified and the
            # account has been updated to at least the cutoff date.
            if self.args.cutoff and self.args.cutoff <= last_date:
                continue
            rows.append((real_account.account, last_date))

    table_ = table.create_table(
        rows, [(0, 'Account'), (1, 'Last Date', '{}'.format)])
    table.render_table(table_, file, 'text')
def generate_table(self, entries, errors, options_map):
    """Tabulate holdings, optionally filtered to operating currencies,
    restricted to the ignored set, and/or converted to a single currency."""
    holdings_list, price_map = get_assets_holdings(entries, options_map)
    all_holdings = holdings_list

    # Keep only the holdings where currency is the same as the cost-currency.
    holdings_list = [holding
                     for holding in holdings_list
                     if (holding.currency == holding.cost_currency or
                         holding.cost_currency is None)]

    # Keep only those holdings held in one of the operating currencies.
    if self.args.operating_only:
        operating = set(options_map['operating_currency'])
        holdings_list = [holding
                         for holding in holdings_list
                         if holding.currency in operating]

    # Optionally report on the ignored holdings instead of the kept ones.
    if self.args.ignored:
        holdings_list = set(all_holdings) - set(holdings_list)

    # Convert holdings to a unified currency.
    if self.args.currency:
        holdings_list = holdings.convert_to_currency(price_map,
                                                     self.args.currency,
                                                     holdings_list)

    return table.create_table(holdings_list, FIELD_SPEC)
def report_holdings(currency, relative, entries, options_map,
                    aggregation_key=None, sort_key=None):
    """Generate a detailed list of all holdings.

    Args:
      currency: A string, a currency to convert to. If left to None, no
        conversion is carried out.
      relative: A boolean, true if we should reduce this to a relative value.
      entries: A list of directives.
      options_map: A dict of parsed options.
      aggregation_key: A callable used to generate aggregations.
      sort_key: A function to use to sort the holdings, if specified.
    Returns:
      A Table instance.
    """
    holdings_list, _ = get_assets_holdings(entries, options_map, currency)

    if aggregation_key:
        holdings_list = holdings.aggregate_holdings_by(holdings_list,
                                                       aggregation_key)

    if relative:
        holdings_list = holdings.reduce_relative(holdings_list)
    field_spec = RELATIVE_FIELD_SPEC if relative else FIELD_SPEC

    if sort_key:
        holdings_list.sort(key=sort_key, reverse=True)

    return table.create_table(holdings_list, field_spec)
def main():
    """Load a Beancount file and report wash-sale trades for the symbols.

    Renders the trades table to stdout as text and writes a CSV copy to
    /tmp/washsales.csv.
    """
    import argparse, logging
    logging.basicConfig(level=logging.INFO,
                        format='%(levelname)-8s: %(message)s')
    parser = argparse.ArgumentParser(description=__doc__.strip())
    parser.add_argument('filename', help='Beancount input filename to process')
    parser.add_argument('symbols', nargs='+',
                        help='Substantially identical stock symbols.')
    parser.add_argument('-y', '--year', action='store', type=int,
                        default=datetime.date.today().year - 1,
                        help="Calendar year to analyze")
    args = parser.parse_args()

    entries, errors, options_map = loader.load_file(args.filename)
    # Cut off at the start of the year following the analyzed one.
    trades = get_trades(entries, options_map, set(args.symbols),
                        datetime.date(args.year + 1, 1, 1))

    field_spec = list(enumerate(
        'type acq_date adj_acq_date sell_date number currency cost '
        'cost_basis price proceeds fee pnl'.split()))
    table_ = table.create_table(trades, field_spec)
    table.render_table(table_, sys.stdout, 'text')
    # Bug fix: the CSV file handle was opened and never closed; use a
    # context manager so it is flushed and closed deterministically.
    with open('/tmp/washsales.csv', 'w') as csvfile:
        table.render_table(table_, csvfile, 'csv')
def generate_table(self, entries, errors, options_map):
    """Tabulate the last-seen description for each event type.

    A later Event directive of the same type overwrites the description of
    an earlier one, so the table shows one row per distinct event type.
    """
    events = {}
    for entry in entries:
        if isinstance(entry, data.Event):
            events[entry.type] = entry.description
    # sorted() already returns a list; the redundant list() wrapper was
    # removed.
    return table.create_table(
        sorted(events.items()),
        [(0, "Type", self.formatter.render_event_type), (1, "Description")])
def generate_table(self, entries, errors, options_map):
    """Tabulate event directives, optionally filtered by a type regexp."""
    # Keep Event directives whose type matches the filter (if one is given;
    # re.match anchors at the start of the type string).
    selected = [entry for entry in entries
                if isinstance(entry, data.Event)
                and (not self.args.expr or
                     re.match(self.args.expr, entry.type))]
    rows = [(entry.date, entry.type, entry.description)
            for entry in selected]
    return table.create_table(rows,
                              [(0, "Date", datetime.date.isoformat),
                               (1, "Type"),
                               (2, "Description")])
def generate_table(self, entries, _, __):
    """Tabulate the number of directives of each type, most frequent first,
    with a final total row."""
    groups = misc_utils.groupby(lambda entry: type(entry).__name__, entries)
    counts = sorted(((name, len(group)) for name, group in groups.items()),
                    key=lambda item: item[1], reverse=True)
    rows = [(name, str(count)) for name, count in counts]
    rows.append(('~Total~', str(len(entries))))
    return table.create_table(
        rows, [(0, 'Type'), (1, 'Num Entries', '{:>}'.format)])
def test_create_table_with_index(self):
    """A bare column index in the field spec gets a default 'Field N' header."""
    rows = [
        ('USD', '1111.00'),
        ('CAD', '1333.33'),
    ]
    actual = table.create_table(rows, [(0, 'Currency'), 1])
    expected = table.Table(columns=[0, 1],
                           header=['Currency', 'Field 1'],
                           body=[['USD', '1111.00'], ['CAD', '1333.33']])
    self.assertEqual(expected, actual)
def generate_table(self, entries, errors, options_map):
    """List the ticker symbols of all commodities that declare one."""
    commodity_map = getters.get_commodity_map(entries, options_map)
    ticker_info = getters.get_values_meta(commodity_map,
                                          'name', 'ticker', 'quote')
    rows = []
    for currency, (name, ticker, cost_currency) in sorted(ticker_info.items()):
        # Skip commodities that have no ticker symbol.
        if not ticker:
            continue
        rows.append((currency, cost_currency, ticker, name))
    return table.create_table(rows,
                              [(0, "Currency"),
                               (1, "Cost-Currency"),
                               (2, "Symbol"),
                               (3, "Name")])
def test_create_table(self):
    """create_table accepts attribute-name field specs for named tuples,
    in bare, 1-, 2- and 3-tuple forms."""
    Row = collections.namedtuple('Tup', 'country capital currency amount')
    rows = [
        Row("Malawi", "Lilongwe", "Kwacha", D("0.111")),
        Row("Mali", "Bamako", "CFA franc", D("0.222")),
        Row("Mauritania", "Nouakchott", "Ouguiya", D("0.333")),
    ]
    field_spec = ["country",
                  ("capital",),
                  ("currency", "Currency"),
                  ("amount", "Amount", "{:.3f}".format)]
    return table.create_table(rows, field_spec)
def generate_table(self, entries, _, __):
    """Count the number of postings per account, most active first, with a
    final total row."""
    txn_postings = [posting
                    for entry in entries
                    if isinstance(entry, data.Transaction)
                    for posting in entry.postings]
    by_account = misc_utils.groupby(lambda posting: posting.account,
                                    txn_postings)
    counts = {account: len(postings)
              for account, postings in by_account.items()}
    rows = [(account, str(num))
            for account, num in sorted(counts.items(),
                                       key=lambda item: item[1],
                                       reverse=True)]
    rows.append(('~Total~', str(sum(counts.values()))))
    return table.create_table(
        rows, [(0, 'Account'), (1, 'Num Postings', '{:>}'.format)])
def create_summary_table(totals):
    """Build a per-currency gain/loss/net/adjustment summary with a final
    grand-total row marked '*'."""
    summary_fields = list(enumerate(
        ['Currency', 'Gain', 'Loss', 'Net', 'Adj/Wash']))
    rows = []
    total_gain = total_loss = total_adj = ZERO
    for currency in sorted(totals.adj):
        gain = totals.gain[currency]
        loss = totals.loss[currency]
        adj = totals.adj[currency]
        total_gain += gain
        total_loss += loss
        total_adj += adj
        rows.append((currency, gain, loss, gain + loss, adj))
    rows.append(('*', total_gain, total_loss,
                 total_gain + total_loss, total_adj))
    return table.create_table(rows, summary_fields)
def main():
    """Report the vesting lots held in the given accounts as of a date.

    Renders a five-column table (vest date, units, instrument, cost,
    cost currency) to stdout and optionally writes a CSV copy.
    """
    logging.basicConfig(level=logging.INFO,
                        format='%(levelname)-8s: %(message)s')
    parser = argparse.ArgumentParser(description=__doc__.strip())
    parser.add_argument('filename', help='Beancount input file')
    parser.add_argument('accounts', nargs='+', help='Account names')
    parser.add_argument('--date', type=date_utils.parse_date_liberally,
                        help="Date")
    parser.add_argument('-o', '--output', action='store',
                        help="Output directory for the CSV files")
    args = parser.parse_args()

    entries, errors, options_map = loader.load_file(args.filename)

    # Filter out anything on or after the given date; default to the date of
    # the last entry so that everything is included.
    if args.date is None:
        args.date = entries[-1].date
    entries = [entry for entry in entries if entry.date < args.date]

    # Compute the balance in each account and process it.
    real_root = realization.realize(entries)
    rows = []
    # Bug fix: each row carries five values (vest date, units number and
    # currency, cost number and currency) but the field spec used to name
    # only four columns; the cost-currency column is now named too.
    fieldspec = list(enumerate(
        ['Vest Date', 'Units', 'Instrument', 'Cost', 'Cost Currency']))
    for account in args.accounts:
        real_acc = realization.get(real_root, account)
        if real_acc is None:
            logging.error("Account '%s' does not exist", account)
            continue
        for position in real_acc.balance:
            rows.append((position.cost.date,
                         position.units.number, position.units.currency,
                         position.cost.number, position.cost.currency))
    rows = sorted(rows)

    tbl = table.create_table(rows, fieldspec)
    table.render_table(tbl, sys.stdout, 'text')
    if args.output:
        # path.join() with a single argument is a no-op; use the path
        # directly.
        with open(args.output, 'w') as file:
            table.render_table(tbl, file, 'csv')
def generate_table(self, entries, errors, options_map):
    """Compute the net worth expressed in each operating currency.

    Returns a Table with one row per operating currency for which a price
    conversion could be carried out.
    """
    holdings_list, price_map = get_assets_holdings(entries, options_map)

    net_worths = []
    for currency in options_map['operating_currency']:
        # Convert holdings to a unified currency.
        #
        # Note: It's entirely possible that the price map does not have all
        # the necessary rate conversions here. The resulting holdings will
        # simply have no cost when that is the case. We must handle this
        # gracefully below.
        converted = holdings.convert_to_currency(price_map, currency,
                                                 holdings_list)
        if not converted:
            continue

        # Bug fix: the aggregate used to be assigned back to
        # 'holdings_list', clobbering the original holdings so that every
        # subsequent operating currency converted the previous iteration's
        # single-row aggregate instead of the full set. Keep the aggregate
        # in a loop-local variable.
        aggregated = holdings.aggregate_holdings_by(
            converted, lambda holding: holding.cost_currency)
        aggregated = [holding for holding in aggregated
                      if holding.currency and holding.cost_currency]

        # If after conversion there are no valid holdings, skip the currency
        # altogether.
        if not aggregated:
            continue
        net_worths.append((currency, aggregated[0].market_value))

    field_spec = [
        (0, 'Currency'),
        (1, 'Net Worth', '{:,.2f}'.format),
    ]
    return table.create_table(net_worths, field_spec)
def create_detailed_table(sales, calculate_commission):
    """Build a detailed table with one row per individual lot sold.

    Args:
      sales: A list of TxnPosting instances, one per sale leg.
      calculate_commission: A boolean; if true, the 'commission' metadata on
        a posting holds the pre-computed number, otherwise it holds a value
        inserted by the commissions plugin (read via get_only_position()).
    Returns:
      A (lots, table, totals) triple: the list of LotSale rows, the Table
      built from them, and a Totals namedtuple of per-currency
      loss/gain/adjustment defaultdicts.
    """
    # All monetary amounts are quantized to cents.
    Q = D('0.01')
    lots = []
    total_loss = collections.defaultdict(D)
    total_gain = collections.defaultdict(D)
    total_adj = collections.defaultdict(D)

    # If no mssb number has been assigned explicitly, assign a random one. I
    # need to figure out how to find those numbers again.
    auto_mssb_number = itertools.count(start=1000000 + 1)
    for sale in sales:
        try:
            sale_no = sale.txn.meta['mssb']
        except KeyError:
            sale_no = next(auto_mssb_number)
        ref = sale.txn.meta['ref']

        units = sale.posting.units
        # Total cost basis and total proceeds for the lot; units.number is
        # negative on a sale, hence the sign flip.
        totcost = (-units.number * sale.posting.cost.number).quantize(Q)
        totprice = (-units.number * sale.posting.price.number).quantize(Q)

        commission_meta = sale.posting.meta.get('commission', None)
        if commission_meta is None:
            commission = ZERO
        else:
            if calculate_commission:
                commission = commission_meta
            else:
                # Fetch the commission that was inserted by the commissions plugin.
                commission = commission_meta.get_only_position().units.number
        commission = commission.quantize(Q)

        pnl = (totprice - totcost - commission).quantize(Q)
        is_wash = sale.posting.meta.get('wash', False)

        # Ensure the key in all the dicts: these bare lookups exploit the
        # defaultdict side effect to create a zero entry for the currency
        # even when no branch below touches one of the dicts.
        (total_gain[units.currency],
         total_loss[units.currency],
         total_adj[units.currency])
        if totprice > totcost:
            total_gain[units.currency] += pnl
        else:
            total_loss[units.currency] += pnl
        if is_wash:
            # Wash sale: record the adjustment and mark the row with 'W'.
            total_adj[units.currency] += pnl
            code = 'W'
            adj = -pnl
        else:
            code = ''
            adj = ''

        days_held = (sale.txn.date - sale.posting.cost.date).days
        term = 'LONG' if days_held >= 365 else 'SHORT'
        lot = LotSale(sale_no, ref,
                      sale.posting.cost.date, sale.txn.date,
                      days_held, term,
                      units.currency,
                      -units.number.quantize(Q),
                      sale.posting.cost.number.quantize(Q),
                      sale.posting.price.number.quantize(Q),
                      totcost, totprice,
                      commission,
                      totprice - commission,
                      pnl, code, adj)
        lots.append(lot)

    Totals = collections.namedtuple('Totals', 'loss gain adj')
    totals = Totals(total_loss, total_gain, total_adj)
    # NOTE(review): 'fieldspec' is not defined in this function; presumably
    # a module-level constant — confirm.
    return lots, table.create_table(lots, fieldspec), totals
def generate_table(self, entries, errors, options_map):
    """Return a fixed two-row sample table built from named tuples.

    No field specification is passed, so create_table derives the columns
    from the named tuple's fields.
    """
    ABC = collections.namedtuple('ABC', 'account balance')
    rows = [ABC('account1', D(2000)), ABC('account2', D(5000))]
    return table.create_table(rows)
def main():
    """Report on sales for an account: detail, aggregate, or summary.

    Loads a Beancount file, finds reducing (sale) postings on the matching
    account within [--start, --end), computes per-lot P/L with wash-sale
    adjustments, and renders the selected report to stdout (and optionally
    to a CSV file).
    """
    logging.basicConfig(level=logging.INFO, format='%(levelname)-8s: %(message)s')
    parser = argparse.ArgumentParser(description=__doc__.strip())
    parser.add_argument('report', choices=['detail', 'aggregate', 'summary'],
                        help='Type of report')
    parser.add_argument('filename', help='Beancount input file')
    parser.add_argument('account', help='Account name')
    parser.add_argument('--start', type=date_utils.parse_date_liberally,
                        help="Start date")
    parser.add_argument('--end', type=date_utils.parse_date_liberally,
                        help="End date; if not set, at the end of star'ts year")
    parser.add_argument('-o', '--output', action='store',
                        help="Output filename for the CSV file")
    args = parser.parse_args()
    calculate_commission = False

    # Setup date interval: default to the current calendar year.
    if args.start is None:
        args.start = datetime.date(datetime.date.today().year, 1, 1)
    if args.end is None:
        args.end = datetime.date(args.start.year + 1, 1, 1)

    entries, errors, options_map = loader.load_file(args.filename)

    # Expand each of the sales legs.
    balances = collections.defaultdict(inventory.Inventory)
    sales = []
    for txn in data.filter_txns(entries):
        # If we got to the end of the period, bail out.
        if txn.date >= args.end:
            break

        # Accumulate the balances before the start date.
        if txn.date < args.start:
            for posting in txn.postings:
                if re.match(args.account, posting.account):
                    balance = balances[posting.account]
                    balance.add_position(posting)
            continue

        # Fallthrough: we're in the period. Process the matching postings.

        # Find reducing postings (i.e., for each lot).
        txn_sales = []
        for posting in txn.postings:
            if re.match(args.account, posting.account):
                balance = balances[posting.account]
                reduced_position, booking = balance.add_position(posting)
                # Set the cost on the posting from the reduced position.
                # FIXME: Eventually that'll happen automatically during the full
                # booking stage.
                if booking == inventory.Booking.REDUCED:
                    posting = posting._replace(cost=reduced_position.cost)

                # If the postings don't have a reference number, ignore them.
                if 'ref' not in txn.meta:
                    continue

                # A reducing posting with a cost is a sale leg.
                if (posting.cost and
                        posting.units.number < ZERO):
                    if not posting.price:
                        logging.error("Missing price on %s", posting)
                    txn_sales.append(data.TxnPosting(txn, posting))

        if txn_sales and calculate_commission:
            # Find total commission.
            for posting in txn.postings:
                if re.search('Commission', posting.account):
                    commission = posting.units.number
                    break
            else:
                commission = ZERO

            # Compute total number of units.
            # NOTE(review): txn_sales holds TxnPosting instances; unpacking
            # each as 'sale, _' binds the txn (not the TxnPosting) to
            # 'sale', so 'sale.posting' below looks wrong. Dead code while
            # calculate_commission is hard-coded False — confirm before
            # enabling.
            tot_units = sum(sale.posting.units.number
                            for sale, _ in txn_sales)

            # Assign a proportion of the commission to each of the sales by
            # inserting it into its posting metadata. This will be processed
            # below.
            for sale, _ in txn_sales:
                fraction = sale.posting.units.number / tot_units
                sale.posting.meta['commission'] = fraction * commission

        sales.extend(txn_sales)

    # Convert into a table of data, full detail of every single lot.
    # All monetary amounts are quantized to cents.
    Q = D('0.01')
    lots = []
    total_loss = collections.defaultdict(D)
    total_gain = collections.defaultdict(D)
    total_adj = collections.defaultdict(D)
    # If no mssb number has been assigned explicitly, assign a random one. I
    # need to figure out how to find those numbers again.
    auto_mssb_number = itertools.count(start=1000000 + 1)
    for sale in sales:
        try:
            sale_no = sale.txn.meta['mssb']
        except KeyError:
            sale_no = next(auto_mssb_number)
        ref = sale.txn.meta['ref']
        units = sale.posting.units
        # Total cost basis and proceeds; units.number is negative on a sale.
        totcost = (-units.number * sale.posting.cost.number).quantize(Q)
        totprice = (-units.number * sale.posting.price.number).quantize(Q)
        commission_meta = sale.posting.meta.get('commission', None)
        if commission_meta is None:
            commission = ZERO
        else:
            if calculate_commission:
                commission = commission_meta
            else:
                # Fetch the commission that was inserted by the commissions plugin.
                commission = commission_meta[0].units.number
        commission = commission.quantize(Q)
        pnl = (totprice - totcost - commission).quantize(Q)
        is_wash = sale.posting.meta.get('wash', False)
        if totprice > totcost:
            total_gain[units.currency] += pnl
        else:
            total_loss[units.currency] += pnl
        if is_wash:
            # Wash sale: record the adjustment and mark the row with 'W'.
            total_adj[units.currency] += pnl
            code = 'W'
            adj = -pnl
        else:
            code = ''
            adj = ''
        days_held = (sale.txn.date - sale.posting.cost.date).days
        term = 'LONG' if days_held >= 365 else 'SHORT'
        lot = LotSale(sale_no, ref,
                      sale.posting.cost.date, sale.txn.date,
                      days_held, term,
                      units.currency,
                      -units.number.quantize(Q),
                      sale.posting.cost.number.quantize(Q),
                      sale.posting.price.number.quantize(Q),
                      totcost, totprice,
                      commission,
                      totprice - commission,
                      pnl, code, adj)
        lots.append(lot)
    # NOTE(review): 'fieldspec' is not defined in this function; presumably
    # a module-level constant — confirm.
    tab_detail = table.create_table(lots, fieldspec)

    # Aggregate by transaction in order to be able to cross-check against the
    # 1099 forms.
    agglots = [aggregate_sales(lots)
               for _, lots in misc_utils.groupby(
                   lambda lot: (lot.no, lot.ref), lots).items()]
    tab_agg = table.create_table(
        sorted(agglots, key=lambda lot: (lot.ref, lot.no)),
        fieldspec)

    # Write out a summary of P/L, one row per currency plus a '*' total row.
    summary_fields = list(enumerate(['Currency', 'Gain', 'Loss', 'Net',
                                     'Adj/Wash']))
    summary = []
    gain = ZERO
    loss = ZERO
    adj = ZERO
    for currency in sorted(total_adj.keys()):
        gain += total_gain[currency]
        loss += total_loss[currency]
        adj += total_adj[currency]
        summary.append((currency, total_gain[currency], total_loss[currency],
                        total_gain[currency] + total_loss[currency],
                        total_adj[currency]))
    summary.append(('*', gain, loss, gain + loss, adj))
    tab_summary = table.create_table(summary, summary_fields)

    # Render the selected report to the console, and optionally to CSV.
    if args.report == 'detail':
        # Render to the console.
        print('Detail of all lots')
        print('=' * 48)
        table.render_table(tab_detail, sys.stdout, 'txt')
        print()
        if args.output:
            with open(args.output, 'w') as file:
                table.render_table(tab_detail, file, 'csv')
    elif args.report == 'aggregate':
        print('Aggregated by trade & Reference Number (to Match 1099/Form8459)')
        print('=' * 48)
        table.render_table(tab_agg, sys.stdout, 'txt')
        print()
        if args.output:
            with open(args.output, 'w') as file:
                table.render_table(tab_agg, file, 'csv')
    elif args.report == 'summary':
        print('Summary')
        print('=' * 48)
        table.render_table(tab_summary, sys.stdout, 'txt')
        print()
        if args.output:
            with open(args.output, 'w') as file:
                table.render_table(tab_summary, file, 'csv')
def main():
    """Report on sales for an account: detail, aggregate, or summary.

    Loads a Beancount file, expands the sale legs in [--start, --end),
    builds the detail/aggregate/summary tables via the helper functions,
    optionally writes all three to an output directory in txt and csv
    formats, and prints the selected report to the console.
    """
    logging.basicConfig(level=logging.INFO, format='%(levelname)-8s: %(message)s')
    parser = argparse.ArgumentParser(description=__doc__.strip())
    parser.add_argument('report', choices=['detail', 'aggregate', 'summary'],
                        help='Type of report')
    parser.add_argument('filename', help='Beancount input file')
    parser.add_argument('account', help='Account name')
    parser.add_argument('--start', type=date_utils.parse_date_liberally,
                        help="Start date")
    parser.add_argument(
        '--end', type=date_utils.parse_date_liberally,
        help="End date; if not set, at the end of star'ts year")
    parser.add_argument(
        '-o', '--output', action='store',
        help="Output directory of all the reports, in txt and csv formats")
    args = parser.parse_args()
    calculate_commission = False

    # Setup date interval: default to the current calendar year.
    if args.start is None:
        args.start = datetime.date(datetime.date.today().year, 1, 1)
    if args.end is None:
        args.end = datetime.date(args.start.year + 1, 1, 1)

    entries, errors, options_map = loader.load_file(args.filename)

    # Create the list of sales.
    sales = expand_sales_legs(entries, args.account,
                              args.start, args.end,
                              calculate_commission)

    # Produce a detailed table.
    lots, tab_detail, totals = create_detailed_table(sales,
                                                     calculate_commission)

    # Aggregate by transaction in order to be able to cross-check against the
    # 1099 forms.
    agglots = aggregate_sales(lots)
    # NOTE(review): 'fieldspec' is not defined in this function; presumably
    # a module-level constant — confirm.
    tab_agg = table.create_table(
        sorted(agglots, key=lambda lot: (lot.ref, lot.no)),
        fieldspec)

    # Create a summary table of P/L.
    tab_summary = create_summary_table(totals)

    # Render all the reports to an output directory.
    if args.output:
        os.makedirs(args.output, exist_ok=True)
        for name, tab in [('detail', tab_detail),
                          ('aggregate', tab_agg),
                          ('summary', tab_summary)]:
            for fmt in 'txt', 'csv':
                with open(path.join(args.output,
                                    '{}.{}'.format(name, fmt)),
                          'w') as outfile:
                    table.render_table(tab, outfile, fmt)

    # Rendering individual reports to the console.
    if args.report == 'detail':
        print('Detail of all lots')
        print('=' * 48)
        table.render_table(tab_detail, sys.stdout, 'txt')
        print()
    elif args.report == 'aggregate':
        print(
            'Aggregated by trade & Reference Number (to Match 1099/Form8459)')
        print('=' * 48)
        table.render_table(tab_agg, sys.stdout, 'txt')
        print()
    elif args.report == 'summary':
        print('Summary')
        print('=' * 48)
        table.render_table(tab_summary, sys.stdout, 'txt')
        print()
def generate_table(self, entries, errors, options_map):
    """Tabulate the (date, rate) pairs of the requested price series."""
    rows = self.get_date_rates(entries)
    field_spec = [(0, "Date", datetime.date.isoformat),
                  (1, "Price", '{:.5f}'.format)]
    return table.create_table(rows, field_spec)
def generate_table(self, entries, errors, options_map):
    """List all base/quote currency pairs present in the price map."""
    price_map = prices.build_price_map(entries)
    rows = [(pair,) for pair in sorted(price_map.forward_pairs)]
    return table.create_table(
        rows, [(0, "Base/Quote", self.formatter.render_commodity)])