def main():
    """Report wash-sale trades for a set of symbols from a Beancount file.

    Renders the trades table to stdout as text and also writes a CSV copy
    to /tmp/washsales.csv.
    """
    import argparse, logging
    logging.basicConfig(level=logging.INFO, format='%(levelname)-8s: %(message)s')
    parser = argparse.ArgumentParser(description=__doc__.strip())
    parser.add_argument('filename', help='Beancount input filename to process')
    parser.add_argument('symbols', nargs='+',
                        help='Substantially identical stock symbols.')
    parser.add_argument('-y', '--year', action='store', type=int,
                        default=datetime.date.today().year-1,
                        help="Calendar year to analyze")
    args = parser.parse_args()

    entries, errors, options_map = loader.load_file(args.filename)

    # Cut off trades at Jan 1 of the year following the analyzed year.
    trades = get_trades(entries, options_map, set(args.symbols),
                        datetime.date(args.year+1, 1, 1))

    field_spec = list(enumerate(
        'type acq_date adj_acq_date sell_date number currency cost '
        'cost_basis price proceeds fee pnl'.split()))
    table_ = table.create_table(trades, field_spec)
    table.render_table(table_, sys.stdout, 'text')

    # BUGFIX: the CSV file handle was previously leaked (opened inline and
    # never closed); a context manager guarantees it is flushed and closed.
    with open('/tmp/washsales.csv', 'w') as csvfile:
        table.render_table(table_, csvfile, 'csv')
def render_htmldiv(self, entries, errors, options_map, file):
    """Render this report as an HTML <div> fragment into the given file."""
    rendered = self.generate_table(entries, errors, options_map)
    table.render_table(rendered, file, 'htmldiv',
                       css_id=self.css_id, css_class=self.css_class)
def render_real_text(self, real_root, price_map, price_date, options_map, file):
    """Render a text table of asset/liability accounts and their last activity date."""
    acctypes = options.get_account_types(options_map)
    stale_rows = []
    # Walk the Assets and Liabilities subtrees only.
    for root_name in (acctypes.assets, acctypes.liabilities):
        subtree = realization.get(real_root, root_name)
        for _first, _cont, racc in realization.dump(subtree):
            last = realization.find_last_active_posting(racc.txn_postings)

            # Skip accounts with no activity or whose last entry closed them.
            # Note: this is O(N), maybe we can store this at realization.
            if last is None or isinstance(last, data.Close):
                continue

            last_date = data.get_entry(last).date
            # Honor the cutoff: drop accounts already updated past it.
            if self.args.cutoff and self.args.cutoff <= last_date:
                continue
            stale_rows.append((racc.account, last_date))

    report = table.create_table(stale_rows,
                                [(0, 'Account'),
                                 (1, 'Last Date', '{}'.format)])
    table.render_table(report, file, 'text')
def main():
    """Extract trades from metadata-annotated postings and report on them.
    """
    logging.basicConfig(level=logging.INFO, format='%(levelname)-8s: %(message)s')
    parser = version.ArgumentParser(description=__doc__.strip())
    parser.add_argument('filename', help='Beancount input filename')
    oparser = parser.add_argument_group('Outputs')
    oparser.add_argument(
        '-o', '--output', action='store',
        help="Filename to output results to (default goes to stdout)")
    oparser.add_argument('-f', '--format', default='text',
                         choices=['text', 'csv'],
                         help="Output format to render to (text, csv)")
    args = parser.parse_args()

    # Load the input file.
    entries, errors, options_map = loader.load_file(args.filename)

    # Get the list of trades.
    trades = extract_trades(entries)

    # Produce a table of all the trades.
    columns = ('units currency cost_currency '
               'buy_date buy_price sell_date sell_price pnl').split()
    header = [
        'Units', 'Currency', 'Cost Currency',
        'Buy Date', 'Buy Price', 'Sell Date', 'Sell Price', 'P/L'
    ]
    body = []
    for aug, red in trades:
        # A reducing posting has negative units; negate to report sold units.
        units = -red.posting.units.number
        buy_price = aug.posting.price.number
        sell_price = red.posting.price.number
        # Quantize P/L to the same precision as the purchase price.
        pnl = (units * (sell_price - buy_price)).quantize(buy_price)
        body.append([
            units,
            red.posting.units.currency,
            red.posting.price.currency,
            aug.txn.date.isoformat(),
            buy_price,
            red.txn.date.isoformat(),
            sell_price,
            pnl
        ])
    trades_table = table.Table(columns, header, body)

    # Render the table as text or CSV.
    # BUGFIX: the output file was previously opened and never closed; close it
    # on all paths, but never close sys.stdout.
    outfile = open(args.output, 'w') if args.output else sys.stdout
    try:
        table.render_table(trades_table, outfile, args.format)
    finally:
        if outfile is not sys.stdout:
            outfile.close()
def main():
    """Report vested positions held in the given accounts as of a date.

    Renders the table to stdout as text, and optionally writes a CSV copy.
    """
    logging.basicConfig(level=logging.INFO, format='%(levelname)-8s: %(message)s')
    parser = argparse.ArgumentParser(description=__doc__.strip())
    parser.add_argument('filename', help='Beancount input file')
    parser.add_argument('accounts', nargs='+', help='Account names')
    parser.add_argument('--date', type=date_utils.parse_date_liberally,
                        help="Date")
    parser.add_argument('-o', '--output', action='store',
                        help="Output directory for the CSV files")
    args = parser.parse_args()
    entries, errors, options_map = loader.load_file(args.filename)

    # Filter out anything after the given date.
    if args.date is None:
        args.date = entries[-1].date
    # NOTE(review): strict '<' excludes entries dated exactly args.date
    # (including the default last-entry date) — confirm this is intended.
    entries = [entry for entry in entries if entry.date < args.date]

    # Compute the balance in each account and process it.
    real_root = realization.realize(entries)
    rows = []
    fieldspec = list(enumerate(['Vest Date', 'Units', 'Instrument', 'Cost']))
    for account in args.accounts:
        real_acc = realization.get(real_root, account)
        if real_acc is None:
            logging.error("Account '%s' does not exist", account)
            continue
        for position in real_acc.balance:
            rows.append((position.cost.date,
                         position.units.number, position.units.currency,
                         position.cost.number, position.cost.currency))
    rows = sorted(rows)
    tbl = table.create_table(rows, fieldspec)
    table.render_table(tbl, sys.stdout, 'text')

    if args.output:
        # BUGFIX: was 'path.join(args.output)', a no-op single-argument join.
        # NOTE(review): the help text calls --output a directory, but it is
        # opened here as a single CSV file path — confirm intended semantics.
        with open(args.output, 'w') as file:
            table.render_table(tbl, file, 'csv')
def test_generate_table(self):
    """Smoke-test rendering the table in every supported output format."""
    table_object = self.test_create_table()
    for output_format in ('csv', 'txt', 'html'):
        oss = io.StringIO()
        table.render_table(table_object, oss, output_format)
        self.assertTrue(oss.getvalue())
def render_csv(self, entries, errors, options_map, file):
    """Render this report in CSV format into the given file."""
    generated = self.generate_table(entries, errors, options_map)
    table.render_table(generated, file, 'csv')
def main():
    """Report sales from a Beancount account: detail, aggregate or summary.

    Optionally writes all three reports (txt and csv) to an output directory,
    then prints the selected report to stdout.
    """
    logging.basicConfig(level=logging.INFO, format='%(levelname)-8s: %(message)s')
    parser = argparse.ArgumentParser(description=__doc__.strip())
    parser.add_argument('report', choices=['detail', 'aggregate', 'summary'],
                        help='Type of report')
    parser.add_argument('filename', help='Beancount input file')
    parser.add_argument('account', help='Account name')
    parser.add_argument('--start', type=date_utils.parse_date_liberally,
                        help="Start date")
    # BUGFIX: corrected user-facing typo "star'ts year" -> "start's year".
    parser.add_argument('--end', type=date_utils.parse_date_liberally,
                        help="End date; if not set, at the end of start's year")
    parser.add_argument('-o', '--output', action='store',
                        help="Output directory of all the reports, in txt and csv formats")
    args = parser.parse_args()

    calculate_commission = False

    # Setup date interval: default to the current calendar year, end exclusive.
    if args.start is None:
        args.start = datetime.date(datetime.date.today().year, 1, 1)
    if args.end is None:
        args.end = datetime.date(args.start.year + 1, 1, 1)

    entries, errors, options_map = loader.load_file(args.filename)

    # Create the list of sales.
    sales = expand_sales_legs(entries, args.account,
                              args.start, args.end,
                              calculate_commission)

    # Produce a detailed table.
    lots, tab_detail, totals = create_detailed_table(sales,
                                                     calculate_commission)

    # Aggregate by transaction in order to be able to cross-check against the
    # 1099 forms.
    agglots = aggregate_sales(lots)
    # NOTE(review): 'fieldspec' is not defined in this function; it appears to
    # be a module-level column specification — confirm it exists at file scope.
    tab_agg = table.create_table(sorted(agglots, key=lambda lot: (lot.ref, lot.no)),
                                 fieldspec)

    # Create a summary table of P/L.
    tab_summary = create_summary_table(totals)

    # Render all the reports to an output directory.
    if args.output:
        os.makedirs(args.output, exist_ok=True)
        for name, tab in [('detail', tab_detail),
                          ('aggregate', tab_agg),
                          ('summary', tab_summary)]:
            for fmt in 'txt', 'csv':
                with open(path.join(args.output, '{}.{}'.format(name, fmt)),
                          'w') as outfile:
                    table.render_table(tab, outfile, fmt)

    # Rendering individual reports to the console.
    if args.report == 'detail':
        print('Detail of all lots')
        print('=' * 48)
        table.render_table(tab_detail, sys.stdout, 'txt')
        print()
    elif args.report == 'aggregate':
        print('Aggregated by trade & Reference Number (to Match 1099/Form8459)')
        print('=' * 48)
        table.render_table(tab_agg, sys.stdout, 'txt')
        print()
    elif args.report == 'summary':
        print('Summary')
        print('=' * 48)
        table.render_table(tab_summary, sys.stdout, 'txt')
        print()