def save_entries(self):
    """Write the sorted entries, either split per account or merged.

    When ``self.split_type == 'account'``, entries are grouped by their
    ``_account`` filing metadata and appended to one ``<Account>.bean``
    file per account (rooted at ``self.merge_file`` when it is a
    directory).  Otherwise all entries are printed into
    ``self.merge_file`` through ``self.fold_injector``.
    """
    context = DisplayContext()
    context.set_commas(True)

    if self.split_type == 'account':

        def flush(account, pending):
            # Append a completed per-account group to that account's file.
            if account is None or not pending:
                return
            file_name = pathlib.Path(account.replace(':', '') + '.bean')
            if self.merge_file.is_dir():
                file_name = self.merge_file.joinpath(file_name)
            logger.info(f"Writing {len(pending)} to {file_name.name}.")
            with file_name.open("a") as stream:  # Add overwrite?
                printer.print_entries(
                    pending,
                    file=stream,
                    dcontext=context,
                )

        entries = []
        previous_account = None
        for entry in self.sorted_entries():
            filing_account = entry.meta.pop('_account')
            if filing_account != previous_account:
                # BUG FIX: the original never updated previous_account, so
                # every group was flushed under the *next* entry's account
                # file, and the final group was never written at all.
                flush(previous_account, entries)
                entries = []
                previous_account = filing_account
            entries.append(entry)
        # Flush the trailing group.
        flush(previous_account, entries)
    else:
        # We will want to roll our own
        with self.merge_file.open("w") as outstream:
            printer.print_entries(
                list(self.sorted_entries()),
                file=self.fold_injector(outstream),
                dcontext=context,
            )
def main():
    """Fetch all required prices concurrently and print the Price entries.

    Reads jobs from the command line via process_args(); honors --dry-run
    (list jobs only) and --clobber (keep redundant prices).
    """
    args, jobs, entries, dcontext = process_args()

    # If we're just being asked to list the jobs, do this here.
    if args.dry_run:
        for dprice in jobs:
            print(find_prices.format_dated_price_str(dprice))
        return

    # Fetch all the required prices, processing all the jobs.
    # BUG FIX: use the executor as a context manager so the worker threads
    # are joined deterministically (the original never shut it down), and
    # materialize the lazy map() before the pool is torn down.
    with futures.ThreadPoolExecutor(max_workers=3) as executor:
        price_entries = [
            entry for entry in executor.map(
                functools.partial(fetch_price,
                                  swap_inverted=args.swap_inverted),
                jobs)
            if entry is not None]

    # Sort them by currency, regardless of date (the dates should be close
    # anyhow, and we tend to put them in chunks in the input files anyhow).
    price_entries = sorted(price_entries, key=lambda e: e.currency)

    # Avoid clobber, remove redundant entries.
    if not args.clobber:
        price_entries, ignored_entries = filter_redundant_prices(
            price_entries, entries)
        for entry in ignored_entries:
            logging.info("Ignored to avoid clobber: %s %s",
                         entry.date, entry.currency)

    # Print out the entries.
    printer.print_entries(price_entries, dcontext=dcontext)
def main():
    """Top-level function."""
    parser = argparse.ArgumentParser(description=__doc__.strip())
    parser.add_argument('price_ledger',
                        help="Ledger file containing a list of prices to fetch")
    parser.add_argument('output',
                        help="Output directory to write all output files to.")
    parser.add_argument('-v', '--verbose', action='store_true')
    args = parser.parse_args()

    if args.verbose:
        logging.basicConfig(level=logging.DEBUG,
                            format='%(levelname)-8s: %(message)s')
    os.makedirs(args.output, exist_ok=True)

    # Load the example file.
    logging.info("Reading ledger: %s", args.price_ledger)
    entries, _, _ = loader.load_file(args.price_ledger)

    source = yahoo.Source()
    updated_entries = []
    for price_entry in entries:
        assert isinstance(price_entry, data.Price)
        # Query the source at midnight UTC of the directive's date.
        when = datetime.datetime.combine(price_entry.date, datetime.time(),
                                         tzinfo=tz.tzutc())
        try:
            srcprice = source.get_historical_price(price_entry.currency, when)
        except yahoo.YahooError as exc:
            print("ERROR: {}".format(exc))
        else:
            updated_entries.append(price_entry._replace(
                amount=price_entry.amount._replace(number=srcprice.price)))

    printer.print_entries(updated_entries)
def changeLedger(self):
    """Prompt the user for a ledger file, load it, and echo its entries."""
    filename = askopenfilename()
    if not filename:
        # Dialog was cancelled; keep the current ledger.
        return
    self.ledgerVar.set(filename)
    self.ledger, _, _ = loader.load_file(self.ledgerVar.get())
    printer.print_entries(self.ledger)
    print('')
def test_interline_spacing(self):
    # Round-trip a ledger through the printer and verify that the pattern
    # of blank lines between directives is preserved exactly.
    input_text = textwrap.dedent("""\
      2014-01-01 open Assets:Account1
      2014-01-01 open Assets:Account2
      2014-01-01 open Assets:Cash

      2014-06-08 *
        Assets:Account1 111.00 BEAN
        Assets:Cash

      2014-06-08 * "Narration"
        Assets:Account1 111.00 BEAN
        Assets:Cash

      2014-06-08 * "Payee" "Narration"
        Assets:Account2 111.00 BEAN
        Assets:Cash

      2014-10-01 close Assets:Account2

      2014-10-11 price BEAN 10 USD
      2014-10-12 price BEAN 11 USD
      2014-10-13 price BEAN 11 USD
    """)
    entries, _, __ = loader.load_string(input_text)

    oss = io.StringIO()
    printer.print_entries(entries, file=oss)

    # Compare the spacing signature of input vs printed output rather than
    # the raw text, so alignment differences don't matter.
    expected_classes = characterize_spaces(input_text)
    actual_classes = characterize_spaces(oss.getvalue())
    self.assertEqual(expected_classes, actual_classes)
def context_(ehash=None):
    "Render the before & after context around a transaction entry."
    # Find every loaded entry whose content hash matches the URL parameter.
    matching_entries = [
        entry for entry in app.entries
        if ehash == compare.hash_entry(entry)
    ]

    oss = io.StringIO()
    if len(matching_entries) == 0:
        # No such entry: render an error page.
        print("ERROR: Could not find matching entry for '{}'".format(ehash),
              file=oss)
    elif len(matching_entries) > 1:
        # Hash collision / ambiguity: show the error plus all candidates.
        print("ERROR: Ambiguous entries for '{}'".format(ehash),
              file=oss)
        print(file=oss)
        dcontext = app.options['dcontext']
        printer.print_entries(matching_entries, dcontext, file=oss)
    else:
        # Exactly one match: render the before/after balance context.
        dcontext = app.options['dcontext']
        oss.write("<pre>\n")
        for entry in matching_entries:
            oss.write(
                context.render_entry_context(app.entries, app.options,
                                             dcontext,
                                             entry.meta["filename"],
                                             entry.meta["lineno"]))
        oss.write("</pre>\n")
    return render_global(pagetitle="Context: {}".format(ehash),
                         contents=oss.getvalue())
def assertRoundTrip(self, entries1, errors1):
    """Assert that printing entries and re-parsing them is a fixed point."""
    self.assertFalse(errors1)

    def render(entries):
        # Print the entries, prefixed with the options required so the text
        # can be re-read without plugin processing.
        buf = io.StringIO()
        buf.write('option "plugin_processing_mode" "raw"\n')
        buf.write('option "experiment_query_directive" "TRUE"\n')
        printer.print_entries(entries, file=buf)
        return buf.getvalue()

    # Print out the entries and parse them back in.
    text1 = render(entries1)
    entries2, errors, __ = loader.load_string(text1)
    self.assertEqualEntries(entries1, entries2)
    self.assertFalse(errors)

    # Print out those reparsed and parse them back in.
    text2 = render(entries2)
    entries3, errors, __ = loader.load_string(text2)
    self.assertEqualEntries(entries1, entries3)
    self.assertFalse(errors)

    # The two rendered texts must be identical.
    self.assertEqual(text2, text1)
def test_expect_extract(self, filename, msg):
    """Extract entries from a test file and compare against expected output.

    If an expected file (as <filename>.extract) is not present, we issue a
    warning. Missing expected files can be written out by removing them
    before running the tests.

    Args:
      filename: A string, the name of the file to import using self.importer.
    Raises:
      AssertionError: If the contents differ from the expected file.
    """
    # Import the file.
    entries = extract.extract_from_file(filename, self.importer, None, None)

    # Render the entries to a string.
    oss = io.StringIO()
    printer.print_entries(entries, file=oss)
    string = oss.getvalue()

    expect_filename = '{}.extract'.format(filename)
    if path.exists(expect_filename):
        # BUG FIX: use context managers so the file handles are closed
        # deterministically (the original leaked them until GC).
        with open(expect_filename, encoding='utf-8') as expect_file:
            expect_string = expect_file.read()
        self.assertEqual(expect_string.strip(), string.strip())
    else:
        # Write out the expected file for review.
        with open(expect_filename, 'w', encoding='utf-8') as expect_file:
            expect_file.write(string)
        self.skipTest("Expected file not present; generating '{}'".format(
            expect_filename))
def do_missing_open(filename, args):
    """Print out Open directives that are missing for the given input file.

    This can be useful during demos in order to quickly generate all the
    required Open directives without having to type them manually.

    Args:
      filename: A string, which consists in the filename.
      args: A tuple of the rest of arguments. We're expecting the first
        argument to be an integer as a string.
    """
    from beancount.parser import printer
    from beancount.core import data
    from beancount.core import getters
    from beancount import loader

    entries, errors, options_map = loader.load_file(filename)

    # Figure out which accounts are referenced and which already have an
    # Open directive.
    first_use_map, _ = getters.get_accounts_use_map(entries)
    open_close_map = getters.get_account_open_close(entries)

    # Synthesize an Open at the date of first use for each missing account.
    new_entries = [
        data.Open(data.new_metadata(filename, 0), first_use_date, account,
                  None, None)
        for account, first_use_date in first_use_map.items()
        if account not in open_close_map
    ]

    dcontext = options_map['dcontext']
    printer.print_entries(data.sorted(new_entries), dcontext)
def main():
    """Generate beancount output from gnucash file."""
    options = args()
    # Keep the book open only as long as needed to convert and print.
    with piecash.open_book(options.filename, open_if_lock=True) as book:
        converted = convert.load_entries(book)
        printer.print_entries(converted)
def render_beancount(self, entries, errors, options_map, file):
    """Print the entries as Beancount directives to the given file."""
    # Create a context that renders all numbers with their natural
    # precision, but honors the commas option. This is kept in sync with
    # {2c694afe3140} to avoid a dependency.
    render_context = display_context.DisplayContext()
    render_context.set_commas(options_map['dcontext'].commas)
    printer.print_entries(entries, render_context, file=file)
def run_testset(self, testset):
    """Run one named prediction test set end to end.

    Loads the 'training' and 'extracted' fixtures for the set, runs a
    PredictPostings-decorated dummy importer over them, and compares the
    printed result against the stored 'expected' file, generating that
    file on first run so it can be reviewed and committed.
    """
    training_data = self.load_test_data(testset, 'training')
    extracted_data = self.load_test_data(testset, 'extracted')

    # Minimal importer whose extract() just replays the fixture; the
    # decorator is what's actually under test here.
    @PredictPostings(suggest_accounts=True)
    class DummyImporter(ImporterProtocol):
        def extract(self, file: _FileMemo,
                    existing_entries: List[Union[ALL_DIRECTIVES]]
                    ) -> List[Union[ALL_DIRECTIVES]]:
            return extracted_data

    importer = DummyImporter()
    actualTrxs = importer.extract("dummy-data",
                                  existing_entries=training_data)

    # Render the predicted transactions to text for comparison.
    with io.StringIO() as buffer:
        printer.print_entries(actualTrxs, file=buffer)
        actual = buffer.getvalue()

    expected_file_name = self.generate_file_name(testset, 'expected')
    if os.path.isfile(expected_file_name):
        with open(expected_file_name, 'r') as expected_file:
            expected = expected_file.read()
            self.assertEqual(expected, actual)
    else:
        # First run: write out the expected file instead of asserting.
        with open(expected_file_name, 'w') as expected_file:
            expected_file.write(actual)
def test_extract(self, importer, file, pytestconfig):
    """Extract entries from a test file and compare against expected output."""
    extracted = extract.extract_from_file(file.name, importer, None, None)
    buffer = io.StringIO()
    printer.print_entries(extracted, file=buffer)
    compare_contents_or_generate(buffer.getvalue(),
                                 '{}.extract'.format(file.name),
                                 pytestconfig.getoption("generate", False))
def test_no_valid_account(self, entries, errors, options_map):
    """
    2000-01-01 * "Test"
      Assets:Foo

    2000-01-01 * "Test"
      Assets:Foo
      Assets:Bar
    """
    # NOTE: the docstring above is the ledger input consumed by the test
    # decorator, not documentation — it must not be edited.
    # Printing entries with incomplete/auto postings must not raise.
    oss = io.StringIO()
    printer.print_entries(entries, file=oss)
def render_beancount(self, entries, errors, options_map, file):
    """Render the summarized holdings as Beancount directives."""
    # Aggregation options are meaningless for raw directive output, so
    # reject them up front.
    for attribute in ('currency', 'relative', 'groupby'):
        if getattr(self.args, attribute):
            self.parser.error(
                "'beancount' format does not support --{} option".format(attribute))

    # Get the summarized entries and print them out.
    holdings_entries = get_holdings_entries(entries, options_map)
    printer.print_entries(holdings_entries, options_map['dcontext'], file=file)
def remote_entries(entries, options_map):
    """Load additional entries from remote Google spreadsheets.

    Open directives may declare `document_name` (and optionally
    `document_tab`, `reverse`) metadata pointing at a sheet; each such
    account is fetched and its entries appended to the ledger.

    @param entries: The list of directives loaded so far.
    @param options_map: Parser options; must contain a 'coolbeans' settings
        entry with the Google API secrets file and optional output path.
    @return: A (entries + new_entries, errors) tuple.
    """
    errors = []
    settings = options_map['coolbeans']
    secrets_file = get_setting('google-apis', settings)
    connection = google_connect(secrets_file)

    # Optional file into which the newly fetched entries are also written.
    new_entries_path = None
    new_entries_file = get_setting('new-entries-bean', settings)
    if new_entries_file:
        new_entries_path = pathlib.Path(new_entries_file)

    # Capture the configuration off the Open
    remote_accounts = {}
    for entry in entries:
        if not isinstance(entry, data.Open):
            continue
        document_name = entry.meta.get('document_name', None)
        # First declared currency wins, else the global default.
        default_currency = entry.currencies[
            0] if entry.currencies else DEFAULT_CURRENCY
        if document_name:
            options = dict(document_name=document_name,
                           document_tab=entry.meta.get('document_tab', None),
                           reverse_amount=entry.meta.get('reverse', False),
                           default_currency=default_currency,
                           entry=entry,
                           entry_file=new_entries_path)
            remote_accounts[entry.account] = options

    new_entries = []
    for account, options in remote_accounts.items():
        try:
            new_entries += load_remote_account(connection=connection,
                                               errors=errors,
                                               account=account,
                                               options=options)
        except Exception as exc:
            # Best-effort: one failing sheet must not abort the whole load.
            logger.error(f"while processing {account}", exc_info=exc)

    if new_entries and new_entries_path:
        from beancount.parser import printer
        with new_entries_path.open("w") as stream:
            printer.print_entries(new_entries, file=stream)
        logger.info(
            f"Wrote {len(new_entries)} new account(s) to {new_entries_path}."
        )

    return entries + new_entries, errors
def execute_print(c_print, entries, options_map, file):
    """Print entries from a print statement specification.

    Args:
      c_print: An instance of a compiled EvalPrint statement.
      entries: A list of directives.
      options_map: A parser's option_map.
      file: The output file to print to.
    """
    # Apply the FROM filter first, when one was compiled in.
    if c_print and c_print.c_from is not None:
        entries = filter_entries(c_print.c_from, entries, options_map)
    printer.print_entries(entries, file=file)
def json_to_bean(json_str: dict):
    """Convert a decoded JSON ledger payload back into Beancount text.

    Returns a (text, entries, errors, options) tuple.
    """
    payload = json_load_decimal(json_str)
    entries = [unwrap_entry(item) for item in payload["entries"]]

    # Emit an option header for each operating currency, if any.
    currencies = payload.get("options", {}).get("operating_currency", [])
    options = '\n'.join(f'option "operating_currency" "{c}"'
                        for c in currencies)
    if options:
        options += '\n\n'

    buff = io.StringIO()
    printer.print_entries(entries, dcontext=DISPLAY_CONTEXT, file=buff)
    return (options + buff.getvalue(), entries,
            payload["errors"], payload["options"])
def test_zero_cost(self):
    # A lot held at zero cost must survive a print/re-parse round trip.
    input_string = textwrap.dedent("""
      2000-01-01 open Assets:Invest:Cash
      2000-01-01 open Assets:Invest:Options

      2000-01-03 *
        Assets:Invest:Options 100 HOOLOPT {0 USD, 2000-01-03}
        Assets:Invest:Cash 0 USD
    """)
    entries, errors, options_map = loader.load_string(input_string)
    self.assertFalse(errors)
    oss = io.StringIO()
    printer.print_entries(entries, file=oss)
    self.assertLines(input_string, oss.getvalue())
def write_price_directives(filename: str, pricer: Pricer,
                           days_price_threshold: int):
    """Write a list of required price directives as a Beancount file."""
    price_entries = []
    for (currency, required_date), found_dates in sorted(
            pricer.required_prices.items()):
        # Each required price must have resolved to exactly one found date.
        assert len(found_dates) == 1
        cost_currency, actual_date, rate = found_dates.pop()
        # Skip prices that were found close enough to the required date.
        if (required_date - actual_date).days < days_price_threshold:
            continue
        price_entries.append(
            data.Price({}, required_date, currency,
                       Amount(rate, cost_currency)))

    with open_with_mkdir(filename) as prfile:
        printer.print_entries(price_entries, file=prfile)
def write_expected(outfile: TextIO,
                   account: data.Account,
                   date: Optional[datetime.date],
                   name: Optional[str],
                   entries: data.Entries):
    """Produce the expected output file.

    Args:
      outfile: The file object where to write.
      account: The account name produced by the importer.
      date: The date of the downloads file, produced by the importer.
      name: The filename for filing, produced by the importer.
      entries: The list of entries extracted by the importer.
    """
    # Header comments first, then the extracted directives.
    date_str = date.isoformat() if date else ''
    print(';; Account: {}'.format(account), file=outfile)
    print(';; Date: {}'.format(date_str), file=outfile)
    print(';; Name: {}'.format(name or ''), file=outfile)
    printer.print_entries(entries, file=outfile)
def test_align_with_weight(self, entries, errors, options_map):
    """
    2014-01-01 open Assets:US:Investments:HOOL
    2014-01-01 open Expenses:Commissions
    2014-01-01 open Assets:US:Investments:Cash

    2014-07-01 * "Something"
      Assets:US:Investments:HOOL 45 HOOL {504.30 USD}
      Assets:US:Investments:HOOL 4 HOOL {504.30 USD, 2014-11-11}
      Expenses:Commissions 9.9520 USD
      Assets:US:Investments:Cash -22473.32 CAD @ 1.10 USD
    """
    # NOTE: the docstring above is the ledger input consumed by the test
    # decorator; do not edit it as documentation.
    self.assertFalse(errors)
    dcontext = options_map['dcontext']

    # Without weights: aligned postings, no trailing weight comments.
    oss = io.StringIO()
    printer.print_entries(entries, dcontext, render_weights=False, file=oss)
    expected_str = ''.join([
        '2014-01-01 open Assets:US:Investments:HOOL\n',
        '2014-01-01 open Expenses:Commissions\n',
        '2014-01-01 open Assets:US:Investments:Cash\n',
        '\n',
        '2014-07-01 * "Something"\n',
        '  Assets:US:Investments:HOOL 45 HOOL {504.30 USD} \n',
        '  Assets:US:Investments:HOOL 4 HOOL {504.30 USD, 2014-11-11}\n',
        '  Expenses:Commissions 9.95 USD \n',
        '  Assets:US:Investments:Cash -22473.32 CAD @ 1.1000 USD \n',
    ])
    self.assertEqual(expected_str, oss.getvalue())

    # With weights: each posting carries its weight as a ';' comment.
    oss = io.StringIO()
    printer.print_entries(entries, dcontext, render_weights=True, file=oss)
    expected_str = textwrap.dedent("""\
      2014-01-01 open Assets:US:Investments:HOOL
      2014-01-01 open Expenses:Commissions
      2014-01-01 open Assets:US:Investments:Cash

      2014-07-01 * "Something"
        Assets:US:Investments:HOOL 45 HOOL {504.30 USD} ; 22693.50 USD
        Assets:US:Investments:HOOL 4 HOOL {504.30 USD, 2014-11-11} ; 2017.20 USD
        Expenses:Commissions 9.95 USD ; 9.9520 USD
        Assets:US:Investments:Cash -22473.32 CAD @ 1.1000 USD ; -24720.6520 USD
    """)
    self.assertEqual(expected_str, oss.getvalue())
def execute_print(c_print, entries, options_map, file):
    """Print entries from a print statement specification.

    Args:
      c_print: An instance of a compiled EvalPrint statement.
      entries: A list of directives.
      options_map: A parser's option_map.
      file: The output file to print to.
    """
    # Apply the FROM filter first, when one was compiled in.
    if c_print and c_print.c_from is not None:
        entries = filter_entries(c_print.c_from, entries, options_map)

    # Create a context that renders all numbers with their natural
    # precision, but honors the commas option. This is kept in sync with
    # {2c694afe3140} to avoid a dependency.
    render_ctx = display_context.DisplayContext()
    render_ctx.set_commas(options_map['dcontext'].commas)
    printer.print_entries(entries, render_ctx, file=file)
def main():
    """Top-level function."""
    today = datetime.date.today()

    parser = argparse.ArgumentParser(description=__doc__.strip())
    parser.add_argument('instrument',
                        help="Yahoo!Finance code for financial instrument.")
    # BUG FIX: a positional argument's `default` is only honored when
    # nargs='?' is given; without it, argparse made 'start' and 'end'
    # required and the documented defaults were dead code.
    parser.add_argument(
        'start', nargs='?', action='store',
        type=lambda x: dateutil.parser.parse(x).date(),
        default=today.replace(year=today.year - 1),
        help="Start date of interval. Default is one year ago.")
    parser.add_argument(
        'end', nargs='?', action='store',
        type=lambda x: dateutil.parser.parse(x).date(),
        default=today,
        help="End date of interval. Default is today.")
    args = parser.parse_args()

    # Get the data.
    source = yahoo.Source()
    sprices = source.get_daily_prices(
        args.instrument,
        datetime.datetime.combine(args.start, datetime.time()),
        datetime.datetime.combine(args.end, datetime.time()))
    if sprices is None:
        raise RuntimeError("Could not fetch from {}".format(source))

    # Attempt to infer the right quantization and quantize if succesfull.
    quant = number.infer_quantization_from_numbers([s.price for s in sprices])
    if quant:
        sprices = [sprice._replace(price=sprice.price.quantize(quant))
                   for sprice in sprices]

    # Convert to Price entries and output.
    price_entries = []
    for sprice in sprices:
        price_entries.append(
            data.Price({}, sprice.time.date(), args.instrument,
                       amount.Amount(sprice.price, sprice.quote_currency)))
    printer.print_entries(price_entries)
def test_precision(self, entries, errors, options_map):
    """
    2014-01-01 open Assets:Account
    2014-01-01 open Assets:Cash

    2014-07-01 *
      Assets:Account 1 INT
      Assets:Account 1.1 FP1
      Assets:Account 22.22 FP2
      Assets:Account 333.333 FP3
      Assets:Account 4444.4444 FP4
      Assets:Account 55555.55555 FP5
      Assets:Cash -1 INT
      Assets:Cash -1.1 FP1
      Assets:Cash -22.22 FP2
      Assets:Cash -333.333 FP3
      Assets:Cash -4444.4444 FP4
      Assets:Cash -55555.55555 FP5
    """
    # NOTE: the docstring above is the ledger input consumed by the test
    # decorator; do not edit it as documentation.
    dcontext = options_map['dcontext']
    oss = io.StringIO()
    printer.print_entries(entries, dcontext, file=oss)

    # Each commodity must render at the precision observed in the input.
    expected_str = textwrap.dedent("""
      2014-01-01 open Assets:Account
      2014-01-01 open Assets:Cash
    """) + textwrap.dedent("""\
      2014-07-01 *
        Assets:Account 1 INT
        Assets:Cash -1 INT
        Assets:Account 1.1 FP1
        Assets:Cash -1.1 FP1
        Assets:Account 22.22 FP2
        Assets:Cash -22.22 FP2
        Assets:Account 333.333 FP3
        Assets:Cash -333.333 FP3
        Assets:Account 4444.4444 FP4
        Assets:Cash -4444.4444 FP4
        Assets:Account 55555.55555 FP5
        Assets:Cash -55555.55555 FP5
    """)
    self.assertLines(expected_str, oss.getvalue())
def main():
    """Fetch the latest price for each held commodity with a ticker and
    print the resulting Price directives."""
    import argparse, logging
    logging.basicConfig(level=logging.INFO,
                        format='%(levelname)-8s: %(message)s')
    parser = argparse.ArgumentParser(description=__doc__.strip())
    parser.add_argument('filename', help='Beancount input filename')
    # Note: Historical data is not supported yet because Google Finance will not
    # serve us historical currency rates.
    #
    # parser.add_argument('-d', '--date', action='store', type=parse_date,
    #                     help="The date at which to extract the holdings")
    args = parser.parse_args()
    entries, errors, options_map = loader.load_file(args.filename)

    price_entries = []
    commodities_list = holdings.get_commodities_at_date(entries, options_map)
    for currency, cost_currency, quote_currency, ticker in commodities_list:
        # Ignore the commodity if it has no ticker defined on it.
        if ticker is None:
            continue

        # Select the quote currency if declared, otherwise use the cost
        # currency.
        quote_currency = quote_currency or cost_currency

        # Invert the currencies if the rate is to be inverted.
        if ticker.startswith('1/'):
            ticker = ticker[2:]
            currency, quote_currency = quote_currency, currency

        price, price_time = get_google_finance_latest_price(ticker)

        fileloc = data.new_metadata('<fetch-prices>', 0)
        price_entries.append(
            data.Price(fileloc, price_time.date(), currency,
                       amount.Amount(price, quote_currency)))
    printer.print_entries(price_entries)
def test_very_small_number(self):
    # We want to make sure we never render with scientific notation.
    input_string = textwrap.dedent("""
      2016-11-05 open Expenses:Bank:Conversion
      2016-11-05 open Expenses:Gifts
      2016-11-05 open Expenses:Entertainment:Travel
      2016-11-05 open Assets:Current:Bank:SomeBank

      2016-11-05 * "Aquarium"
        Expenses:Gifts 435 DKK
        Expenses:Entertainment:Travel 340 DKK
        Expenses:Bank:Conversion
        Assets:Current:Bank:SomeBank -204.17 BGN @@ 775.00 DKK
    """)
    entries, errors, options_map = loader.load_string(input_string)
    self.assertFalse(errors)
    oss = io.StringIO()
    printer.print_entries(entries, file=oss)
    # The interpolated residual on the auto posting is tiny; it must print
    # in plain decimal notation, never as 1E-25.
    self.assertRegex(oss.getvalue(), '0.0000000000000000000000001 DKK')
def test_align(self, entries, errors, options_map):
    """
    2014-01-01 open Expenses:Commissions

    2014-07-01 * "Something"
      Expenses:Commissions 20000 USD
      Expenses:Commissions 9.9505 USD
      Expenses:Commissions -20009.9505 USD
    """
    # NOTE: the docstring above is the ledger input consumed by the test
    # decorator; do not edit it as documentation.
    dcontext = options_map['dcontext']
    oss = io.StringIO()
    printer.print_entries(entries, dcontext, file=oss)

    # Amounts of the same commodity are padded to a common precision.
    expected_str = textwrap.dedent("""\
      2014-01-01 open Expenses:Commissions

      2014-07-01 * "Something"
        Expenses:Commissions 20000.0000 USD
        Expenses:Commissions 9.9505 USD
        Expenses:Commissions -20009.9505 USD
    """)
    self.assertEqual(expected_str, oss.getvalue())
def store_sorted_ledger(ledger, path, key_dict=None,
                        title='J\'s Beancount file'):
    """Sort a ledger and write it out as a Beancount file with a header.

    Args:
      ledger: A list of directives to write.
      path: The output filename.
      key_dict: Optional mapping from directive type name to sort priority;
        defaults to Opens first, Balances next, everything else last.
      title: The value for the file's "title" option.
    """
    # BUG FIX: the original declared the dict literal as a default argument;
    # mutable defaults are shared across calls and any mutation by
    # sort_by_date would leak into later calls. Default to None instead.
    if key_dict is None:
        key_dict = {'Transaction': 2, 'Balance': 1, 'Open': 0, 'Other': 2}
    ledger = sort_by_date(ledger, key_dict)
    with open(path, 'w+') as ledgerFile:
        ledgerFile.write('option "title" "{}"\n'.format(title) +
                         'option "operating_currency" "EUR"\n\n')
        printer.print_entries(ledger, file=ledgerFile)
def missing_open(filename):
    """Print Open directives missing in FILENAME.

    This can be useful during demos in order to quickly generate all the
    required Open directives without having to type them manually.
    """
    entries, errors, options_map = loader.load_file(filename)

    # Get accounts usage and open directives.
    first_use_map, _ = getters.get_accounts_use_map(entries)
    open_close_map = getters.get_account_open_close(entries)

    # Synthesize an Open at the date of first use for each missing account.
    new_entries = [
        data.Open(data.new_metadata(filename, 0), first_use_date, account,
                  None, None)
        for account, first_use_date in first_use_map.items()
        if account not in open_close_map
    ]

    printer.print_entries(data.sorted(new_entries), options_map['dcontext'])