def test_validate(self, entries, errors, options_map):
    """
    ;; Just trigger a few errors from here to ensure at least some of the
    ;; plugins tested above are run.

    2014-01-01 open Assets:Investments:Cash
    2014-01-01 open Assets:Investments:Stock   AAPL

    2014-06-23 * "Go positive"
      Assets:Investments:Stock   1 AAPL {41 USD}
      Assets:Investments:Cash  -41 USD

    2014-06-24 * "Go negative from zero"
      Assets:Investments:Stock  -1 AAPL {42 USD}
      Assets:Investments:Cash   42 USD

    2014-06-23 * "Use invalid currency"
      Assets:Investments:Stock   1 HOOG {500 USD}
      Assets:Investments:Cash -500 USD
    """
    # Run the full validation pass over the entries parsed from the
    # docstring above and check both the loader-reported errors and the
    # errors produced directly by validation.validate().
    validation_errors = validation.validate(entries, options_map)

    # Two errors are expected from loading: one from reducing a position
    # below zero, one from posting to an undeclared currency.
    self.assertEqual(2, len(errors))
    # NOTE: assertRegexpMatches was deprecated in Python 3.2 and removed
    # in Python 3.12; assertRegex is the supported spelling.
    self.assertRegex(errors[0].message, 'Reducing position results')
    self.assertRegex(errors[1].message, 'Invalid currency')

    # Direct validation only reports the invalid-currency error.
    self.assertEqual(1, len(validation_errors))
    self.assertRegex(validation_errors[0].message, 'Invalid currency')
def _load(sources, log_timings, extra_validations, encoding):
    """Parse Beancount input, run its transformations and validate it.

    (This is an internal method.)

    This routine does all that is necessary to obtain a list of entries ready
    for realization and working with them. This is the principal call for all
    of the scripts that load a ledger. It returns a list of entries
    transformed and ready for reporting, a list of errors, and parser's
    options dict.

    Args:
      sources: A list of (filename-or-string, is-filename) where the first
        element is a string, with either a filename or a string to be parsed
        directly, and the second argument is a boolean that is true if the
        first is a filename. You may provide a list of such arguments to be
        parsed. Filenames must be absolute paths.
      log_timings: A file object or function to write timings to, or None,
        if it should remain quiet.
      extra_validations: A list of extra validation functions to run after
        loading this list of entries.
      encoding: A string or None, the encoding to decode the input filename
        with.
    Returns:
      See load() or load_string().
    """
    # Sanity-check the input: a list of (source, is-filename) pairs.
    assert isinstance(sources, list) and all(
        isinstance(el, tuple) for el in sources)

    # Accept either a file-like object or a callable for timing output;
    # normalize to a callable.
    if hasattr(log_timings, 'write'):
        log_timings = log_timings.write

    # Parse all the files recursively. Ensure that the entries are sorted before
    # running any processes on them.
    with misc_utils.log_time('parse', log_timings, indent=1):
        entries, parse_errors, options_map = _parse_recursive(
            sources, log_timings, encoding)
        entries.sort(key=data.entry_sortkey)

    # Run interpolation on incomplete entries.
    with misc_utils.log_time('booking', log_timings, indent=1):
        entries, balance_errors = booking.book(entries, options_map)
        parse_errors.extend(balance_errors)

    # Transform the entries (run the plugin pipeline).
    with misc_utils.log_time('run_transformations', log_timings, indent=1):
        entries, errors = run_transformations(entries, parse_errors,
                                              options_map, log_timings)

    # Validate the list of entries, including any caller-supplied extra
    # validation routines.
    with misc_utils.log_time('beancount.ops.validate', log_timings, indent=1):
        valid_errors = validation.validate(entries, options_map, log_timings,
                                           extra_validations)
        errors.extend(valid_errors)

        # Note: We could go hardcore here and further verify that the entries
        # haven't been modified by user-provided validation routines, by
        # comparing hashes before and after. Not needed for now.

    # Compute the input hash so callers can detect when the input files have
    # changed.
    options_map['input_hash'] = compute_input_hash(options_map['include'])

    return entries, errors, options_map
def plugin(entries: List[Directive], options: Dict, viewpoint: str) -> Tuple[List[Directive], List]:
    """Beancount plugin entry point for the share/viewpoint processing.

    Args:
      entries: The list of directives to process.
      options: The Beancount options map.
      viewpoint: The viewpoint to select, or 'nobody' to skip
        viewpoint-specific processing.
    Returns:
      A (entries, errors) tuple, as required by the plugin protocol.
    """
    # Guard against recursive invocation when included ledgers trigger the
    # plugin again: only the top-level invocation does the work.
    is_top_level = include_context['is_top_level']
    if not is_top_level:
        return entries, []
    include_context['is_top_level'] = False
    # BUG FIX: restore the flag in a finally block. Previously it was only
    # reset on the normal return path, so any exception raised during
    # validation or processing left 'is_top_level' stuck at False, silently
    # disabling the plugin for every subsequent load in the same process.
    try:
        logger = ErrorLogger()
        errors = validation.validate(entries, options)
        logger.log_errors(errors)
        entries = process_ledger(entries, viewpoint == 'nobody', options, logger)
        if viewpoint != 'nobody':
            # Viewpoint-dependent steps only make sense when a concrete
            # viewpoint is selected.
            entries = fill_residuals(entries, options)
            entries = select_viewpoint(entries, viewpoint, logger)
        entries = map_residual_accounts(entries, logger)
        entries = open_subaccounts(entries, logger)
        return entries, logger.errors
    finally:
        include_context['is_top_level'] = True
def test_create_open_directive(self, entries, errors, options_map):
    """
    2014-01-01 open Assets:Account1
    2014-01-01 open Income:Misc

    2014-01-15 *
      Income:Misc
      Assets:Account1     1 HOUSE {100 USD}

    2014-01-16 price HOUSE 110 USD
    """
    # Exercise the creation of a new, undeclared income account and verify
    # that Open directives exist for every automatically-created account,
    # so the resulting entries validate regardless of later modifications.

    def opened_accounts(directives):
        # Collect the account names declared by Open directives.
        return {directive.account
                for directive in directives
                if isinstance(directive, data.Open)}

    # Without a subaccount: only an Open directive for the income account
    # should be added.
    new_entries, _ = unrealized.add_unrealized_gains(entries, options_map)
    self.assertEqual({'Income:Misc', 'Assets:Account1', 'Income:Account1'},
                     opened_accounts(new_entries))

    # With a subaccount: new Open directives should appear for the
    # subaccounts as well.
    new_entries, _ = unrealized.add_unrealized_gains(entries, options_map,
                                                     'Gains')
    self.assertEqual({'Income:Misc',
                      'Assets:Account1',
                      'Assets:Account1:Gains',
                      'Income:Account1:Gains'},
                     opened_accounts(new_entries))

    # The augmented entry list must pass validation cleanly.
    self.assertFalse(validation.validate(new_entries, options_map))