Example #1
    def _initialize(self, options_map):
        """Compute the list of filtered entries and realization trees."""

        # Get the filtered list of entries.
        self.entries, self.begin_index, self.price_date = self.apply_filter(
            self.all_entries, options_map)

        # Compute the list of entries for the opening balances sheet.
        self.opening_entries = (self.entries[:self.begin_index]
                                if self.begin_index is not None
                                else [])

        # Compute the list of entries that includes transfer entries of the
        # income/expenses amounts to the balance sheet's equity (as "net
        # income"). This is used to render the end-period balance sheet, with
        # the current period's net income, closing the period.
        self.closing_entries = summarize.cap_opt(self.entries, options_map)

        # Realize the three sets of entries.
        account_types = options.get_account_types(options_map)
        with misc_utils.log_time('realize_opening', logging.info):
            self.opening_real_accounts = realization.realize(self.opening_entries,
                                                             account_types)

        with misc_utils.log_time('realize', logging.info):
            self.real_accounts = realization.realize(self.entries,
                                                     account_types)

        with misc_utils.log_time('realize_closing', logging.info):
            self.closing_real_accounts = realization.realize(self.closing_entries,
                                                             account_types)

        assert self.real_accounts is not None
        assert self.closing_real_accounts is not None
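For orientation, a hedged sketch of inspecting one of the realized trees built above; realization.get() and realization.iter_children() are assumed helpers from beancount.core.realization, and the account name is a placeholder.

from beancount.core import realization

# Assumed helpers from beancount.core.realization: look up one node of a
# realized tree (e.g. self.real_accounts above) and walk its children.
assets = realization.get(real_accounts, 'Assets')
if assets is not None:
    for child in realization.iter_children(assets):
        print(child.account, child.balance)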
Example #2
def _load(sources, log_timings, extra_validations, encoding):
    """Parse Beancount input, run its transformations and validate it.

    (This is an internal function.)
    This routine does all that is necessary to obtain a list of entries ready
    for realization and for working with them. This is the principal call for
    scripts that load a ledger. It returns a list of entries transformed and
    ready for reporting, a list of errors, and the parser's options dict.

    Args:
      sources: A list of (filename-or-string, is-filename) tuples, where the
        first element is either a filename or a string to be parsed directly,
        and the second element is a boolean that is true if the first is a
        filename. Filenames must be absolute paths.
      log_timings: A file object or function to write timings to,
        or None, if it should remain quiet.
      extra_validations: A list of extra validation functions to run after loading
        this list of entries.
      encoding: A string or None, the encoding to decode the input filename with.
    Returns:
      See load() or load_string().
    """
    assert isinstance(sources, list) and all(
        isinstance(el, tuple) for el in sources)

    if hasattr(log_timings, 'write'):
        log_timings = log_timings.write

    # Parse all the files recursively. Ensure that the entries are sorted before
    # running any processes on them.
    with misc_utils.log_time('parse', log_timings, indent=1):
        entries, parse_errors, options_map = _parse_recursive(
            sources, log_timings, encoding)
        entries.sort(key=data.entry_sortkey)

    # Run interpolation on incomplete entries.
    with misc_utils.log_time('booking', log_timings, indent=1):
        entries, balance_errors = booking.book(entries, options_map)
        parse_errors.extend(balance_errors)

    # Transform the entries.
    with misc_utils.log_time('run_transformations', log_timings, indent=1):
        entries, errors = run_transformations(entries, parse_errors,
                                              options_map, log_timings)

    # Validate the list of entries.
    with misc_utils.log_time('beancount.ops.validate', log_timings, indent=1):
        valid_errors = validation.validate(entries, options_map, log_timings,
                                           extra_validations)
        errors.extend(valid_errors)

        # Note: We could go hardcore here and further verify that the entries
        # haven't been modified by user-provided validation routines, by
        # comparing hashes before and after. Not needed for now.

    # Compute the input hash.
    options_map['input_hash'] = compute_input_hash(options_map['include'])

    return entries, errors, options_map
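A usage sketch of the public wrapper around _load() (assumed to be beancount.loader.load_file, as in the later examples), passing a writable object so the hasattr(log_timings, 'write') branch above kicks in:

import sys
from beancount import loader

# Timings for the parse/booking/transformation/validation stages above are
# written to stderr; the ledger path is a placeholder.
entries, errors, options_map = loader.load_file(
    '/absolute/path/to/ledger.beancount',
    log_timings=sys.stderr)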
Example #3
    def test_log_time(self):
        with test_utils.capture() as stdout:
            with misc_utils.log_time('test-op', None):
                time.sleep(0.1)
        self.assertEqual("", stdout.getvalue())

        with test_utils.capture() as stdout:
            with misc_utils.log_time('test-op', sys.stdout.write):
                time.sleep(0.1)
        self.assertRegex(stdout.getvalue(), "Operation")
        self.assertRegex(stdout.getvalue(), "Time")
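The test pins down log_time's contract: silent when the logging function is None, otherwise a message mentioning "Operation" and "Time". A minimal compatible sketch (an assumption, not Beancount's actual implementation):

import contextlib
import time

@contextlib.contextmanager
def log_time(op_name, log_func, indent=0):
    """Time the enclosed block and report through log_func, if given."""
    start = time.time()
    try:
        yield
    finally:
        if log_func is not None:
            log_func('{}Operation: {!r:30} Time: {:6.0f} ms'.format(
                '  ' * indent, op_name, (time.time() - start) * 1000))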
Example #4
def main(filename: str, verbose: bool, no_cache: bool, cache_filename: str,
         auto: bool):
    """Parse, check and realize a beancount ledger.

    This also measures the time it takes to run all these steps.

    """
    use_cache = not no_cache

    # Insert auto plugins. This is convenient for importers because when
    # generating a subset of transactions oftentimes we don't have the
    # contextual account and commodity creation routines. See {4ec6a3205b6c}.
    if auto:
        loader.PLUGINS_AUTO.extend(loader.DEFAULT_PLUGINS_AUTO)

    if verbose:
        logging.basicConfig(level=logging.INFO,
                            format='%(levelname)-8s: %(message)s')

    # Override loader caching setup.
    if not use_cache or cache_filename:
        loader.initialize(use_cache, cache_filename)

    with misc_utils.log_time('beancount.loader (total)', logging.info):
        # Load up the file, print errors, checking and validation are invoked
        # automatically.
        entries, errors, _ = loader.load_file(
            filename,
            log_timings=logging.info,
            log_errors=sys.stderr,
            # Force slow and hardcore validations, just for check.
            extra_validations=validation.HARDCORE_VALIDATIONS)

    # Exit with an error code if there were any errors.
    sys.exit(1 if errors else 0)
Example #5
def main():
    parser = argparse.ArgumentParser("Coolbeans Report Runner")

    parser.add_argument(
        '-e',
        '-f',
        '--existing',
        '--previous',
        metavar='BEANCOUNT_FILE',
        default=None,
        help=('Beancount file or existing entries for de-duplication '
              '(optional)'))
    parser.add_argument('-r',
                        '--rules',
                        action='store',
                        metavar='RULES_FILENAME',
                        help=('Rules specification file. '
                              'This is a YAML file with Match Rules.'))
    parser.add_argument('filename',
                        metavar='BEANCOUNT_FILE',
                        help='Beancount input file to load.')
    parser.add_argument('-t', '--timings',
                        action='store_true',
                        help='Print timings.')
    parser.add_argument('-q', '--no-errors',
                        action='store_true',
                        help='Do not report errors.')
    args = parser.parse_args()

    # No extra validations by default.
    extra_validations = None

    logging.basicConfig(
        level=logging.INFO if args.timings else logging.WARNING,
        format='%(levelname)-8s: %(message)s')

    # Parse the input file.
    errors_file = None if args.no_errors else sys.stderr
    with misc_utils.log_time('beancount.loader (total)', logging.info):
        entries, errors, options_map = loader.load_file(
            args.filename,
            log_timings=logging.info,
            log_errors=errors_file,
            extra_validations=extra_validations)
Example #6
def main():
    parser = argparse.ArgumentParser(description=__doc__)

    parser.add_argument('filename', help='Beancount input filename.')

    parser.add_argument('-v',
                        '--verbose',
                        action='store_true',
                        help='Print timings.')

    opts = parser.parse_args()

    if opts.verbose:
        logging.basicConfig(level=logging.INFO,
                            format='%(levelname)-8s: %(message)s')

    with misc_utils.log_time('beancount.loader (total)', logging.info):
        # Load up the file, print errors, checking and validation are invoked
        # automatically.
        entries, errors, _ = loader.load_file(
            opts.filename,
            log_timings=logging.info,
            log_errors=sys.stderr,
            # Force slow and hardcore validations, just for check.
            extra_validations=validation.HARDCORE_VALIDATIONS)

    # Exit with an error code if there were any errors, so this can be used in a
    # shell conditional.
    return 1 if errors else 0
Example #7
def validate(entries, options_map, log_timings=None, extra_validations=None):
    """Perform all the standard checks on parsed contents.

    Args:
      entries: A list of directives.
      options_map: An options map.
      log_timings: An optional function to use for logging the time of individual
        operations.
      extra_validations: A list of extra validation functions to run after loading
        this list of entries.
    Returns:
      A list of new errors, if any were found.
    """
    validation_tests = VALIDATIONS
    if extra_validations:
        validation_tests += extra_validations

    # Run the various validation routines defined above.
    errors = []
    for validation_function in validation_tests:
        with misc_utils.log_time('function: {}'.format(
                validation_function.__name__),
                                 log_timings,
                                 indent=2):
            new_errors = validation_function(entries, options_map)
        errors.extend(new_errors)

    return errors
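Since each test is invoked as validation_function(entries, options_map) and must return a list of errors, an extra validation can be a plain function; this hypothetical one uses a namedtuple error with the usual (source, message, entry) fields:

import collections
import datetime

FutureDateError = collections.namedtuple('FutureDateError',
                                         'source message entry')

def validate_no_future_dates(entries, options_map):
    """Flag directives dated after today (illustrative only)."""
    today = datetime.date.today()
    return [FutureDateError(entry.meta, 'Directive is dated in the future', entry)
            for entry in entries
            if entry.date > today]

errors = validate(entries, options_map,
                  extra_validations=[validate_no_future_dates])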
Example #8
def main(filename, verbose, no_cache, cache_filename):
    """Parse, check and realize a beancount ledger.

    This also measures the time it takes to run all these steps.

    """
    use_cache = not no_cache

    if verbose:
        logging.basicConfig(level=logging.INFO, format='%(levelname)-8s: %(message)s')

    # Override loader caching setup.
    if not use_cache or cache_filename:
        loader.initialize(use_cache, cache_filename)

    with misc_utils.log_time('beancount.loader (total)', logging.info):
        # Load up the file, print errors, checking and validation are invoked
        # automatically.
        entries, errors, _ = loader.load_file(
            filename,
            log_timings=logging.info,
            log_errors=sys.stderr,
            # Force slow and hardcore validations, just for check.
            extra_validations=validation.HARDCORE_VALIDATIONS)

    # Exit with an error code if there were any errors.
    sys.exit(1 if errors else 0)
Example #9
def main(filename, database):
    """Convert a Beancount ledger into an SQL database.

    Write ledger FILENAME contents into SQLite database DATABASE.

    """
    logging.basicConfig(level=logging.INFO,
                        format='%(levelname)-8s: %(message)s')

    entries, errors, options_map = loader.load_file(filename,
                                                    log_timings=logging.info,
                                                    log_errors=sys.stderr)

    # Delete previous database if it already exists.
    if path.exists(database):
        os.remove(database)

    # The only supported DBAPI-2.0 backend for now is SQLite3.
    connection = dbapi.connect(database)

    setup_decimal_support()
    for function in [
            output_common,
            output_transactions,
            OpenWriter(),
            CloseWriter(),
            PadWriter(),
            BalanceWriter(),
            NoteWriter(),
            PriceWriter(),
            DocumentWriter(),
    ]:
        step_name = getattr(function, '__name__', function.__class__.__name__)
        with misc_utils.log_time(step_name, logging.info):
            function(connection, entries)
Example #10
    def apply_filter(self, entries, options_map):
        # Clamp to the desired period.
        begin_date = datetime.date(self.year, self.first_month, 1)
        end_date = datetime.date(self.year + 1, self.first_month, 1)
        with misc_utils.log_time('clamp', logging.info):
            entries, index = summarize.clamp_opt(entries, begin_date, end_date,
                                                 options_map)
        return entries, index
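Tying this back to Example #1: the returned index marks where the clamped period starts, so a caller (here a hypothetical report object) can slice off the opening balances:

entries, begin_index = report.apply_filter(all_entries, options_map)
opening_entries = entries[:begin_index] if begin_index is not None else []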
Example #11
def main():
    parser = argparse.ArgumentParser(description=__doc__)

    default_file = os.environ.get(BEAN_FILE_ENV, None)

    parser.add_argument(
        '-e', '--bean',
        metavar=BEAN_FILE_ENV,
        default=default_file,
        required=False,
        type=str,
        dest='bean_file',
        help=f"Beancount file to read and verify. {'Default is '+ default_file if default_file else ''}"
    )
    parser.add_argument(
        '-v', '--verbose',
        action='store_true',
        help='Print timings.'
    )
    parser.add_argument(
        '--pdb',
        action='store_true',
        help='Drop into a debugger on error'
    )
    parser.add_argument(
        '--logging-conf',
        type=str,
        default='./logging.yaml',
        help='logging.yaml file to use.  Default is ./logging.yaml'
    )
    args = parser.parse_args()

    logging_conf: Path = Path(args.logging_conf)

    logging_config(
        config_file=logging_conf,
        level=logging.DEBUG if args.verbose else logging.INFO
    )

    with misc_utils.log_time('beancount.loader (total)', logging.info):
        # Load up the file, print errors, checking and validation are invoked
        # automatically.
        try:
            entries, errors, _ = loader.load_file(
                args.bean_file,
                log_timings=logging.info,
                log_errors=sys.stderr,
                # Force slow and hardcore validations, just for check.
                extra_validations=validation.HARDCORE_VALIDATIONS)
        except Exception as exc:
            if args.pdb:
                pdb.post_mortem(exc.__traceback__)
            else:
                raise
Example #12
def main():
    parser = version.ArgumentParser(description=__doc__)

    parser.add_argument('filename', help='Beancount input filename.')

    parser.add_argument('-v',
                        '--verbose',
                        action='store_true',
                        help='Print timings.')

    # Note: These are useful during development. We need to devise a global
    # mechanism that will work from all the invocation programs, embedded in the
    # loader.
    parser.add_argument('-C',
                        '--no-cache',
                        action='store_false',
                        dest='use_cache',
                        default=True,
                        help='Disable the cache from the command-line.')
    parser.add_argument('--cache-filename',
                        action='store',
                        help='Override the name of the cache')

    opts = parser.parse_args()

    if opts.verbose:
        logging.basicConfig(level=logging.INFO,
                            format='%(levelname)-8s: %(message)s')

    # Override loader caching setup if disabled or if the filename is
    # overridden.
    if not opts.use_cache or opts.cache_filename:
        loader.initialize(opts.use_cache, opts.cache_filename)

    with misc_utils.log_time('beancount.loader (total)', logging.info):
        # Load up the file, print errors, checking and validation are invoked
        # automatically.
        entries, errors, _ = loader.load_file(
            opts.filename,
            log_timings=logging.info,
            log_errors=sys.stderr,
            # Force slow and hardcore validations, just for check.
            extra_validations=validation.HARDCORE_VALIDATIONS)

    # Exit with an error code if there were any errors, so this can be used in a
    # shell conditional.
    return 1 if errors else 0
Example #13
def main(filename: str, verbose: bool, no_cache: bool, cache_filename: str, auto: bool):
    """Parse, check and realize a beancount ledger.

    This also measures the time it takes to run all these steps.

    """
    use_cache = not no_cache

    try:
        if auto:
            # Insert auto plugins. This is convenient for importers
            # because when generating a subset of transactions
            # oftentimes we don't have the contextual account and
            # commodity creation routines. See {4ec6a3205b6c}.
            old_plugins_auto = loader.PLUGINS_AUTO[:]
            loader.PLUGINS_AUTO.extend(loader.DEFAULT_PLUGINS_AUTO)

        if verbose:
            logging.basicConfig(level=logging.INFO, format='%(levelname)-8s: %(message)s')

        # Override loader caching setup.
        if not use_cache or cache_filename:
            loader.initialize(use_cache, cache_filename)

        with misc_utils.log_time('beancount.loader (total)', logging.info):
            # Load up the file, print errors, checking and validation
            # are invoked automatically.
            entries, errors, _ = loader.load_file(
                filename,
                log_timings=logging.info,
                log_errors=sys.stderr,
                # Force slow and hardcore validations, just for check.
                extra_validations=validation.HARDCORE_VALIDATIONS)
    finally:
        if auto:
            # Remove auto plugins. This is not necessary when this
            # code is run as script but it is needed when run as part
            # of the test suite (which does not span a new Python
            # interpreter for each script invocation test).
            loader.PLUGINS_AUTO[:] = old_plugins_auto

    # Exit with an error code if there were any errors.
    sys.exit(1 if errors else 0)
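The save/extend/restore dance around loader.PLUGINS_AUTO generalizes to a small context manager; this is a sketch of that pattern, not part of the loader API:

import contextlib

@contextlib.contextmanager
def auto_plugins():
    """Temporarily enable the default auto plugins, restoring them on exit."""
    saved = loader.PLUGINS_AUTO[:]
    loader.PLUGINS_AUTO.extend(loader.DEFAULT_PLUGINS_AUTO)
    try:
        yield
    finally:
        loader.PLUGINS_AUTO[:] = saved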
Example #14
def main():
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument('filename', help='Beancount input filename')
    parser.add_argument('database', help='Filename of database file to create')
    args = parser.parse_args()
    logging.basicConfig(level=logging.INFO,
                        format='%(levelname)-8s: %(message)s')

    entries, errors, options_map = loader.load_file(args.filename,
                                                    log_timings=logging.info,
                                                    log_errors=sys.stderr)

    # Delete previous database if it already exists.
    if path.exists(args.database):
        os.remove(args.database)

    # The only supported DBAPI-2.0 backend for now is SQLite3.
    connection = dbapi.connect(args.database)

    setup_decimal_support()
    for function in [
            output_common,
            output_transactions,
            OpenWriter(),
            CloseWriter(),
            PadWriter(),
            BalanceWriter(),
            NoteWriter(),
            PriceWriter(),
            DocumentWriter(),
    ]:
        step_name = getattr(function, '__name__', function.__class__.__name__)
        with misc_utils.log_time(step_name, logging.info):
            function(connection, entries)

    return 0
Example #15
def run_transformations(entries, parse_errors, options_map, log_timings):
    """Run the various transformations on the entries.

    This is where entries are being synthesized, checked, plugins are run, etc.

    Args:
      entries: A list of directives as read from the parser.
      parse_errors: A list of errors so far.
      options_map: An options dict as read from the parser.
      log_timings: A function to write timing log entries to, or None, if it
        should be quiet.
    Returns:
      A list of modified entries, and a list of errors, also possibly modified.
    """
    # A list of errors to extend (make a copy to avoid modifying the input).
    errors = list(parse_errors)

    # Process the plugins.
    if options_map['plugin_processing_mode'] == 'raw':
        plugins_iter = options_map["plugin"]
    elif options_map['plugin_processing_mode'] == 'default':
        plugins_iter = itertools.chain(DEFAULT_PLUGINS_PRE,
                                       options_map["plugin"],
                                       DEFAULT_PLUGINS_POST)
    else:
        raise ValueError("Invalid value for plugin_processing_mode: {}".format(
            options_map['plugin_processing_mode']))

    for plugin_name, plugin_config in plugins_iter:

        # Issue a warning on a renamed module.
        renamed_name = RENAMED_MODULES.get(plugin_name, None)
        if renamed_name:
            warnings.warn(
                "Deprecation notice: Module '{}' has been renamed to '{}'; "
                "please adjust your plugin directive.".format(
                    plugin_name, renamed_name))
            plugin_name = renamed_name

        # Try to import the module.
        #
        # Note: We intercept import errors and continue but let other plugin
        # import time exceptions fail a run, by choice.
        try:
            module = importlib.import_module(plugin_name)
            if not hasattr(module, '__plugins__'):
                continue
        except ImportError:
            # Upon failure, just issue an error.
            formatted_traceback = traceback.format_exc().replace("\n", "\n  ")
            errors.append(
                LoadError(
                    data.new_metadata("<load>", 0),
                    'Error importing "{}": {}'.format(plugin_name,
                                                      formatted_traceback),
                    None))
            continue

        # Apply it.
        with misc_utils.log_time(plugin_name, log_timings, indent=2):
            # Run each transformer function in the plugin.
            for function_name in module.__plugins__:
                if isinstance(function_name, str):
                    # Support plugin functions provided by name.
                    callback = getattr(module, function_name)
                else:
                    # Support function types directly, not just names.
                    callback = function_name

                # Provide arguments if config is provided.
                # TODO(blais): Make this consistent in v3, not conditional.
                args = () if plugin_config is None else (plugin_config, )

                # Catch all exceptions raised in running the plugin, except exits.
                try:
                    entries, plugin_errors = callback(entries, options_map,
                                                      *args)
                    errors.extend(plugin_errors)
                except Exception as exc:
                    # Allow the user to exit in a plugin.
                    if isinstance(exc, SystemExit):
                        raise

                    # Upon failure, just issue an error.
                    formatted_traceback = traceback.format_exc().replace(
                        "\n", "\n  ")
                    errors.append(
                        LoadError(
                            data.new_metadata("<load>", 0),
                            'Error applying plugin "{}": {}'.format(
                                plugin_name, formatted_traceback), None))
                    continue

            # Ensure that the entries are sorted. Don't trust the plugins
            # themselves.
            entries.sort(key=data.entry_sortkey)

    return entries, errors
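The loop above implies the plugin protocol: a module exposes __plugins__ naming transformer functions, each called with (entries, options_map) plus an optional config argument and returning (entries, errors). A minimal conforming module, as a hypothetical file noop_plugin.py:

"""A no-op Beancount plugin, for illustration only."""

__plugins__ = ('noop',)

def noop(entries, options_map, config=None):
    """Return the entries unchanged; a real plugin would transform them."""
    errors = []
    return entries, errors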
Example #16
def _parse_recursive(sources, log_timings, encoding=None):
    """Parse Beancount input, run its transformations and validate it.

    Recursively parse a list of files or strings and their include files and
    return an aggregate of parsed directives, errors, and the top-level
    options-map. If the same file is being parsed twice, ignore it and issue an
    error.

    Args:
      sources: A list of (filename-or-string, is-filename) tuples, where the
        first element is either a filename or a string to be parsed directly,
        and the second element is a boolean that is true if the first is a
        filename. Filenames must be absolute paths.
      log_timings: A function to write timings to, or None, if it should remain quiet.
      encoding: A string or None, the encoding to decode the input filename with.
    Returns:
      A tuple of (entries, parse_errors, options_map).
    """
    assert isinstance(sources, list) and all(
        isinstance(el, tuple) for el in sources)

    # Current parse state.
    entries, parse_errors = [], []
    options_map = None

    # A stack of sources to be parsed.
    source_stack = list(sources)

    # A list of absolute filenames that have been parsed in the past, used to
    # detect and avoid duplicates (cycles).
    filenames_seen = set()

    with misc_utils.log_time('beancount.parser.parser', log_timings, indent=1):
        while source_stack:
            source, is_file = source_stack.pop(0)
            is_top_level = options_map is None

            # If the file is encrypted, read it in and process it as a string.
            if is_file:
                cwd = path.dirname(source)
                source_filename = source
                if encryption.is_encrypted_file(source):
                    source = encryption.read_encrypted_file(source)
                    is_file = False
            else:
                # If we're parsing a string, the CWD is the current process
                # working directory.
                cwd = os.getcwd()
                source_filename = None

            if is_file:
                # All filenames here must be absolute.
                assert path.isabs(source)
                filename = path.normpath(source)

                # Check for file previously parsed... detect duplicates.
                if filename in filenames_seen:
                    parse_errors.append(
                        LoadError(
                            data.new_metadata("<load>", 0),
                            'Duplicate filename parsed: "{}"'.format(filename),
                            None))
                    continue

                # Check for a file that does not exist.
                if not path.exists(filename):
                    parse_errors.append(
                        LoadError(data.new_metadata("<load>", 0),
                                  'File "{}" does not exist'.format(filename),
                                  None))
                    continue

                # Parse a file from disk directly.
                filenames_seen.add(filename)
                with misc_utils.log_time('beancount.parser.parser.parse_file',
                                         log_timings,
                                         indent=2):
                    (src_entries, src_errors,
                     src_options_map) = parser.parse_file(filename,
                                                          encoding=encoding)

                cwd = path.dirname(filename)
            else:
                # Encode the contents if necessary.
                if encoding:
                    if isinstance(source, bytes):
                        source = source.decode(encoding)
                    source = source.encode('ascii', 'replace')

                # Parse a string buffer from memory.
                with misc_utils.log_time(
                        'beancount.parser.parser.parse_string',
                        log_timings,
                        indent=2):
                    (src_entries, src_errors,
                     src_options_map) = parser.parse_string(
                         source, source_filename)

            # Merge the entries resulting from the parsed file.
            entries.extend(src_entries)
            parse_errors.extend(src_errors)

            # We need the options from the very top file only (the very
            # first file being processed). No merging of options should
            # occur.
            if is_top_level:
                options_map = src_options_map
            else:
                aggregate_options_map(options_map, src_options_map)

            # Add includes to the list of sources to process. chdir() for glob,
            # which uses it indirectly.
            include_expanded = []
            with file_utils.chdir(cwd):
                for include_filename in src_options_map['include']:
                    matched_filenames = glob.glob(include_filename,
                                                  recursive=True)
                    if matched_filenames:
                        include_expanded.extend(matched_filenames)
                    else:
                        parse_errors.append(
                            LoadError(
                                data.new_metadata("<load>", 0),
                                'File glob "{}" does not match any files'.
                                format(include_filename), None))
            for include_filename in include_expanded:
                if not path.isabs(include_filename):
                    include_filename = path.join(cwd, include_filename)
                include_filename = path.normpath(include_filename)

                # Add the include filenames to be processed later.
                source_stack.append((include_filename, True))

    # Make sure we have at least a dict of valid options.
    if options_map is None:
        options_map = options.OPTIONS_DEFAULTS.copy()

    # Save the set of parsed filenames in options_map.
    options_map['include'] = sorted(filenames_seen)

    return entries, parse_errors, options_map
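A hypothetical direct call, matching the documented sources format (one absolute filename plus one raw string) and staying quiet about timings:

entries, parse_errors, options_map = _parse_recursive(
    [('/absolute/path/main.beancount', True),
     ('2024-01-01 open Assets:Cash\n', False)],
    log_timings=None)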
Example #17
    def load():
        errors_file = None if args.no_errors else sys.stderr
        with misc_utils.log_time('beancount.loader (total)', logging.info):
            return loader.load_file(args.filename,
                                    log_timings=logging.info,
                                    log_errors=errors_file)
Example #18
def run_transformations(entries, parse_errors, options_map, log_timings):
    """Run the various transformations on the entries.

    This is where entries are being synthesized, checked, plugins are run, etc.

    Args:
      entries: A list of directives as read from the parser.
      parse_errors: A list of errors so far.
      options_map: An options dict as read from the parser.
      log_timings: A function to write timing log entries to, or None, if it
        should be quiet.
    Returns:
      A list of modified entries, and a list of errors, also possibly modified.
    """
    # A list of errors to extend (make a copy to avoid modifying the input).
    errors = list(parse_errors)

    # Process the plugins.
    if options_map['plugin_processing_mode'] == 'raw':
        plugins_iter = options_map["plugin"]
    elif options_map['plugin_processing_mode'] == 'default':
        plugins_iter = itertools.chain(DEFAULT_PLUGINS_PRE,
                                       options_map["plugin"],
                                       DEFAULT_PLUGINS_POST)
    else:
        raise ValueError("Invalid value for plugin_processing_mode: {}".format(
            options_map['plugin_processing_mode']))

    for plugin_name, plugin_config in plugins_iter:

        # Issue a warning on a renamed module.
        renamed_name = RENAMED_MODULES.get(plugin_name, None)
        if renamed_name:
            warnings.warn(
                "Deprecation notice: Module '{}' has been renamed to '{}'; "
                "please adjust your plugin directive.".format(
                    plugin_name, renamed_name))
            plugin_name = renamed_name

        # Try to import the module.
        try:
            module = importlib.import_module(plugin_name)
            if not hasattr(module, '__plugins__'):
                continue

            with misc_utils.log_time(plugin_name, log_timings, indent=2):

                # Run each transformer function in the plugin.
                for function_name in module.__plugins__:
                    if isinstance(function_name, str):
                        # Support plugin functions provided by name.
                        callback = getattr(module, function_name)
                    else:
                        # Support function types directly, not just names.
                        callback = function_name

                    if plugin_config is not None:
                        entries, plugin_errors = callback(
                            entries, options_map, plugin_config)
                    else:
                        entries, plugin_errors = callback(entries, options_map)
                    errors.extend(plugin_errors)

            # Ensure that the entries are sorted. Don't trust the plugins
            # themselves.
            entries.sort(key=data.entry_sortkey)

        except (ImportError, TypeError) as exc:
            # Upon failure, just issue an error.
            errors.append(
                LoadError(
                    data.new_metadata("<load>", 0),
                    'Error importing "{}": {}'.format(plugin_name,
                                                      str(exc)), None))

    return entries, errors
Example #19
def main():
    parser = argparse.ArgumentParser(description=__doc__)

    default_file = os.environ.get(BEAN_FILE_ENV, None)
    default_home = os.environ.get('BEAN_HOME', '.')

    parser.add_argument("source", type=argparse.FileType("r"), default="-")
    parser.add_argument(
        "-o",
        "--output",
        "--target",
        type=str,
        dest="target",
        default="-",
    )
    parser.add_argument(
        "--quote-currency",
        type=str,
        dest="currency",
        default=None,
        help="The currency in which this commodity is priced.  Defaults to USD."
    )
    parser.add_argument(
        '-e',
        '--bean',
        metavar=BEAN_FILE_ENV,
        default=default_file,
        required=False,
        type=str,
        dest='bean_file',
        help=('Beancount file to read and verify. '
              + (f'Default is {default_file}' if default_file else '')))
    parser.add_argument('--pdb',
                        action='store_true',
                        help='Drop into a debugger on error')
    parser.add_argument(
        '--logging-conf',
        type=str,
        default=default_home + '/logging.yaml',
        help='logging.yaml file to use. Default is $BEAN_HOME/logging.yaml, '
             'falling back to ./logging.yaml.')
    args = parser.parse_args()

    logging_conf: Path = Path(args.logging_conf)
    if logging_conf.exists():
        logging_config(config_file=logging_conf, level=logging.DEBUG)

    # Output File Handling
    out_file = args.target
    if out_file == "-":
        stream = sys.stdout
    else:
        # Mode "a" appends, and creates the file first when it does not exist.
        stream = Path(out_file).open("a")

    with misc_utils.log_time('beancount.loader (total)', logging.info):
        # Load up the file, print errors, checking and validation are invoked
        # automatically.
        try:
            entries, errors, context = loader.load_file(
                args.bean_file,
                log_timings=logging.info,
                log_errors=sys.stderr)
        except Exception as exc:
            if args.pdb:
                pdb.post_mortem(exc.__traceback__)
            # Re-raise either way: the code below needs 'entries' and 'context'.
            raise

    quote_currency: str = args.currency
    if not quote_currency:
        operating = context.get('operating_currency') or ['USD']
        quote_currency = operating[0]
    logger.info(f"Using quote currency: {quote_currency}")

    # Build the Price Database:
    price_db: typing.Dict[str, dict] = defaultdict(dict)

    for entry in entries:
        if not isinstance(entry, data.Price):
            continue
        price_db[entry.currency][entry.date] = entry.amount

    # We could handle different source formats (JSON, YAML)
    reader = csv.reader(args.source)

    price_entries = read_price_stream(reader, price_db, quote_currency)

    loader.printer.print_entries(price_entries, file=stream)
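Given the price_db shape built above (currency -> date -> amount), a small hypothetical helper shows the intended lookup, the most recent price on or before a date:

def latest_price(price_db, currency, as_of_date):
    """Return the latest known amount for currency on or before as_of_date."""
    known_dates = sorted(date for date in price_db.get(currency, {})
                         if date <= as_of_date)
    return price_db[currency][known_dates[-1]] if known_dates else None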
Example #20
def main(argv=None):
    parser = version.ArgumentParser(description=__doc__)

    parser.add_argument(
        '--help-reports',
        '--list-reports',
        nargs='?',
        default=None,
        action=ListReportsAction,
        help="Print the full list of supported reports and exit.")

    parser.add_argument(
        '--help-formats',
        '--list-formats',
        nargs='?',
        default=None,
        action=ListFormatsAction,
        help="Print the full list of supported formats and exit.")

    parser.add_argument(
        '-f',
        '--format',
        default=None,
        choices=['text', 'csv', 'html', 'htmldiv', 'xls', 'ofx', 'beancount'],
        help="Output format.")

    parser.add_argument(
        '-o',
        '--output',
        action='store',
        help=("Output filename. If not specified, the output goes "
              "to stdout. The filename is inspected to select a "
              "sensible default format, if one is not requested."))

    parser.add_argument('-t',
                        '--timings',
                        '--verbose',
                        action='store_true',
                        help='Print timings.')

    parser.add_argument('-q',
                        '--no-errors',
                        action='store_true',
                        help='Do not report errors.')

    parser.add_argument('filename',
                        metavar='FILENAME.beancount',
                        help='The Beancount input filename to load.')

    subparsers = parser.add_subparsers(
        title='report', help='Name/specification of the desired report.')

    for report_class in get_all_reports():
        name, aliases = report_class.names[0], report_class.names[1:]

        oss = io.StringIO()
        oss.write('  {} (aliases: {}; formats: {})'.format(
            report_class.__doc__, ','.join(report_class.names),
            ','.join(report_class.get_supported_formats())))

        report_parser = subparsers.add_parser(name,
                                              aliases=aliases,
                                              description=oss.getvalue())
        report_parser.set_defaults(report_class=report_class)
        report_class.add_args(report_parser)

        # Each subparser must gather the filter arguments. This is unfortunate,
        # but it works.
        report_parser.add_argument(
            'filters',
            nargs='*',
            help='Filter expression(s) to select the subset of transactions.')

    args = parser.parse_args(args=argv)

    # Warn on filters--not supported at this time.
    if hasattr(args, 'filters') and args.filters:
        parser.error(
            ("Filters are not supported yet. Extra args: {}. "
             "See bean-query if you need filtering now.").format(args.filters))

    # Handle special commands.
    if args.help_reports:
        print(get_list_report_string())
        return

    is_check = False
    if hasattr(args, 'report_class'):
        # Open output file and guess file format.
        outfile = open(args.output, 'w') if args.output else sys.stdout
        args.format = args.format or file_utils.guess_file_format(args.output)

        # Create the requested report and parse its arguments.
        chosen_report = args.report_class(args, parser)
        if chosen_report is None:
            parser.error("Unknown report")
        is_check = isinstance(chosen_report, misc_reports.ErrorReport)

        # Verify early that the format is supported, in order to avoid parsing the
        # input file if we need to bail out.
        supported_formats = chosen_report.get_supported_formats()
        if args.format and args.format not in supported_formats:
            parser.error(
                "Unsupported format '{}' for {} (available: {})".format(
                    args.format, chosen_report.names[0],
                    ','.join(supported_formats)))

    # Force hardcore validations, just for check.
    extra_validations = (validation.HARDCORE_VALIDATIONS if is_check else None)

    logging.basicConfig(
        level=logging.INFO if args.timings else logging.WARNING,
        format='%(levelname)-8s: %(message)s')

    # Parse the input file.
    errors_file = None if args.no_errors else sys.stderr
    with misc_utils.log_time('beancount.loader (total)', logging.info):
        entries, errors, options_map = loader.load_file(
            args.filename,
            log_timings=logging.info,
            log_errors=errors_file,
            extra_validations=extra_validations)

    if hasattr(args, 'report_class'):
        # Create holdings list.
        with misc_utils.log_time('report.render', logging.info):
            try:
                chosen_report.render(entries, errors, options_map, args.format,
                                     outfile)
            except base.ReportError as exc:
                sys.stderr.write("Error: {}\n".format(exc))
                return 1
    else:
        print(get_list_report_string())

    return (1 if errors else 0)
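Because main() accepts an argv list and routes report names through subparsers, it can be driven programmatically; the 'balances' report name below is an assumption:

import sys

# Equivalent to running: <script> /absolute/ledger.beancount balances
sys.exit(main(argv=['/absolute/ledger.beancount', 'balances']))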