Esempio n. 1
0
def print_extracted_entries(entries, file):
    """Write a list of extracted entries to a file.

    Entries carrying the DUPLICATE_META marker in their metadata are
    rendered commented out (prefixed with '; ').

    Args:
      entries: A list of extracted entries.
      file: A file object to write to.
    """
    print('', file=file)

    for entry in entries:
        # Duplicates get the marker stripped and are emitted as comments.
        if DUPLICATE_META in entry.meta:
            clean_meta = dict(entry.meta)
            del clean_meta[DUPLICATE_META]
            rendered = printer.format_entry(entry._replace(meta=clean_meta))
            rendered = textwrap.indent(rendered, '; ')
        else:
            rendered = printer.format_entry(entry)
        print(rendered, file=file)

    print('', file=file)
Esempio n. 2
0
    def assertEqualEntries(self, expected_entries, actual_entries):
        """Compare two lists of entries exactly and print missing entries verbosely if
        they occur.

        Args:
          expected_entries: Either a list of directives or a string, in which case the
            string is run through beancount.parser.parse_string() and the resulting
            list is used.
          actual_entries: Same treatment as expected_entries, the other list of
            directives to compare to.
        Raises:
          AssertionError: If the exception fails.
        """
        expected = read_string_or_entries(expected_entries)
        actual = read_string_or_entries(actual_entries)

        same, expected_missing, actual_missing = compare.compare_entries(
            expected, actual)
        if same:
            return

        assert expected_missing or actual_missing, "Missing is missing: {}, {}".format(
            expected_missing, actual_missing)
        report = io.StringIO()
        # Render each non-empty difference group under its own header.
        for header, group in (
                ("Present in expected set and not in actual set:\n\n",
                 expected_missing),
                ("Present in actual set and not in expected set:\n\n",
                 actual_missing)):
            if group:
                report.write(header)
                for entry in group:
                    report.write(printer.format_entry(entry))
                    report.write('\n')
        self.fail(report.getvalue())
Esempio n. 3
0
def original_txn_modified(input_txns, output_txns, errors,
                          correctly_modified_txn_text):
    """Assert that the plugin modified the original transaction as expected.

    Args:
      input_txns: Transactions that were fed to the plugin.
      output_txns: Transactions produced by the plugin.
      errors: Unused here; kept for interface compatibility with the harness.
      correctly_modified_txn_text: Beancount text of the expected transaction.
    Raises:
      IndexError: If no output transaction matches the last input transaction.
      AssertionError, Exception: If the transactions do not match.
    """
    # Locate the modified original transaction in the plugin output by
    # matching the date and narration of the last input transaction.
    # (Removed a try/except IndexError that merely re-raised the exception.)
    last = input_txns[-1]
    candidates = [txn for txn in output_txns
                  if txn.date == last.date and txn.narration == last.narration]
    modified_txn = test_utils.strip_flaky_meta(candidates[0])

    # Get correctly modified original transaction from the feature file.
    correctly_modified_txn = test_utils.strip_flaky_meta(
        load_string(correctly_modified_txn_text)[0][-1])

    print(" ; RECEIVED:\n", printer.format_entry(modified_txn))
    print(" ; EXPECTED:\n", printer.format_entry(correctly_modified_txn))

    # Compare strings instead of hashes because that's an easy way to exclude
    # filename & lineno meta.
    try:
        print("RECEIVED:\n", modified_txn)
        print("EXPECTED:\n", correctly_modified_txn)
        assert hash_entry(modified_txn) == hash_entry(correctly_modified_txn)

    except AssertionError:
        # Rethrow as a nicely formatted diff.
        assert printer.format_entry(
            modified_txn) == '\n' + correctly_modified_txn_text + '\n'
        # But in case strings matches..
        raise Exception(
            "Transactions do not match, although their printed output is equal. See log output."
        )
Esempio n. 4
0
 def flush(self):
     """Append the pending transaction to the ledger file and clear it."""
     self.log.debug("flush transactions to file")
     if self.transaction is None:
         self.log.debug("no transactions to flush")
     else:
         self.log.debug(format_entry(self.transaction))
         # Append the formatted transaction to the ledger file.
         with open(self.filename, "a") as handle:
             handle.write("\n")
             handle.write(format_entry(self.transaction))
     self.transaction = None
Esempio n. 5
0
def tx_not_modified(input_txns, output_txns):
    """Assert the last transaction passed through the plugin unmodified."""
    expected = test_utils.strip_flaky_meta(input_txns[-1])
    received = test_utils.strip_flaky_meta(output_txns[-1])
    if hash_entry(expected) == hash_entry(received):
        return
    print("RECEIVED:", received)
    print("EXPECTED:", expected)
    # Rethrow as a nicely formatted diff
    assert printer.format_entry(received) == printer.format_entry(expected)
    # But in case strings matches..
    raise Exception("Transactions do not match, although their printed output is equal. See log output.")
Esempio n. 6
0
    def assertExcludesEntries(self, subset_entries, entries, allow_incomplete=False):
        """Check that subset_entries is not included in entries.

        Entries can be provided either as a list of directives or as a
        string. In the latter case, the string is parsed with
        beancount.parser.parse_string() and the resulting directives
        list is used. If allow_incomplete is True, light-weight
        booking is performed before comparing the directive lists,
        allowing to compare transactions with incomplete postings.

        Args:
          subset_entries: Subset entries.
          entries: Entries.
          allow_incomplete: Perform booking before comparison.

        Raises:
          AssertionError: If the check fails.

        """
        subset_entries = read_string_or_entries(subset_entries, allow_incomplete)
        entries = read_string_or_entries(entries)

        excludes, extra = compare.excludes_entries(subset_entries, entries)
        if not excludes:
            assert extra, "Extra is empty: {}".format(extra)
            oss = io.StringIO()
            if extra:
                # Typo fix: header previously read "Extra from from ...".
                oss.write("Extra from first/excluded set:\n\n")
                for entry in extra:
                    oss.write(printer.format_entry(entry))
                    oss.write('\n')
            self.fail(oss.getvalue())
Esempio n. 7
0
    def safe_add_entry(self, entry):
        """Check for possible duplicate in the existing self.entries and add it.

        Open and Document directives and computed ('P'-flagged) transactions
        are skipped entirely.
        """
        assert '_account' not in entry.meta, str(entry)

        # Skip Open and Document directives.
        if isinstance(entry, (data.Open, data.Document)):
            return

        # Loaded Transactions could have a match-key, to help de-duplicate.
        match_key = entry.meta.get('match-key', None)
        if not match_key:
            # Roll our own match-key for some things. Sha?
            match_key = printer.format_entry(entry)
        # NOTE(review): match_key is computed but never used in this method --
        # confirm whether de-duplication logic was meant to consume it here.

        # Tag each entry with an "Account" based on attribute, or best guess
        # on Postings.
        if isinstance(entry, data.Transaction) and entry.postings:
            # Computed entry, don't add.
            if entry.flag == 'P':
                return

        entry.meta['_account'] = self.guess_account(entry)

        # This will possibly delete entries from our source file.
        self.remove_exising_duplicate(entry)

        # Add back the good entry at the end.
        # Bug fix: the entry was previously appended twice.
        self.entries.append(entry)
Esempio n. 8
0
    def assertIncludesEntries(self, subset_entries, entries):
        """Check that subset_entries is included in entries and print missing entries.

        Args:
          subset_entries: Either a list of directives or a string, in which case the
            string is run through beancount.parser.parse_string() and the resulting
            list is used.
          entries: Same treatment as subset_entries, the other list of
            directives to compare to.
        Raises:
          AssertionError: If the check fails.
        """
        subset_entries = read_string_or_entries(subset_entries)
        entries = read_string_or_entries(entries)

        includes, missing = compare.includes_entries(subset_entries, entries)
        if not includes:
            assert missing, "Missing is empty: {}".format(missing)
            oss = io.StringIO()
            if missing:
                # Typo fix: header previously read "Missing from from ...".
                oss.write("Missing from expected set:\n\n")
                for entry in missing:
                    oss.write(printer.format_entry(entry))
                    oss.write('\n')
            self.fail(oss.getvalue())
Esempio n. 9
0
def assertExcludesEntries(subset_entries,
                          entries,
                          failfunc=DEFAULT_FAILFUNC,
                          allow_incomplete=False):
    """Check that subset_entries is not included in entries and print extra entries.

    Args:
      subset_entries: Either a list of directives or a string, in which case the
        string is run through beancount.parser.parse_string() and the resulting
        list is used.
      entries: Same treatment as subset_entries, the other list of
        directives to compare to.
      failfunc: A function to call on failure.
      allow_incomplete: A boolean, true if we allow incomplete inputs and perform
        light-weight booking.
    Raises:
      AssertionError: If the check fails.
    """
    subset_entries = read_string_or_entries(subset_entries, allow_incomplete)
    entries = read_string_or_entries(entries)

    excludes, extra = compare.excludes_entries(subset_entries, entries)
    if not excludes:
        assert extra, "Extra is empty: {}".format(extra)
        oss = io.StringIO()
        if extra:
            # Typo fix: header previously read "Extra from from ...".
            oss.write("Extra from first/excluded set:\n\n")
            for entry in extra:
                oss.write(printer.format_entry(entry))
                oss.write('\n')
        failfunc(oss.getvalue())
Esempio n. 10
0
def print_extracted_entries(extracted, output):
    """Print extracted entries.

    Entries marked as duplicates are printed as comments.

    Args:
      extracted: List of (filepath, entries) tuples where entries is
        the list of ledger entries extracted from the filepath.
      output: A file object to write to. The object just need to
       implement a .write() method.
    """
    if extracted:
        output.write(HEADER + '\n')

    for filepath, file_entries in extracted:
        output.write(SECTION.format(filepath) + '\n\n')

        for entry in file_entries:
            rendered = printer.format_entry(entry)
            # Duplicates are commented out with a '; ' prefix.
            if entry.meta.get(DUPLICATE, False):
                rendered = textwrap.indent(rendered, '; ')
            output.write(rendered + '\n')

        output.write('\n')
Esempio n. 11
0
def _format_entry(entry: Directive, currency_column: int) -> str:
    """Wrapper that strips unnecessary whitespace from format_entry."""
    # Hide private (underscore-prefixed) metadata keys before rendering.
    public_meta = {k: v for k, v in entry.meta.items() if not k.startswith("_")}
    rendered = align(format_entry(entry._replace(meta=public_meta)),
                     currency_column)
    rendered = rendered.replace("<class 'beancount.core.number.MISSING'>", "")
    return "\n".join(line.rstrip() for line in rendered.split("\n"))
Esempio n. 12
0
File: file.py Progetto: ychaim/fava
def _format_entry(entry, fava_options):
    """Wrapper that strips unnecessary whitespace from format_entry."""
    # Hide private (underscore-prefixed) metadata keys before rendering.
    visible_meta = {k: v for k, v in entry.meta.items() if not k.startswith("_")}
    rendered = align(format_entry(entry._replace(meta=visible_meta)),
                     fava_options)
    return "\n".join(map(str.rstrip, rendered.split("\n")))
Esempio n. 13
0
 def render_beancount(self, entries, errors, options_map, file):
     """Write the price database as Beancount Price directives to file."""
     dcontext = options_map['dcontext']
     price_map = prices.build_price_map(entries)
     meta = data.new_metadata('<report_prices_db>', 0)
     # One blank-line-separated group of Price directives per currency pair.
     for base, quote in price_map.forward_pairs:
         for date, price in price_map[(base, quote)]:
             directive = data.Price(meta, date, base, amount.Amount(price, quote))
             file.write(printer.format_entry(directive, dcontext))
         file.write('\n')
Esempio n. 14
0
 def reload(self):
     """Re-read the ledger file, discarding any pending transaction."""
     if self.transaction is not None:
         self.log.warn("Discard transactions due to reload")
         self.log.warn(format_entry(self.transaction))
     entries, errors, options_map = loader.load_file(self.filename)
     assert len(errors) == 0
     self.entries = entries
     self.options_map = options_map
     self.transaction = None
     # Commonly used transformations of the entries.
     self.price_entries = prices.get_last_price_entries(
         entries, datetime.date.today())
     self.accounts = realization.realize(self.entries)
Esempio n. 15
0
def assertEqualEntries(expected_entries,
                       actual_entries,
                       failfunc=DEFAULT_FAILFUNC,
                       allow_incomplete=False):
    """Compare two lists of entries exactly and print missing entries verbosely if
    they occur.

    Args:
      expected_entries: Either a list of directives or a string, in which case the
        string is run through beancount.parser.parse_string() and the resulting
        list is used.
      actual_entries: Same treatment as expected_entries, the other list of
        directives to compare to.
      failfunc: A function to call on failure.
      allow_incomplete: A boolean, true if we allow incomplete inputs and perform
        light-weight booking.
    Raises:
      AssertionError: If the exception fails.
    """
    expected = read_string_or_entries(expected_entries, allow_incomplete)
    actual = read_string_or_entries(actual_entries, allow_incomplete)

    same, expected_missing, actual_missing = compare.compare_entries(
        expected, actual)
    if same:
        return

    assert expected_missing or actual_missing, "Missing is missing: {}, {}".format(
        expected_missing, actual_missing)
    report = io.StringIO()
    # Render each non-empty difference group under its own header.
    for header, group in (
            ("Present in expected set and not in actual set:\n\n",
             expected_missing),
            ("Present in actual set and not in expected set:\n\n",
             actual_missing)):
        if group:
            report.write(header)
            for entry in group:
                report.write(printer.format_entry(entry))
                report.write('\n')
    failfunc(report.getvalue())
Esempio n. 16
0
def is_processed(input_txns, errors, config, input_txn_text, setup_txns_text, output_txns):
    """Run the example plugin over the input text, filling input_txns,
    output_txns and errors in place and logging everything to stdout."""
    input_txns[:], _, _ = load_string(setup_txns_text + input_txn_text)
    plugin_line = 'plugin "beancount_plugin_utils.example_plugin" "{}"\n'.format(
        config.strip('\n'))
    full_text = plugin_line + setup_txns_text + input_txn_text
    print('\nInput (full & raw):\n------------------------------------------------')
    print(full_text + '\n')
    output_txns[:], errors[:], _ = load_string(full_text)
    print('\nOutput (Transactions):\n------------------------------------------------\n')
    for txn in output_txns:
        print(printer.format_entry(txn))
    print('\nOutput (Errors):\n------------------------------------------------\n')
    for err in errors:
        print(printer.format_error(err))
Esempio n. 17
0
    def assertEqualEntries(self, expected_entries, actual_entries, allow_incomplete=False):
        """Check that two lists of entries are equal.

        Entries can be provided either as a list of directives or as a
        string.  In the latter case, the string is parsed with
        beancount.parser.parse_string() and the resulting directives
        list is used. If allow_incomplete is True, light-weight
        booking is performed before comparing the directive lists,
        allowing to compare transactions with incomplete postings.

        Args:
          expected_entries: Expected entries.
          actual_entries: Actual entries.
          allow_incomplete: Perform booking before comparison.

        Raises:
          AssertionError: If the exception fails.

        """
        expected = read_string_or_entries(expected_entries, allow_incomplete)
        actual = read_string_or_entries(actual_entries, allow_incomplete)

        same, expected_missing, actual_missing = compare.compare_entries(
            expected, actual)
        if same:
            return

        assert expected_missing or actual_missing, \
            "Missing is missing: {}, {}".format(expected_missing, actual_missing)
        buf = io.StringIO()
        # Render each non-empty difference group under its own header.
        for header, group in (
                ("Present in expected set and not in actual set:\n\n",
                 expected_missing),
                ("Present in actual set and not in expected set:\n\n",
                 actual_missing)):
            if group:
                buf.write(header)
                for entry in group:
                    buf.write(printer.format_entry(entry))
                    buf.write('\n')
        self.fail(buf.getvalue())
Esempio n. 18
0
 def add_posting(self, user, item_id):
     """Append a receivable posting for *user*'s purchase of *item_id* to
     the pending transaction, creating the transaction if needed."""
     assert user in self.get_users()
     account = "Assets:Receivable:{}".format(user)
     if self.transaction is None:
         # Start a fresh transaction carrying the items leg.
         self.transaction = Transaction(
             meta={},
             date=datetime.date.today(),
             flag="*",
             payee="kitty",
             narration="cococount purchase",
             tags=None,
             links=None,
             postings=[Posting("Assets:Items", number.MISSING, None, None, None, {})])
     price = self.latest_price(item_id)
     posting = Posting(account, price.amount, None, None, None, {})
     self.transaction.postings.append(posting)
     self.log.debug(format_entry(self.transaction))
Esempio n. 19
0
    def body(self, parent):
        """Build the rule-specification dialog body.

        Args:
          parent: The parent widget (unused; frames attach to self).
        Returns:
          The widget that should receive initial focus.
        """
        self.diagFrame = Frame(self)
        self.diagFrame.pack(side=TOP, fill=BOTH)

        row_id = 0
        self.promt = Label(self.diagFrame, text='Please specify the rule.')
        self.promt.grid(row=row_id, sticky=W + E, columnspan=5)
        row_id += 1

        # Show the entry being matched, if any.
        if self.entry:
            self.entryLabel = Label(self.diagFrame,
                                    text=printer.format_entry(self.entry),
                                    justify=LEFT,
                                    relief=SUNKEN,
                                    wraplength=400)
            self.entryLabel.grid(row=row_id, columnspan=5)
            row_id += 1

        self.changes = {}

        # Bug fix: 'kind' was left unbound when the entry is not a Transaction
        # and no rule is set, raising UnboundLocalError at the check below.
        kind = None
        if self.entry and isinstance(self.entry, Transaction):
            kind = 'Transaction'
        elif self.rule:
            kind = self.rule['kind']

        if kind not in RULES:
            return self.promt

        elif kind == 'Transaction':
            Label(self.diagFrame,
                  text='Transaction Attributes').grid(row=row_id,
                                                      columnspan=5,
                                                      sticky=E + W)
            row_id += 1

            # Column headers for the rule editing grid.
            for col, n in enumerate(
                ['Field', 'Relation', 'Pattern', 'Ch. Method', 'New Value']):
                Label(self.diagFrame, text=n).grid(row=row_id,
                                                   column=col,
                                                   sticky=E + W)
            row_id += 1

            self.recursiveMakeChangable(self.newRule, row_id, self.entry,
                                        self.rule)

        return self.promt
Esempio n. 20
0
def is_processed(input_txns, errors, config, input_txn_text, setup_txns_text,
                 output_txns):
    """Run the beancount_share plugin over the input text, filling output_txns
    and errors in place and logging everything to stdout."""
    text = ('plugin "beancount_share.share" "' + config.strip('\n') + '"\n'
            + setup_txns_text + input_txn_text)
    print('\nInput (full & raw):\n------------------------------------------------\n'
          + text + '\n')
    output_txns[:], errors[:], _ = load_string(text)
    print('\nOutput (Transactions):\n------------------------------------------------\n')
    for txn in output_txns:
        print(printer.format_entry(txn))
    print('\nOutput (Errors):\n------------------------------------------------\n')
    for err in errors:
        print(printer.format_error(err))
Esempio n. 21
0
 def render_htmldiv(self, entries, errors, options_map, file):
     """Render the list of errors as an HTML <div> fragment into file."""
     dcontext = options_map['dcontext']
     write = file.write
     write('<div id="errors">\n')
     for error in errors:
         write('<div class="error">\n')
         # Link back to the source location when the error carries one.
         if hasattr(error, 'source'):
             write('<a class="source" href="{}">{}</a><br/>\n'.format(
                 self.formatter.render_source(error.source),
                 printer.render_source(error.source)))
         write('<span class="error-message">{}</span>\n'.format(error.message))
         # Include the offending entry, pretty-printed, when available.
         if error.entry is not None:
             write('<pre class="syntax">\n')
             write(textwrap.indent(
                 printer.format_entry(error.entry, dcontext), '  '))
             write('</pre>\n')
         write('</div>\n')
     write('</div>\n')
Esempio n. 22
0
def is_processed(variant, input_txns, errors, config, input_txn_text,
                 setup_txns_text, output_txns):
    """Run the selected beancount_interpolate plugin(s) over the input text.

    Fills input_txns, output_txns and errors in place and logs everything
    to stdout.

    Args:
      variant: One of 'depr', 'recur', 'split', 'spread' or 'all'.
      input_txns: List to receive the parsed input transactions (mutated).
      errors: List to receive the load errors (mutated).
      config: Plugin configuration string.
      input_txn_text: Beancount text of the transaction under test.
      setup_txns_text: Beancount text of the setup transactions.
      output_txns: List to receive the plugin output transactions (mutated).
    Raises:
      RuntimeError: If variant is unknown.
    """
    input_txns[:], _, _ = load_string(setup_txns_text + input_txn_text)

    # Map each variant to the plugin module(s) it enables; this replaces the
    # previous copy-pasted string construction per branch.
    plugin_modules = {
        'depr': ['beancount_interpolate.depreciate'],
        'recur': ['beancount_interpolate.recur'],
        'split': ['beancount_interpolate.split'],
        'spread': ['beancount_interpolate.spread'],
        'all': ['beancount_interpolate.depreciate',
                'beancount_interpolate.recur',
                'beancount_interpolate.split',
                'beancount_interpolate.spread'],
    }
    if variant not in plugin_modules:
        raise RuntimeError('Unknown variant: "{}".'.format(variant))

    stripped_config = config.strip('\n')
    prefix_plugin_text = ''.join(
        'plugin "{}" "{}"\n'.format(module, stripped_config)
        for module in plugin_modules[variant])

    full_text = prefix_plugin_text + setup_txns_text + input_txn_text
    print('\nInput (full & raw):\n------------------------------------------------')
    print(full_text + '\n')
    output_txns[:], errors[:], _ = load_string(full_text)
    print('\nOutput (Transactions):\n------------------------------------------------\n')
    for txn in output_txns:
        print(printer.format_entry(txn))
    print('\nOutput (Errors):\n------------------------------------------------\n')
    for error in errors:
        print(printer.format_error(error))
Esempio n. 23
0
def main():
    """Print the transactions of one ledger whose links do not appear in a
    second ledger of transactions to be removed."""
    argparser = argparse.ArgumentParser(description=__doc__)
    argparser.add_argument('filename', help='Transactions to be considered')
    argparser.add_argument('filename_diff', help='Transactions to be removed')
    argparser.add_argument('-q', '--quiet', action='store_true',
                           help="Don't print file or line numbers.")
    args = argparser.parse_args()

    # Parse the ledger files.
    entries, errors, options = loader.load_file(args.filename,
                                                log_errors=logging.error)
    entries_diff, errors_diff, options_diff = loader.load_file(
        args.filename_diff, log_errors=logging.error)

    # Map each link to the to-be-removed transactions that carry it.
    link_map = collections.defaultdict(list)
    for txn in data.filter_txns(entries_diff):
        for link in txn.links:
            link_map[link].append(txn)

    # Keep only transactions that share no link with the removal set.
    filtered_entries = [
        txn for txn in data.filter_txns(entries)
        if not any(link in link_map for link in txn.links)
    ]

    # Print out something about each entry.
    for entry in filtered_entries:
        if not args.quiet:
            print()
            print('{}'.format(printer.render_source(entry.meta)))
            print()
        print(printer.format_entry(entry))
Esempio n. 24
0
def do_roundtrip(filename, unused_args):
    """Round-trip test on arbitrary Ledger.

    Read a Ledger's transactions, print them out, re-read them again and compare
    them. Both sets of parsed entries should be equal. Both printed files are
    output to disk, so you can also run diff on them yourself afterwards.

    Args:
      filename: A string, the Beancount input filename.
      unused_args: Ignored; kept for the command dispatch interface.
    """
    from beancount.parser import printer
    from beancount.core import compare
    from beancount import loader

    round1_filename = round2_filename = None
    try:
        logging.basicConfig(level=logging.INFO,
                            format='%(levelname)-8s: %(message)s')
        logging.info("Read the entries")
        entries, errors, options_map = loader.load_file(filename)
        printer.print_errors(errors, file=sys.stderr)

        logging.info("Print them out to a file")
        basename, extension = path.splitext(filename)
        round1_filename = ''.join([basename, '.roundtrip1', extension])
        with open(round1_filename, 'w') as outfile:
            printer.print_entries(entries, file=outfile)

        logging.info("Read the entries from that file")

        # Note that we don't want to run any of the auto-generation here, but
        # parsing now returns incomplete objects and we assume idempotence on a
        # file that was output from the printer after having been processed, so
        # it shouldn't add anything new. That is, a processed file printed and
        # resolve when parsed again should contain the same entries, i.e.
        # nothing new should be generated.
        entries_roundtrip, errors, options_map = loader.load_file(
            round1_filename)

        # Print out the list of errors from parsing the results.
        if errors:
            print(
                ',----------------------------------------------------------------------'
            )
            printer.print_errors(errors, file=sys.stdout)
            print(
                '`----------------------------------------------------------------------'
            )

        logging.info("Print what you read to yet another file")
        round2_filename = ''.join([basename, '.roundtrip2', extension])
        with open(round2_filename, 'w') as outfile:
            printer.print_entries(entries_roundtrip, file=outfile)

        logging.info("Compare the original entries with the re-read ones")
        same, missing1, missing2 = compare.compare_entries(
            entries, entries_roundtrip)
        if same:
            logging.info('Entries are the same. Congratulations.')
        else:
            logging.error('Entries differ!')
            print()
            print('\n\nMissing from original:')
            # Bug fix: iterate the entries actually reported missing
            # (missing1), not the entire original list.
            for entry in missing1:
                print(entry)
                print(compare.hash_entry(entry))
                print(printer.format_entry(entry))
                print()

            print('\n\nMissing from round-trip:')
            for entry in missing2:
                print(entry)
                print(compare.hash_entry(entry))
                print(printer.format_entry(entry))
                print()
    finally:
        for rfilename in (round1_filename, round2_filename):
            # Guard against None (a failure before the filenames were set)
            # which would make path.exists() raise TypeError.
            if rfilename and path.exists(rfilename):
                os.remove(rfilename)
Esempio n. 25
0
def _format_entry(entry):
    """Wrapper that strips unnecessary whitespace from format_entry."""
    lines = format_entry(entry).split('\n')
    return '\n'.join(map(str.rstrip, lines))
Esempio n. 26
0
	except:
		pass

# Abort when no importer recognized the input file.
if instance == None:
	print("No suitable importer!")
	exit(1)

# Parse the input with the selected importer.
new_entries = instance.parse()


# Write the imported entries to the requested output file.
with open(args.out, 'w') as f:
	printer.print_entries(new_entries, file = f)

print('Outputed to ' + args.out)
exit(0)

# NOTE(review): everything below is unreachable after exit(0) above -- it
# appears to be a separate demo snippet concatenated here; confirm intent.
# Parse a single hard-coded transaction for demonstration.
file = parser.parse_one('''
2018/01/15 * "测试" "测试"
	Assets:Test 300 CNY
	Income:Test

''')
print(file.postings)


# Replace the first posting's amount (300 -> 100), leaving currency intact.
file.postings[0] = file.postings[0]._replace(units = file.postings[0].units._replace(number = 100))
print(file.postings[0])

# Render the modified transaction back to Beancount text.
data = printer.format_entry(file)
print(data)
Esempio n. 27
0
    def safe_add_entry(self, entry):
        """Check for possible duplicate in the existing self.entries and add it.

        De-duplication is driven by 'match-key' metadata (with optional
        'match-key-1' .. 'match-key-N' extras); entries without one get a
        synthetic key derived from their printed form.
        """
        # Respect the year filter, if one is configured.
        if self.year:
            if entry.date.year != self.year:
                return

        # Loaded Transactions could have a match-key, to help de-duplicate
        match_key = entry.meta.get('match-key', None)
        if not match_key:
            # Roll our own match-key for some things
            match_key = printer.format_entry(entry)

        if isinstance(entry, data.Transaction) and entry.postings:
            # Sort on the first account
            for posting in entry.postings:
                account_parts = account.split(posting.account)
                if account_parts[0] in ("Assets", "Liabilities"):
                    entry.meta['_account'] = posting.account
                    break
            else:
                # Bug fix: store the account name, not the Posting object,
                # so '_account' is a string like in every other branch.
                # NOTE(review): the original comment said "last account" but
                # the first posting is used -- confirm intent.
                entry.meta['_account'] = entry.postings[0].account
        elif hasattr(entry, 'account'):
            entry.meta['_account'] = entry.account
        else:
            entry.meta['_account'] = 'other'

        found_match = False
        existing_entry = None
        remove_list = []

        # TODO do a yaml.load(match_key) to support list
        count = 0
        while match_key and not found_match:
            existing_entry = None
            if match_key in self.duplicates:
                existing_entry = self.duplicates.get(match_key)
            if existing_entry:
                # Don't do anything since it's duplicate
                found_match = True
            else:
                # Make note of this match-key
                self.duplicates[match_key] = entry
            count += 1
            # We support multiple match keys in the format 'match-key-1' .. 'match-key-N'
            match_key = entry.meta.get(f'match-key-{count}', None)

        if found_match:
            # We only "preserve" * entries.  Others might be overwritten.
            if not hasattr(existing_entry, 'flag'):
                # No need to check flags
                return
            # Make sure the existing_entry isn't "booked" with a '*'
            if existing_entry.flag == entry.flag or existing_entry.flag == '*':
                return
            else:
                # We need to replace the existing entry!
                remove_list.append(existing_entry)

        for item in remove_list:
            if item in self.entries:
                self.entries.remove(item)

        self.entries.append(entry)
Esempio n. 28
0
    def body(self, parent):
        """Build the dialog body for manually editing an unmatched entry.

        Args:
          parent: The parent widget (unused; widgets attach to self).
        Returns:
          The widget that should receive initial focus.
        """
        self.diagFrame = Frame(self)
        self.diagFrame.pack(side=TOP, fill=BOTH)

        self.promt = Label(self.diagFrame,
                           text='Could not match this entry to any rule. '
                           'Please make changes manually or add a new '
                           'rule.')
        self.promt.grid(row=0, sticky=W, columnspan=2)

        self.entryLabel = Label(self.diagFrame,
                                text=printer.format_entry(self.entry),
                                justify=LEFT,
                                relief=SUNKEN,
                                wraplength=400)
        self.entryLabel.grid(row=1, columnspan=2)

        self.changes = {}

        row_id = 2
        if isinstance(self.entry, Transaction):
            Label(self.diagFrame,
                  text='Transaction Attributes').grid(row=row_id, columnspan=2)
            row_id += 1

            # Simple attributes are edited verbatim (previously three
            # copy-pasted blocks).
            for name in ('payee', 'narration', 'meta'):
                self.changes[name] = self.make_changable(
                    name.title(), getattr(self.entry, name), row_id)
                row_id += 1

            # Set-valued attributes are shown as a comma-separated string.
            for name in ('tags', 'links'):
                value = getattr(self.entry, name)
                if value is not None:
                    value = ', '.join(value)
                self.changes[name] = self.make_changable(
                    name.title(), value, row_id)
                row_id += 1

            Label(self.diagFrame, text='Postings').grid(row=row_id,
                                                        columnspan=2)
            row_id += 1

            # One flag/account editor pair per posting.
            self.changes['postings'] = []
            for posting in self.entry.postings:
                p_changes = {}
                p_changes['flag'] = self.make_changable(
                    'Flag', posting.flag, row_id)
                row_id += 1
                p_changes['account'] = self.make_changable(
                    'Account', posting.account, row_id, 'menu')
                row_id += 1
                self.changes['postings'].append(p_changes)

        addRule = Button(self.diagFrame, text='Add Rule', command=self.addRule)
        addRule.grid(row=row_id, column=0, sticky=N + E + S + W, columnspan=2)

        return self.promt
Esempio n. 29
0
File: file.py Progetto: stephas/fava
def _format_entry(entry, fava_options):
    """Wrapper that strips unnecessary whitespace from format_entry."""
    aligned = align(format_entry(entry), fava_options)
    stripped = [line.rstrip() for line in aligned.split('\n')]
    return '\n'.join(stripped)
Esempio n. 30
0
def _format_entry(entry, fava_options):
    """Wrapper that strips unnecessary whitespace from format_entry."""
    text = align(format_entry(entry), fava_options)
    return '\n'.join(map(str.rstrip, text.split('\n')))
Esempio n. 31
0
def main():
    """Parse a bank CSV export and import it into a Beancount ledger.

    Flow: load config from the CLI options, read the CSV, run each row
    through the rule engine, skip rows whose md5 hash already exists in the
    journal, collect the generated transactions, then append them to an
    '<account>.ldg' file (one account per CSV expected) and print a summary.
    Exits with -1 if the CSV file does not exist.
    """

    options = eval_args('Parse bank csv file and import into beancount')
    args = init_config(options.file, options.debug)
    
    # Path of the CSV to import, derived from the configured target dir + ref.
    import_csv = args.csv.target + '/' + args.csv.ref + '.csv' 

    if not os.path.isfile(import_csv):
        print("file: %s does not exist!" % (import_csv))
        sys.exit(-1)

    # init report data
    tx_in_file = 0        # total data rows read from the CSV
    processed = 0         # transactions actually written to the ledger file
    error = 0             # rows that raised an exception while processing
    hash_collision = 0    # rows whose md5 already exists in the journal
    ignored_by_rule = 0   # rows the rule engine returned nothing for
    transactions = {}     # sort key (tx date + random time) -> transaction
    rule_engine = init_rule_engine(args)
    # md5 hashes of transactions already in the journal — the dedup set.
    tx_hashes = load_journal_hashes(args.rules.bc_file)

    accounts = set()

    with open(import_csv) as csv_file:
        csv_reader = csv.reader(csv_file, delimiter=args.csv.separator)
        # Skip the configured number of header lines.
        for i in range(args.csv.skip):
            next(csv_reader)  # skip the line
        for row in csv_reader:
            tx_in_file += 1
            try:
                if args.debug:
                    print("> processing\n" + str(row))
                # md5 of the raw joined row is the dedup key for this tx.
                md5 = hashlib.md5(",".join(row).encode("utf-8")).hexdigest()
                # keep track of the accounts for each tx:
                # the system expects one account per imported file
                res_account = get_account(row, args)
                if args.debug:
                    print("resolved account: " + str(res_account))
                accounts.add(res_account)

                if md5 not in tx_hashes:
                    tx_date = datetime.strptime(
                        row[args.indexes.date].strip(), args.csv.date_format
                    )
                    # Raw row + hash stored as metadata on the transaction.
                    tx_meta = {"csv": ",".join(row), "md5": md5}
                    tx = rule_engine.execute(row)

                    # A falsy tx means the rule engine dropped this row.
                    if tx:
                        """ 
                        Handle the origin account: if the tx processed by the
                        rules engin has no origin account, try to assign one
                        from the property file: args.rules.origin_account 
                        """ 
                        if tx.postings[0].account is None:
                            raise Exception(
                                'Unable to resolve the origin account for this transaction, '
                                'please check that the `Replace_Asset` rule '
                                'is in use for this account or set the `origin_account` property '
                                'in the config file.'
                            )
                    
                        # replace date """
                        tx = tx._replace(date=str(tx_date.date()))

                        # add md5 and csv """
                        tx = tx._replace(meta=tx_meta)

                        # get a decimal, with the minus sign, if it's an expense
                        amount = resolve_amount(row, args)

                        # add units (how much was spent)
                        new_posting = tx.postings[0]._replace(
                            units=Amount(amount, get_currency(row, args))
                        )
                        # NOTE(review): only the first two postings are kept;
                        # assumes the rule engine emits exactly two — confirm.
                        tx = tx._replace(
                            postings=[new_posting] + [tx.postings[1]])

                        if args.debug:
                            print(tx)

                        # generate a key based on:
                        # - the tx date
                        # - a random time (tx time is not important, but date is!)
                        transactions[str(tx_date) +
                                     str(gen_datetime().time())] = tx
                    else:
                        ignored_by_rule += 1
                else:
                    print(
                        "warning: a transaction with identical hash exists in the journal: "
                        + md5
                    )
                    log_error(row)
                    hash_collision += 1

            except Exception as e:
                # Best-effort import: log the bad row and keep going.
                print("error: " + str(e))
                log_error(row)
                error += 1
                if args.debug:
                    traceback.print_exc()

        # write transaction to ledger file corresponding to the account id
        if len(accounts) == 1 and transactions:

            # Append mode: existing ledger content is preserved.
            with open(accounts.pop() + ".ldg", "a") as exc:
                # Keys start with the tx date, so sorting yields date order.
                for key in sorted(transactions):
                    exc.write(format_entry(transactions[key]) + "\n")
                    processed += 1
        else:
            if len(transactions) > 0:
                print(
                    "Expecting only one account in csv file, found: "
                    + str(len(accounts))
                )

    print("\nsummary:\n")
    print("csv tx count: \t\t" + str(tx_in_file))
    print("imported: \t\t" + str(processed))
    print("tx already present: \t" + str(hash_collision))
    print("ignored by rule: \t" + str(ignored_by_rule))
    print("error: \t\t\t" + str(error))