def test_api_context(
    test_client: FlaskClient,
    snapshot: SnapshotFunc,
    example_ledger: FavaLedger,
) -> None:
    response = test_client.get("/long-example/api/context")
    assert_api_error(
        response, "Invalid API request: Parameter `entry_hash` is missing."
    )

    entry_hash = hash_entry(
        next(
            entry
            for entry in example_ledger.all_entries_by_type.Transaction
            if entry.narration == r"Investing 40% of cash in VBMPX"
            and entry.date == datetime.date(2016, 5, 9)
        )
    )
    response = test_client.get(
        f"/long-example/api/context?entry_hash={entry_hash}"
    )
    assert_api_success(response)
    snapshot(response.json)

    entry_hash = hash_entry(example_ledger.all_entries[10])
    response = test_client.get(
        f"/long-example/api/context?entry_hash={entry_hash}"
    )
    assert_api_success(response)
    assert response.json
    assert not response.json.get("balances_before")
    snapshot(response.json)
def original_txn_modified(input_txns, output_txns, errors,
                          correctly_modified_txn_text):
    # Get the modified original transaction from the output of the plugin.
    # The modified original transaction will be the last in the list of
    # output transactions.
    try:
        last = input_txns[len(input_txns) - 1]
        modified_txn = test_utils.strip_flaky_meta([
            txn for txn in output_txns
            if txn.date == last.date and txn.narration == last.narration
        ][0])
    except IndexError as error:
        raise error

    # Get the correctly modified original transaction from the feature file.
    correctly_modified_txn = test_utils.strip_flaky_meta(
        load_string(correctly_modified_txn_text)[0][-1])
    print(" ; RECEIVED:\n", printer.format_entry(modified_txn))
    print(" ; EXPECTED:\n", printer.format_entry(correctly_modified_txn))

    # Compare strings instead of hashes because that's an easy way to exclude
    # the filename & lineno meta.
    try:
        print("RECEIVED:\n", modified_txn)
        print("EXPECTED:\n", correctly_modified_txn)
        assert hash_entry(modified_txn) == hash_entry(correctly_modified_txn)
    except AssertionError:
        # Re-raise as a nicely formatted diff.
        assert printer.format_entry(
            modified_txn) == '\n' + correctly_modified_txn_text + '\n'
        # But in case the strings do match, still fail loudly.
        raise Exception(
            "Transactions do not match, although their printed output is "
            "equal. See log output."
        )
def tx_not_modified(input_txns, output_txns):
    original_txn = test_utils.strip_flaky_meta(input_txns[-1])
    modified_txn = test_utils.strip_flaky_meta(output_txns[-1])
    try:
        assert hash_entry(original_txn) == hash_entry(modified_txn)
    except AssertionError:
        print("RECEIVED:", modified_txn)
        print("EXPECTED:", original_txn)
        # Re-raise as a nicely formatted diff.
        assert printer.format_entry(modified_txn) == printer.format_entry(original_txn)
        # But in case the strings do match, still fail loudly.
        raise Exception(
            "Transactions do not match, although their printed output is "
            "equal. See log output.")
def compare_entries(entries1: List[Directive], entries2: List[Directive]):
    hashes1 = {
        hash_entry(entry, exclude_meta=True): entry for entry in entries1}
    hashes2 = {
        hash_entry(entry, exclude_meta=True): entry for entry in entries2}
    keys1 = Counter(hash_entry(entry, exclude_meta=True) for entry in entries1)
    keys2 = Counter(hash_entry(entry, exclude_meta=True) for entry in entries2)

    same = keys1 == keys2
    missing1 = data.sorted([hashes1[key] for key in keys1 - keys2])
    missing2 = data.sorted([hashes2[key] for key in keys2 - keys1])
    return same, missing1, missing2
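# A minimal usage sketch (assumption, not from the source): diff two ledger
# files with the compare_entries() defined above and print the entries that
# only appear on one side. The file names are placeholders; hash_entry is
# assumed to support the exclude_meta keyword as used above.
from beancount import loader
from beancount.parser import printer

entries_a, _, _ = loader.load_file("a.beancount")
entries_b, _, _ = loader.load_file("b.beancount")
same, only_in_a, only_in_b = compare_entries(entries_a, entries_b)
if not same:
    print("Only in a.beancount:")
    printer.print_entries(only_in_a)
    print("Only in b.beancount:")
    printer.print_entries(only_in_b)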
def test_hash_with_exclude_meta(self):
    entries, _, __ = loader.load_string("""
2013-06-22 * "La Colombe" "Buying coffee"  ^ee89ada94a39
  Expenses:Coffee         5 USD
  Assets:US:Cash

2013-06-22 * "La Colombe" "Buying coffee"  ^ee89ada94a39
  Expenses:Coffee         5 USD
  Assets:US:Cash
""")
    self.assertNotEqual(compare.hash_entry(entries[0], exclude_meta=False),
                        compare.hash_entry(entries[1], exclude_meta=False))
    self.assertEqual(compare.hash_entry(entries[0], exclude_meta=True),
                     compare.hash_entry(entries[1], exclude_meta=True))
def pullfrom(entries, options_map):
    errors = []
    new_entries = []

    for entry in entries:
        add_entries = []
        if isinstance(entry, Transaction):
            new_postings = []
            for posting in entry.postings:
                if 'pull_from' in posting.meta:
                    memo = posting.meta.get('memo', '')
                    new_acct = posting.meta['pull_from']

                    new_posting = posting._replace(units=-posting.units, meta=None)
                    new_postings.append(new_posting)
                    new_posting = posting._replace(account=new_acct, meta=None)
                    new_postings.append(new_posting)

                    link_id = set(['pull-' + compare.hash_entry(entry)])
                    new_links = entry.links | link_id
                    entry = entry._replace(links=new_links)

                    new_meta = posting.meta.copy()
                    del new_meta['pull_from']
                    new_meta.pop('memo', None)
                    new_meta['filename'] = entry.meta['filename']
                    new_meta['lineno'] = entry.meta['lineno']

                    new_entry = entry._replace(payee=None,
                                               narration=memo,
                                               tags=set(),
                                               links=link_id,
                                               meta=new_meta,
                                               postings=new_postings)
                    add_entries.append(new_entry)

        new_entries.append(entry)
        new_entries.extend(add_entries)

    return new_entries, errors
def context_(ehash=None):
    "Render the before & after context around a transaction entry."

    matching_entries = [entry
                        for entry in app.entries
                        if ehash == compare.hash_entry(entry)]

    oss = io.StringIO()
    if len(matching_entries) == 0:
        print("ERROR: Could not find matching entry for '{}'".format(ehash),
              file=oss)

    elif len(matching_entries) > 1:
        print("ERROR: Ambiguous entries for '{}'".format(ehash),
              file=oss)
        print(file=oss)
        dcontext = app.options['dcontext']
        printer.print_entries(matching_entries, dcontext, file=oss)

    else:
        dcontext = app.options['dcontext']
        oss.write("<pre>\n")
        for entry in matching_entries:
            oss.write(context.render_entry_context(
                app.entries, app.options, dcontext,
                entry.meta["filename"], entry.meta["lineno"]))
        oss.write("</pre>\n")

    return render_global(
        pagetitle="Context: {}".format(ehash),
        contents=oss.getvalue())
def link_documents(entries: Entries, _: Any) -> Tuple[Entries, List[DocumentError]]:
    """Link transactions to documents."""

    errors = []

    transactions = []
    by_fullname = {}
    by_basename = defaultdict(list)

    for index, entry in enumerate(entries):
        if isinstance(entry, Document):
            by_fullname[entry.filename] = index
            by_basename[basename(entry.filename)].append((index, entry))
        elif isinstance(entry, Transaction):
            transactions.append((index, entry))

    for index, entry in transactions:
        disk_docs = [
            value
            for key, value in entry.meta.items()
            if key.startswith("document")
        ]

        if not disk_docs:
            continue

        hash_ = hash_entry(entry)[:8]
        txn_accounts = [pos.account for pos in entry.postings]
        for disk_doc in disk_docs:
            documents = [
                j
                for j, document in by_basename[disk_doc]
                if document.account in txn_accounts
            ]
            disk_doc_path = normpath(
                join(dirname(entry.meta["filename"]), disk_doc))
            if disk_doc_path in by_fullname:
                documents.append(by_fullname[disk_doc_path])

            if not documents:
                errors.append(
                    DocumentError(
                        entry.meta,
                        f"Document not found: '{disk_doc}'",
                        entry,
                    ))
                continue

            for j in documents:
                # Since we might link a document multiple times, we have to use
                # the index for the replacement here.
                doc: Document = entries[j]  # type: ignore
                entries[j] = doc._replace(
                    links=add_to_set(doc.links, hash_),
                    tags=add_to_set(doc.tags, "linked"),
                )

            entries[index] = entry._replace(
                links=add_to_set(entry.links, hash_))

    return entries, errors
def context(self, ehash):
    try:
        entry = next(entry for entry in self.all_entries
                     if ehash == compare.hash_entry(entry))
    except StopIteration:
        return

    context_str = context.render_entry_context(self.all_entries,
                                               self.options, entry)
    return {"context": context_str.split("\n", 2)[2], "entry": entry}
def settlement_date(entries, options_map, config):
    errors = []

    for index, entry in enumerate(entries):
        if isinstance(entry, data.Transaction):
            for p_index, posting in enumerate(entry.postings):
                if posting.meta and "settle" in posting.meta:
                    save_config = None
                    postings = entry.postings
                    s_date = posting.meta["settle"]
                    link = "settle-{}".format(compare.hash_entry(entry))
                    original_account = posting.account

                    if postings[p_index].units.number < 0:
                        save_config = config
                        config = "Liabilities:AccountsPayable"

                    entry.postings[p_index] = entry.postings[p_index]._replace(
                        account=config)
                    links = (set(entries[index].links).union([link])
                             if entries[index].links else set([link]))
                    entries[index] = entry._replace(postings=postings)
                    entries[index] = entry._replace(links=links)

                    # do not settle future dates yet
                    if s_date >= date.today():
                        config = save_config if save_config else config
                        continue

                    new_posting = postings[p_index]
                    new_posting = new_posting._replace(meta=dict())
                    postings = [new_posting, new_posting]
                    postings[0] = postings[0]._replace(account=config)
                    postings[0] = postings[0]._replace(
                        units=postings[1].units._replace(
                            number=postings[1].units.number * -1))
                    postings[1] = postings[1]._replace(
                        account=original_account)

                    if save_config:
                        postings.reverse()

                    entries.append(
                        data.Transaction(
                            entry.meta,
                            s_date,
                            entry.flag,
                            "",
                            "Settle: {}".format(entry.narration),
                            entry.tags,
                            set([link]),
                            postings,
                        ))

                    config = save_config if save_config else config
                    # break  # allow use of multiple 'settle'

    return entries, errors
def queries(self, query_hash=None):
    if not query_hash:
        return self._journal(self.all_entries, Query)

    matching_entries = [entry for entry in self.all_entries
                        if query_hash == compare.hash_entry(entry)]
    if not matching_entries:
        return

    assert len(matching_entries) == 1
    return serialize_entry(matching_entries[0])
def context(self, ehash):
    try:
        entry = next(entry for entry in self.all_entries
                     if ehash == compare.hash_entry(entry))
    except StopIteration:
        return

    context_str = context.render_entry_context(self.all_entries,
                                               self.options, entry)
    return {
        'context': context_str.split("\n", 2)[2],
        'entry': entry,
    }
def test_api_context(
    test_client: FlaskClient,
    snapshot: SnapshotFunc,
    example_ledger: FavaLedger,
) -> None:
    response = test_client.get("/long-example/api/context")
    assert_api_error(
        response, "Invalid API request: Parameter `entry_hash` is missing.")

    entry_hash = hash_entry(
        next(entry for entry in example_ledger.all_entries
             if entry.meta["lineno"] == 3732))
    response = test_client.get(
        f"/long-example/api/context?entry_hash={entry_hash}")
    assert_api_success(response)
    snapshot(response.json)

    entry_hash = hash_entry(example_ledger.all_entries[10])
    response = test_client.get(
        f"/long-example/api/context?entry_hash={entry_hash}")
    assert_api_success(response)
    assert response.json
    assert not response.json.get("balances_before")
    snapshot(response.json)
def get_entry(self, entry_hash: str) -> Directive:
    """Find an entry.

    Arguments:
        entry_hash: Hash of the entry.

    Returns:
        The entry with the given hash.

    Raises:
        FavaAPIException: If there is no entry for the given hash.
    """
    try:
        return next(entry for entry in self.all_entries
                    if entry_hash == hash_entry(entry))
    except StopIteration:
        raise FavaAPIException(f'No entry found for hash "{entry_hash}"')
def link_documents(entries, _):
    errors = []

    all_documents = [(index, entry) for index, entry in enumerate(entries)
                     if isinstance(entry, data.Document)]
    transactions = [(index, entry) for index, entry in enumerate(entries)
                    if isinstance(entry, data.Transaction)]

    for index, entry in transactions:
        disk_docs = [
            value for key, value in entry.meta.items()
            if key.startswith("document")
        ]

        _hash = hash_entry(entry)[:8]
        for disk_doc in disk_docs:
            disk_doc_path = normpath(
                join(dirname(entry.meta["filename"]), disk_doc))
            documents = [
                (j, document) for j, document in all_documents
                if (document.filename == disk_doc_path) or
                (document.account in [pos.account for pos in entry.postings]
                 and basename(document.filename) == disk_doc)
            ]

            if not documents:
                errors.append(
                    DocumentError(
                        entry.meta,
                        "Document not found: {}".format(disk_doc),
                        entry,
                    ))
                continue

            for j, document in documents:
                tags = (set(document.tags).union(["linked"]).difference(
                    ["discovered"]) if document.tags else set(["linked"]))
                links = (set(document.links).union([_hash])
                         if document.links else set([_hash]))
                entries[j] = document._replace(links=links, tags=tags)

            links = (set(entry.links).union([_hash])
                     if entry.links else set([_hash]))
            entries[index] = entry._replace(links=links)

    return entries, errors
def link_statements(entries, options_map):
    errors = []

    if 'documents' not in options_map or len(options_map['documents']) == 0:
        return entries, errors

    all_documents = [(i, entry) for i, entry in enumerate(entries)
                     if isinstance(entry, data.Document)]
    all_transactions = [(i, entry) for i, entry in enumerate(entries)
                        if isinstance(entry, data.Transaction)]

    for i, entry in all_transactions:
        statements = [value for key, value in entry.meta.items()
                      if key.startswith('statement')]

        _hash = hash_entry(entry)[:8]
        for statement in statements:
            statement_p = normpath(join(dirname(entry.meta['filename']),
                                        statement))
            documents = [(j, document) for j, document in all_documents
                         if (document.filename == statement_p) or
                         (document.account in
                          [pos.account for pos in entry.postings]
                          and basename(document.filename) == statement)]

            if (len(documents) == 0):
                errors.append(
                    StatementDocumentError(
                        entry.meta,
                        "Statement Document not found: {}".format(statement),
                        entry))
                continue

            for j, document in documents:
                tags = set(document.tags).union(
                    ['statement']).difference(['discovered']) \
                    if document.tags else set(['statement'])
                links = set(document.links).union([_hash]) \
                    if document.links else set([_hash])
                entries[j] = document._replace(links=links, tags=tags)

            links = set(entry.links).union([_hash]) \
                if entry.links else set([_hash])
            entries[i] = entry._replace(links=links)

    return entries, errors
def link_statements(entries, _):
    errors = []

    all_documents = [(index, entry) for index, entry in enumerate(entries)
                     if isinstance(entry, data.Document)]
    transactions = [(index, entry) for index, entry in enumerate(entries)
                    if isinstance(entry, data.Transaction)]

    for index, entry in transactions:
        statements = [
            value for key, value in entry.meta.items()
            if key.startswith('statement')
        ]

        _hash = hash_entry(entry)[:8]
        for statement in statements:
            statement_path = normpath(
                join(dirname(entry.meta['filename']), statement))
            documents = [
                (j, document) for j, document in all_documents
                if (document.filename == statement_path) or
                (document.account in [pos.account for pos in entry.postings]
                 and basename(document.filename) == statement)
            ]

            if not documents:
                errors.append(
                    StatementDocumentError(
                        entry.meta,
                        "Statement Document not found: {}".format(statement),
                        entry))
                continue

            for j, document in documents:
                tags = set(document.tags).union(
                    ['statement']).difference(['discovered']) \
                    if document.tags else set(['statement'])
                links = set(document.links).union([_hash]) \
                    if document.links else set([_hash])
                entries[j] = document._replace(links=links, tags=tags)

            links = set(entry.links).union([_hash]) \
                if entry.links else set([_hash])
            entries[index] = entry._replace(links=links)

    return entries, errors
def get_entry(self, entry_hash):
    """Find an entry.

    Arguments:
        entry_hash: Hash of the entry.

    Returns:
        The entry with the given hash.

    Raises:
        FavaAPIException: If there is no entry for the given hash.
    """
    try:
        return next(entry for entry in self.all_entries
                    if entry_hash == hash_entry(entry))
    except StopIteration:
        raise FavaAPIException('No entry found for hash "{}"'
                               .format(entry_hash))
def context(self, ehash):
    matching_entries = [entry for entry in self.all_entries
                        if ehash == compare.hash_entry(entry)]

    if not matching_entries:
        return

    # the hash should uniquely identify the entry
    assert len(matching_entries) == 1
    entry = matching_entries[0]
    context_str = context.render_entry_context(self.all_entries,
                                               self.options, entry)
    return {
        "hash": ehash,
        "context": context_str.split("\n", 2)[2],
        "filename": entry.meta["filename"],
        "lineno": entry.meta["lineno"],
        "journal": matching_entries,
    }
def _add_metadata(new_entry, entry):
    new_entry['meta'] = {
        'type': entry.__class__.__name__.lower(),
    }
    new_entry['hash'] = compare.hash_entry(entry)

    if entry.meta:
        new_entry['meta']['filename'] = getattr(entry.meta, 'filename', None)
        new_entry['meta']['lineno'] = getattr(entry.meta, 'lineno', None)

        new_entry['metadata'] = entry.meta.copy()
        new_entry['metadata'].pop("__tolerances__", None)
        new_entry['metadata'].pop("__automatic__", None)
        new_entry['metadata'].pop("filename", None)
        new_entry['metadata'].pop("lineno", None)

    return new_entry
def link_documents(entries, _):
    errors = []

    all_documents = [(index, entry) for index, entry in enumerate(entries)
                     if isinstance(entry, data.Document)]
    transactions = [(index, entry) for index, entry in enumerate(entries)
                    if isinstance(entry, data.Transaction)]

    for index, entry in transactions:
        disk_docs = [value for key, value in entry.meta.items()
                     if key.startswith('document')]

        _hash = hash_entry(entry)[:8]
        for disk_doc in disk_docs:
            disk_doc_path = normpath(join(dirname(entry.meta['filename']),
                                          disk_doc))
            documents = [(j, document) for j, document in all_documents
                         if (document.filename == disk_doc_path) or
                         (document.account in
                          [pos.account for pos in entry.postings]
                          and basename(document.filename) == disk_doc)]

            if not documents:
                errors.append(
                    DocumentError(
                        entry.meta,
                        "Document not found: {}".format(disk_doc),
                        entry))
                continue

            for j, document in documents:
                tags = set(document.tags).union(
                    ['linked']).difference(['discovered']) \
                    if document.tags else set(['linked'])
                links = set(document.links).union([_hash]) \
                    if document.links else set([_hash])
                entries[j] = document._replace(links=links, tags=tags)

            links = set(entry.links).union([_hash]) \
                if entry.links else set([_hash])
            entries[index] = entry._replace(links=links)

    return entries, errors
def context(self, ehash=None):
    matching_entries = [entry for entry in self.entries
                        if ehash == compare.hash_entry(entry)]

    contexts = []
    dcontext = self.options['dcontext']
    for entry in matching_entries:
        context_str = context.render_entry_context(self.entries,
                                                   self.options, entry)

        hash_ = context_str.split("\n", 2)[0].split(':')[1].strip()
        filenamelineno = context_str.split("\n", 2)[1]
        filename = filenamelineno.split(":")[1].strip()
        lineno = int(filenamelineno.split(":")[2].strip())

        contexts.append({
            'hash': hash_,
            'context': context_str.split("\n", 2)[2],
            'filename': filename,
            'line': lineno
        })

    # TODO
    #     if len(matching_entries) == 0:
    #         print("ERROR: Could not find matching entry for '{}'".format(ehash),
    #               file=oss)
    #
    #     elif len(matching_entries) > 1:
    #         print("ERROR: Ambiguous entries for '{}'".format(ehash),
    #               file=oss)
    #         print(file=oss)
    #         dcontext = app.options['dcontext']
    #         printer.print_entries(matching_entries, dcontext, file=oss)
    #
    #     else:

    return {
        'hash': ehash,
        'contexts': contexts,
        'journal': self._journal_for_postings(matching_entries)
    }
def tracking(entries, options_map):
    account_types = get_account_types(options_map)
    income_tracking = set()
    expense_tracking = set()
    errors = []
    new_entries = []

    for entry in entries:
        new_entry = None
        if isinstance(entry, Open) and entry.meta.get('tracking', False):
            if is_account_type(account_types.expenses, entry.account):
                expense_tracking.add(entry.account)
            elif is_account_type(account_types.income, entry.account):
                income_tracking.add(entry.account)
        elif isinstance(entry, Transaction):
            new_postings = []
            tracking_balance = Inventory()
            for posting in entry.postings:
                if 'tracking' in posting.meta:
                    new_acct = posting.meta['tracking']
                    new_posting = posting._replace(account=new_acct, meta=None)
                    new_postings.append(new_posting)
                    tracking_balance.add_position(posting)
            if new_postings:
                for position in -tracking_balance:
                    if position.units.number < 0 and len(income_tracking) == 1:
                        posting_acct, = income_tracking
                    elif position.units.number > 0 and len(
                            expense_tracking) == 1:
                        posting_acct, = expense_tracking
                    else:
                        continue
                    new_posting = Posting(posting_acct, position.units,
                                          position.cost, None, None, None)
                    new_postings.append(new_posting)

                link_id = 'tracking-' + compare.hash_entry(entry)
                new_links = entry.links | set([link_id])
                entry = entry._replace(links=new_links)
                new_entry = entry._replace(postings=new_postings)

        new_entries.append(entry)
        if new_entry:
            new_entries.append(new_entry)

    return new_entries, errors
def context(self, ehash):
    matching_entries = [entry for entry in self.all_entries
                        if ehash == compare.hash_entry(entry)]

    if not matching_entries:
        return

    # the hash should uniquely identify the entry
    assert len(matching_entries) == 1
    entry = matching_entries[0]
    context_str = context.render_entry_context(self.all_entries,
                                               self.options, entry)
    return {
        'hash': ehash,
        'context': context_str.split("\n", 2)[2],
        'filename': entry.meta['filename'],
        'lineno': entry.meta['lineno'],
        'journal': self._journal(matching_entries),
    }
def context(self, ehash=None):
    matching_entries = [entry for entry in self.entries
                        if ehash == compare.hash_entry(entry)]

    contexts = []
    dcontext = self.options['dcontext']
    for entry in matching_entries:
        context_str = context.render_entry_context(
            self.entries, self.options, entry)

        hash_ = context_str.split("\n", 2)[0].split(':')[1].strip()
        filenamelineno = context_str.split("\n", 2)[1]
        filename = filenamelineno.split(":")[1].strip()
        lineno = int(filenamelineno.split(":")[2].strip())

        contexts.append({
            'hash': hash_,
            'context': context_str.split("\n", 2)[2],
            'filename': filename,
            'line': lineno
        })

    # TODO
    #     if len(matching_entries) == 0:
    #         print("ERROR: Could not find matching entry for '{}'".format(ehash),
    #               file=oss)
    #
    #     elif len(matching_entries) > 1:
    #         print("ERROR: Ambiguous entries for '{}'".format(ehash),
    #               file=oss)
    #         print(file=oss)
    #         dcontext = app.options['dcontext']
    #         printer.print_entries(matching_entries, dcontext, file=oss)
    #
    #     else:

    return {
        'hash': ehash,
        'contexts': contexts,
        'journal': self._journal_for_postings(matching_entries)
    }
def context_(ehash=None):
    "Render the before & after context around a transaction entry."

    matching_entries = [entry
                        for entry in app.entries
                        if ehash == compare.hash_entry(entry)]

    oss = io.StringIO()
    if len(matching_entries) == 0:
        print("ERROR: Could not find matching entry for '{}'".format(ehash),
              file=oss)

    elif len(matching_entries) > 1:
        print("ERROR: Ambiguous entries for '{}'".format(ehash),
              file=oss)
        print(file=oss)
        dcontext = app.options['dcontext']
        printer.print_entries(matching_entries, dcontext, file=oss)

    else:
        entry = matching_entries[0]

        # Render the context.
        oss.write("<pre>\n")
        oss.write(context.render_entry_context(app.entries, app.options, entry))
        oss.write("</pre>\n")

        # Render the filelinks.
        if FILELINK_PROTOCOL:
            meta = entry.meta
            uri = FILELINK_PROTOCOL.format(filename=meta.get('filename'),
                                           lineno=meta.get('lineno'))
            oss.write('<div class="filelink"><a href="{}">{}</a></div>'.format(
                uri, 'Open'))

    return render_global(
        pagetitle="Context: {}".format(ehash),
        contents=oss.getvalue())
def context(self, ehash):
    matching_entries = [entry for entry in self.all_entries
                        if ehash == compare.hash_entry(entry)]

    if not matching_entries:
        return

    # the hash should uniquely identify the entry
    assert len(matching_entries) == 1
    entry = matching_entries[0]
    context_str = context.render_entry_context(self.all_entries,
                                               self.options, entry)

    ctx = context_str.split("\n", 2)
    filenamelineno = ctx[1]
    filename = filenamelineno.split(":")[1].strip()
    lineno = int(filenamelineno.split(":")[2].strip())

    return {
        'hash': ehash,
        'context': ctx[2],
        'filename': filename,
        'line': lineno,
        'journal': self._journal(matching_entries)
    }
def render_entry_context(entries, options_map, entry):
    """Render the context before and after a particular transaction is applied.

    Args:
      entries: A list of directives.
      options_map: A dict of options, as produced by the parser.
      entry: The entry instance which should be rendered. (Note that this
        object is expected to be in the set of entries, not just structurally
        equal.)
    Returns:
      A multiline string of text, which consists of the context before the
      transaction is applied, the transaction itself, and the context after it
      is applied. You can just print that, it is in form that is intended to be
      consumed by the user.
    """
    oss = io.StringIO()

    meta = entry.meta
    print("Hash:{}".format(compare.hash_entry(entry)), file=oss)
    print("Location: {}:{}".format(meta["filename"], meta["lineno"]), file=oss)

    # Get the list of accounts sorted by the order in which they appear in the
    # closest entry.
    order = {}
    if isinstance(entry, data.Transaction):
        order = {posting.account: index
                 for index, posting in enumerate(entry.postings)}
    accounts = sorted(getters.get_entry_accounts(entry),
                      key=lambda account: order.get(account, 10000))

    # Accumulate the balances of these accounts up to the entry.
    balance_before, balance_after = interpolate.compute_entry_context(
        entries, entry)

    # Create a format line for printing the contents of account balances.
    max_account_width = max(map(len, accounts)) if accounts else 1
    position_line = '{{:1}} {{:{width}}} {{:>49}}'.format(
        width=max_account_width)

    # Print the context before.
    print(file=oss)
    print("------------ Balances before transaction", file=oss)
    print(file=oss)

    before_hashes = set()
    for account in accounts:
        positions = balance_before[account].get_positions()
        for position in positions:
            before_hashes.add((account, hash(position)))
            print(position_line.format('', account, str(position)), file=oss)
        if not positions:
            print(position_line.format('', account, ''), file=oss)
        print(file=oss)

    # Print the entry itself.
    print(file=oss)
    print("------------ Transaction", file=oss)
    print(file=oss)
    dcontext = options_map['dcontext']
    printer.print_entry(entry, dcontext, render_weights=True, file=oss)

    if isinstance(entry, data.Transaction):
        print(file=oss)

        # Print residuals.
        residual = interpolate.compute_residual(entry.postings)
        if not residual.is_empty():
            # Note: We render the residual at maximum precision, for debugging.
            print('Residual: {}'.format(residual), file=oss)

        # Dump the tolerances used.
        tolerances = interpolate.infer_tolerances(entry.postings, options_map)
        if tolerances:
            print('Tolerances: {}'.format(
                ', '.join('{}={}'.format(key, value)
                          for key, value in sorted(tolerances.items()))),
                  file=oss)

        # Compute the total cost basis.
        cost_basis = inventory.Inventory(pos for pos in entry.postings
                                         if pos.cost is not None).reduce(
                                             convert.get_cost)
        if not cost_basis.is_empty():
            print('Basis: {}'.format(cost_basis), file=oss)

    # Print the context after.
    print(file=oss)
    print("------------ Balances after transaction", file=oss)
    print(file=oss)

    for account in accounts:
        positions = balance_after[account].get_positions()
        for position in positions:
            changed = (account, hash(position)) not in before_hashes
            print(position_line.format('*' if changed else '',
                                       account,
                                       str(position)),
                  file=oss)
        if not positions:
            print(position_line.format('', account, ''), file=oss)
        print(file=oss)

    return oss.getvalue()
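# A minimal usage sketch (assumption, not from the source): load a ledger,
# look an entry up by its hash, and render its before/after context with the
# render_entry_context() defined above. The ledger file name is a placeholder.
from beancount import loader
from beancount.core import compare

entries, _, options_map = loader.load_file("ledger.beancount")
target_hash = compare.hash_entry(entries[0])
entry = next(e for e in entries if compare.hash_entry(e) == target_hash)
print(render_entry_context(entries, options_map, entry))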
def hash_entry(entry):
    """Hash an entry."""
    return compare.hash_entry(entry)
def _journal_for_postings(self, postings, include_types=None,
                          with_change_and_balance=False):
    journal = []

    for posting, leg_postings, change, entry_balance in \
            realization.iterate_with_balance(postings):

        if include_types and not isinstance(posting, include_types):
            continue

        if isinstance(posting, Transaction) or \
           isinstance(posting, Note) or \
           isinstance(posting, Balance) or \
           isinstance(posting, Open) or \
           isinstance(posting, Close) or \
           isinstance(posting, Pad) or \
           isinstance(posting, Event) or \
           isinstance(posting, Document):

            entry = {
                'meta': {
                    'type': posting.__class__.__name__.lower(),
                    'filename': posting.meta['filename'],
                    'lineno': posting.meta['lineno']
                },
                'date': posting.date,
                'hash': compare.hash_entry(posting),
                'metadata': posting.meta.copy()
            }

            entry['metadata'].pop("__tolerances__", None)
            entry['metadata'].pop("filename", None)
            entry['metadata'].pop("lineno", None)

            if isinstance(posting, Open):
                entry['account'] = posting.account
                entry['currencies'] = posting.currencies

            if isinstance(posting, Close):
                entry['account'] = posting.account

            if isinstance(posting, Event):
                entry['type'] = posting.type
                entry['description'] = posting.description

            if isinstance(posting, Note):
                entry['comment'] = posting.comment

            if isinstance(posting, Document):
                entry['account'] = posting.account
                entry['filename'] = posting.filename

            if isinstance(posting, Pad):
                entry['account'] = posting.account
                entry['source_account'] = posting.source_account

            if isinstance(posting, Balance):
                entry['account'] = posting.account
                entry['change'] = {
                    posting.amount.currency: posting.amount.number
                }
                entry['amount'] = {
                    posting.amount.currency: posting.amount.number
                }

                if posting.diff_amount:
                    balance = entry_balance.get_units(posting.amount.currency)
                    entry['diff_amount'] = {
                        posting.diff_amount.currency:
                        posting.diff_amount.number
                    }
                    entry['balance'] = {balance.currency: balance.number}

            if isinstance(posting, Transaction):
                if posting.flag == 'P':
                    entry['meta']['type'] = 'padding'  # TODO handle Padding, Summarize and Transfer

                entry['flag'] = posting.flag
                entry['payee'] = posting.payee
                entry['narration'] = posting.narration
                entry['tags'] = posting.tags or []
                entry['links'] = posting.links or []
                entry['legs'] = []

                for posting_ in posting.postings:
                    leg = {
                        'account': posting_.account,
                        'flag': posting_.flag,
                        'hash': entry['hash']
                    }

                    if posting_.position:
                        leg['position'] = posting_.position.number
                        leg['position_currency'] = posting_.position.lot.currency
                        cost = interpolate.get_posting_weight(posting_)
                        leg['cost'] = cost.number
                        leg['cost_currency'] = cost.currency

                    if posting_.price:
                        leg['price'] = posting_.price.number
                        leg['price_currency'] = posting_.price.currency

                    entry['legs'].append(leg)

            if with_change_and_balance:
                if isinstance(posting, Balance):
                    entry['change'] = {
                        posting.amount.currency: posting.amount.number
                    }
                    entry['balance'] = self._inventory_to_json(entry_balance)
                    # , include_currencies=entry['change'].keys())

                if isinstance(posting, Transaction):
                    entry['change'] = self._inventory_to_json(change)
                    entry['balance'] = self._inventory_to_json(
                        entry_balance,
                        include_currencies=entry['change'].keys())

            journal.append(entry)

    return journal
def render_context(self, entry):
    """See base class."""
    # Note: rendering to global application.
    # Note(2): we could avoid rendering links to summarizing and transfer
    # entries which are not going to be found.
    return self.build_global('context', ehash=compare.hash_entry(entry))
def wrap_entry(entry: bean.Directive):
    return {
        "type": get_entry_type(entry),
        "entry": entry,
        "hash": compare.hash_entry(entry),
    }
def link_documents(entries: Entries, _: Any) -> tuple[Entries, list[DocumentError]]:
    """Link entries to documents."""

    errors = []

    # All document indices by their full file path.
    by_fullname = {}
    # All document indices by their file basename.
    by_basename = defaultdict(list)

    for index, entry in enumerate(entries):
        if isinstance(entry, Document):
            by_fullname[entry.filename] = index
            by_basename[basename(entry.filename)].append((index, entry))

    for index, entry in enumerate(entries):
        disk_docs = [
            value
            for key, value in entry.meta.items()
            if key.startswith("document")
        ]

        if not disk_docs:
            continue

        hash_ = hash_entry(entry)[:8]
        entry_accounts = get_entry_accounts(entry)
        for disk_doc in disk_docs:
            documents = [
                j
                for j, document in by_basename[disk_doc]
                if document.account in entry_accounts
            ]
            disk_doc_path = normpath(
                join(dirname(entry.meta["filename"]), disk_doc))
            if disk_doc_path in by_fullname:
                documents.append(by_fullname[disk_doc_path])

            if not documents:
                errors.append(
                    DocumentError(
                        entry.meta,
                        f"Document not found: '{disk_doc}'",
                        entry,
                    ))
                continue

            for j in documents:
                # Since we might link a document multiple times, we have to use
                # the index for the replacement here.
                doc: Document = entries[j]  # type: ignore
                entries[j] = doc._replace(
                    links=add_to_set(doc.links, hash_),
                    tags=add_to_set(doc.tags, "linked"),
                )

            # The other entry types do not support links, so only add links
            # for txns.
            if isinstance(entry, Transaction):
                entries[index] = entry._replace(
                    links=add_to_set(entry.links, hash_))

    return entries, errors
def __call__(self, entry):
    return hash_entry(entry)
def _hash_entry(entry):
    return 'entry-' + hash_entry(entry)
def _journal_for_postings(self, postings, include_types=None,
                          with_change_and_balance=False):
    journal = []

    for posting, leg_postings, change, entry_balance in \
            realization.iterate_with_balance(postings):

        if include_types and not isinstance(posting, include_types):
            continue

        if isinstance(posting, Transaction) or \
           isinstance(posting, Note) or \
           isinstance(posting, Balance) or \
           isinstance(posting, Open) or \
           isinstance(posting, Close) or \
           isinstance(posting, Pad) or \
           isinstance(posting, Event) or \
           isinstance(posting, Document):

            entry = {
                'meta': {
                    'type': posting.__class__.__name__.lower(),
                    'filename': posting.meta['filename'],
                    'lineno': posting.meta['lineno']
                },
                'date': posting.date,
                'hash': compare.hash_entry(posting),
                'metadata': posting.meta.copy()
            }

            entry['metadata'].pop("__tolerances__", None)
            entry['metadata'].pop("filename", None)
            entry['metadata'].pop("lineno", None)

            if isinstance(posting, Open):
                entry['account'] = posting.account
                entry['currencies'] = posting.currencies

            if isinstance(posting, Close):
                entry['account'] = posting.account

            if isinstance(posting, Event):
                entry['type'] = posting.type
                entry['description'] = posting.description

            if isinstance(posting, Note):
                entry['comment'] = posting.comment

            if isinstance(posting, Document):
                entry['account'] = posting.account
                entry['filename'] = posting.filename

            if isinstance(posting, Pad):
                entry['account'] = posting.account
                entry['source_account'] = posting.source_account

            if isinstance(posting, Balance):
                entry['account'] = posting.account
                entry['change'] = {
                    posting.amount.currency: posting.amount.number
                }
                entry['amount'] = {
                    posting.amount.currency: posting.amount.number
                }

                if posting.diff_amount:
                    balance = entry_balance.get_units(posting.amount.currency)
                    entry['diff_amount'] = {
                        posting.diff_amount.currency:
                        posting.diff_amount.number
                    }
                    entry['balance'] = {balance.currency: balance.number}

            if isinstance(posting, Transaction):
                if posting.flag == 'P':
                    entry['meta']['type'] = 'padding'  # TODO handle Padding, Summarize and Transfer

                entry['flag'] = posting.flag
                entry['payee'] = posting.payee
                entry['narration'] = posting.narration
                entry['tags'] = posting.tags or []
                entry['links'] = posting.links or []
                entry['legs'] = []

                for posting_ in posting.postings:
                    leg = {
                        'account': posting_.account,
                        'flag': posting_.flag,
                        'hash': entry['hash']
                    }

                    if posting_.position:
                        leg['position'] = posting_.position.number
                        leg['position_currency'] = posting_.position.lot.currency
                        cost = interpolate.get_posting_weight(posting_)
                        leg['cost'] = cost.number
                        leg['cost_currency'] = cost.currency

                    if posting_.price:
                        leg['price'] = posting_.price.number
                        leg['price_currency'] = posting_.price.currency

                    entry['legs'].append(leg)

            if with_change_and_balance:
                if isinstance(posting, Balance):
                    entry['change'] = {
                        posting.amount.currency: posting.amount.number
                    }
                    entry['balance'] = self._inventory_to_json(entry_balance)
                    # , include_currencies=entry['change'].keys())

                if isinstance(posting, Transaction):
                    entry['change'] = self._inventory_to_json(change)
                    entry['balance'] = self._inventory_to_json(
                        entry_balance,
                        include_currencies=entry['change'].keys())

            journal.append(entry)

    return journal
def do_roundtrip(filename, unused_args):
    """Round-trip test on arbitrary Ledger.

    Read a Ledger's transactions, print them out, re-read them again and
    compare them. Both sets of parsed entries should be equal. Both printed
    files are output to disk, so you can also run diff on them yourself
    afterwards.

    Args:
      filename: A string, the Beancount input filename.
    """
    from beancount.parser import printer
    from beancount.core import compare
    from beancount import loader

    round1_filename = round2_filename = None
    try:
        logging.basicConfig(level=logging.INFO,
                            format='%(levelname)-8s: %(message)s')

        logging.info("Read the entries")
        entries, errors, options_map = loader.load_file(filename)
        printer.print_errors(errors, file=sys.stderr)

        logging.info("Print them out to a file")
        basename, extension = path.splitext(filename)
        round1_filename = ''.join([basename, '.roundtrip1', extension])
        with open(round1_filename, 'w') as outfile:
            printer.print_entries(entries, file=outfile)

        logging.info("Read the entries from that file")
        # Note that we don't want to run any of the auto-generation here, but
        # parsing now returns incomplete objects and we assume idempotence on a
        # file that was output from the printer after having been processed, so
        # it shouldn't add anything new. That is, a processed file printed and
        # resolved when parsed again should contain the same entries, i.e.
        # nothing new should be generated.
        entries_roundtrip, errors, options_map = loader.load_file(
            round1_filename)

        # Print out the list of errors from parsing the results.
        if errors:
            print(',----------------------------------------------------------------------')
            printer.print_errors(errors, file=sys.stdout)
            print('`----------------------------------------------------------------------')

        logging.info("Print what you read to yet another file")
        round2_filename = ''.join([basename, '.roundtrip2', extension])
        with open(round2_filename, 'w') as outfile:
            printer.print_entries(entries_roundtrip, file=outfile)

        logging.info("Compare the original entries with the re-read ones")
        same, missing1, missing2 = compare.compare_entries(
            entries, entries_roundtrip)
        if same:
            logging.info('Entries are the same. Congratulations.')
        else:
            logging.error('Entries differ!')
            print()
            print('\n\nMissing from original:')
            # Report only the entries that the comparison flagged as missing.
            for entry in missing1:
                print(entry)
                print(compare.hash_entry(entry))
                print(printer.format_entry(entry))
                print()

            print('\n\nMissing from round-trip:')
            for entry in missing2:
                print(entry)
                print(compare.hash_entry(entry))
                print(printer.format_entry(entry))
                print()
    finally:
        for rfilename in (round1_filename, round2_filename):
            # Guard against a filename that was never assigned because an
            # earlier step failed.
            if rfilename and path.exists(rfilename):
                os.remove(rfilename)
def _journal_for_postings(self, postings, include_types=None):
    journal = []

    for posting, leg_postings, change, entry_balance in \
            realization.iterate_with_balance(postings):

        if include_types and not isinstance(posting, include_types):
            continue

        if isinstance(posting, Transaction) or \
           isinstance(posting, Note) or \
           isinstance(posting, Balance) or \
           isinstance(posting, Open) or \
           isinstance(posting, Close) or \
           isinstance(posting, Pad) or \
           isinstance(posting, Document):

            # TEMP
            # if isinstance(posting, TxnPosting):
            #     posting = posting.txn

            entry = {
                'meta': {
                    'type': posting.__class__.__name__.lower(),
                    'filename': posting.meta['filename'],
                    'lineno': posting.meta['lineno']
                },
                'date': posting.date,
                'hash': compare.hash_entry(posting)
            }

            if isinstance(posting, Open):
                entry['account'] = posting.account
                entry['currencies'] = posting.currencies
                entry['booking'] = posting.booking  # TODO in html-template

            if isinstance(posting, Close):
                entry['account'] = posting.account

            if isinstance(posting, Note):
                entry['comment'] = posting.comment

            if isinstance(posting, Document):
                entry['account'] = posting.account
                entry['filename'] = posting.filename

            if isinstance(posting, Pad):
                entry['account'] = posting.account
                entry['source_account'] = posting.source_account

            if isinstance(posting, Balance):
                # TODO failed balances
                entry['account'] = posting.account
                entry['change'] = {
                    posting.amount.currency: posting.amount.number
                }
                entry['balance'] = {
                    posting.amount.currency: posting.amount.number
                }
                entry['tolerance'] = posting.tolerance  # TODO currency? TODO in HTML-template
                entry['diff_amount'] = posting.diff_amount  # TODO currency? TODO in HTML-template

            if isinstance(posting, Transaction):
                if posting.flag == 'P':
                    entry['meta']['type'] = 'padding'

                entry['flag'] = posting.flag
                entry['payee'] = posting.payee
                entry['narration'] = posting.narration
                entry['tags'] = posting.tags
                entry['links'] = posting.links
                entry['change'] = self._inventory_to_json(change)
                entry['balance'] = self._inventory_to_json(entry_balance)
                entry['legs'] = []

                for posting_ in posting.postings:
                    leg = {
                        'account': posting_.account,
                        'flag': posting_.flag,
                        'hash': entry['hash']
                    }

                    if posting_.position:
                        leg['position'] = posting_.position.number
                        leg['position_currency'] = posting_.position.lot.currency

                    if posting_.price:
                        leg['price'] = posting_.price.number
                        leg['price_currency'] = posting_.price.currency

                    entry['legs'].append(leg)

            journal.append(entry)

    return journal
def hash_entry(entry: Directive) -> str:
    """Hash an entry."""
    return compare.hash_entry(entry)
def __call__(self, context):
    return hash_entry(context.entry)
def hash_entry(self, entry):
    return compare.hash_entry(entry)