def find_insert_position(
    entry: Directive,
    insert_options: list[InsertEntryOption],
    default_filename: str,
) -> tuple[str, int | None]:
    """Find insert position for an entry.

    Args:
        entry: An entry.
        insert_options: A list of InsertEntryOption.
        default_filename: The default file to insert into if no option matches.

    Returns:
        A tuple of the filename and the line number.
    """
    # Get the list of accounts that should be considered for the entry.
    # For transactions, we want the reversed list of posting accounts.
    accounts = get_entry_accounts(entry)
    # Make no assumptions about the order of insert_options entries and
    # instead sort them ourselves (by descending dates).
    insert_options = sorted(insert_options, key=attrgetter("date"), reverse=True)
    for account in accounts:
        for insert_option in insert_options:
            # Only consider InsertEntryOptions dated before the entry date.
            if insert_option.date >= entry.date:
                continue
            if insert_option.re.match(account):
                return (insert_option.filename, insert_option.lineno - 1)
    return (default_filename, None)
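
# Standalone usage sketch for find_insert_position (not part of the function
# above): OptionStub only mirrors the InsertEntryOption attributes used there
# (date, re, filename, lineno), and the matching strategy is restated here so
# the example runs without Fava installed. All names and values below are
# illustrative assumptions.
import datetime
import re
from operator import attrgetter
from typing import NamedTuple


class OptionStub(NamedTuple):
    date: datetime.date
    re: "re.Pattern[str]"
    filename: str
    lineno: int


def pick_insert_option(accounts, options, entry_date, default_filename):
    # Same strategy as above: newest option first, skip options dated on or
    # after the entry, first regex match on an entry account wins.
    for account in accounts:
        for option in sorted(options, key=attrgetter("date"), reverse=True):
            if option.date >= entry_date:
                continue
            if option.re.match(account):
                return option.filename, option.lineno - 1
    return default_filename, None


_options = [
    OptionStub(datetime.date(2021, 1, 1), re.compile("Assets:.*"), "assets.beancount", 10),
    OptionStub(datetime.date(2021, 3, 1), re.compile("Expenses:.*"), "expenses.beancount", 5),
]
# An entry dated 2021-06-01 posting to Expenses:Food and Assets:Cash: the
# Expenses option matches first, so it would be inserted above line 5 of
# expenses.beancount.
print(pick_insert_option(["Expenses:Food", "Assets:Cash"], _options,
                         datetime.date(2021, 6, 1), "main.beancount"))
# -> ('expenses.beancount', 4)
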
def statement_path(self, entry_hash: str, metadata_key: str) -> str:
    """Returns the path for a statement found in the specified entry."""
    entry = self.get_entry(entry_hash)
    value = entry.meta[metadata_key]
    accounts = set(get_entry_accounts(entry))
    full_path = join(dirname(entry.meta["filename"]), value)
    for document in self.all_entries_by_type.Document:
        if document.filename == full_path:
            return document.filename
        if document.account in accounts:
            if basename(document.filename) == value:
                return document.filename

    raise FavaAPIException("Statement not found.")
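
# Standalone sketch of the resolution order used by statement_path
# (illustrative names only, no FavaLedger needed): the metadata value is
# first resolved relative to the entry's source file and compared against
# full document paths; failing that, it is matched by basename among
# documents filed under one of the entry's accounts. The real method raises
# FavaAPIException instead of returning None.
from os.path import basename, dirname, join


def resolve_statement(entry_filename, value, documents, entry_accounts):
    full_path = join(dirname(entry_filename), value)
    for doc_filename, doc_account in documents:
        if doc_filename == full_path:
            return doc_filename
        if doc_account in entry_accounts and basename(doc_filename) == value:
            return doc_filename
    return None


_statement_docs = [("/ledger/documents/Assets/Cash/2021-01-01.receipt.pdf", "Assets:Cash")]
print(resolve_statement("/ledger/main.beancount", "2021-01-01.receipt.pdf",
                        _statement_docs, {"Assets:Cash", "Expenses:Food"}))
# -> '/ledger/documents/Assets/Cash/2021-01-01.receipt.pdf'
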
def test_account_filter(example_ledger: FavaLedger) -> None:
    account_filter = AccountFilter(
        example_ledger.options, example_ledger.fava_options
    )

    account_filter.set("Assets")
    filtered_entries = account_filter.apply(example_ledger.all_entries)
    assert len(filtered_entries) == 541
    for entry in filtered_entries:
        assert any(
            has_component(a, "Assets") for a in get_entry_accounts(entry)
        )

    account_filter.set(".*US:State")
    filtered_entries = account_filter.apply(example_ledger.all_entries)
    assert len(filtered_entries) == 67
def _include_entry(self, entry: Directive) -> bool:
    if self.value is None or self.match is None:
        return False
    return any(
        account.has_component(name, self.value) or self.match(name)
        for name in get_entry_accounts(entry)
    )
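
# Quick illustration of the two match paths in _include_entry above, assuming
# the beancount library is installed: has_component matches whole account
# components, while the compiled regex (e.g. the ".*US:State" filter value
# from the test above) matches from the start of the account name. The
# account names here are made up.
import re

from beancount.core.account import has_component

print(has_component("Assets:US:BofA:Checking", "US"))   # True
print(has_component("Assets:US:BofA:Checking", "USA"))  # False, no such component
print(bool(re.match(".*US:State", "Expenses:Taxes:Y2021:US:State")))  # True
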
def link_documents(entries: Entries, _: Any) -> tuple[Entries, list[DocumentError]]:
    """Link entries to documents."""
    errors = []

    # All document indices by their full file path.
    by_fullname = {}
    # All document indices by their file basename.
    by_basename = defaultdict(list)

    for index, entry in enumerate(entries):
        if isinstance(entry, Document):
            by_fullname[entry.filename] = index
            by_basename[basename(entry.filename)].append((index, entry))

    for index, entry in enumerate(entries):
        disk_docs = [
            value
            for key, value in entry.meta.items()
            if key.startswith("document")
        ]

        if not disk_docs:
            continue

        hash_ = hash_entry(entry)[:8]
        entry_accounts = get_entry_accounts(entry)
        for disk_doc in disk_docs:
            documents = [
                j
                for j, document in by_basename[disk_doc]
                if document.account in entry_accounts
            ]
            disk_doc_path = normpath(
                join(dirname(entry.meta["filename"]), disk_doc)
            )
            if disk_doc_path in by_fullname:
                documents.append(by_fullname[disk_doc_path])

            if not documents:
                errors.append(
                    DocumentError(
                        entry.meta,
                        f"Document not found: '{disk_doc}'",
                        entry,
                    )
                )
                continue

            for j in documents:
                # Since we might link a document multiple times, we have to
                # use the index for the replacement here.
                doc: Document = entries[j]  # type: ignore
                entries[j] = doc._replace(
                    links=add_to_set(doc.links, hash_),
                    tags=add_to_set(doc.tags, "linked"),
                )

            # The other entry types do not support links, so only add links
            # for txns.
            if isinstance(entry, Transaction):
                entries[index] = entry._replace(
                    links=add_to_set(entry.links, hash_)
                )

    return entries, errors
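
# Standalone sketch of the matching step in link_documents (paths and
# accounts are made up): Document entries are indexed by basename, and a
# metadata value such as document: "receipt.pdf" on an entry is matched
# against documents filed under one of that entry's own accounts; a path
# relative to the entry's source file is also accepted via the full-path
# index, which is not shown here.
from collections import defaultdict
from os.path import basename

_linked_docs = [
    ("/ledger/documents/Assets/Cash/receipt.pdf", "Assets:Cash"),
    ("/ledger/documents/Expenses/Food/receipt.pdf", "Expenses:Food"),
]
docs_by_basename = defaultdict(list)
for doc_index, (doc_filename, doc_account) in enumerate(_linked_docs):
    docs_by_basename[basename(doc_filename)].append((doc_index, doc_account))

_entry_accounts = {"Assets:Cash", "Liabilities:CreditCard"}
matches = [
    i for i, doc_account in docs_by_basename["receipt.pdf"]
    if doc_account in _entry_accounts
]
print(matches)  # [0] -- only the Assets:Cash document would be linked
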