def metadata_spray_account_open(entries, replace_type, pattern, metadata_dict):
    """Spray metadata onto the Open directives of accounts matching a pattern.

    Args:
      entries: A list of directives; matching Open entries are replaced
        in-place within this list.
      replace_type: The replacement mode forwarded to metadata_spray().
      pattern: A regular expression string matched against account names.
      metadata_dict: A dict of metadata to spray onto matching accounts.
    Returns:
      A tuple (entries, errors) where entries is the (mutated) input list.
    """
    errors = []
    account_entries = getters.get_account_open_close(entries)
    regexer = re.compile(pattern)
    for account_, entry in account_entries.items():
        # Only operate on account Open entries; entry is an (open, close)
        # pair and entry[0] may be None when only a Close exists.
        if not isinstance(entry[0], data.Open):
            continue
        if regexer.match(account_):
            spray_meta, spray_errors = metadata_spray(
                entry, replace_type, metadata_dict)
            spray_entry = data.Open(spray_meta, entry[0].date,
                                    entry[0].account, None, None)

            # Replace the original Open directive in the entries list.
            entry_index = entries.index(entry[0])
            entries[entry_index] = spray_entry
            errors += spray_errors
    return entries, errors
def auto_insert_open(entries, unused_options_map):
    """Insert Open directives for accounts that are used but never opened.

    For every account that appears in the entries without an explicit Open
    directive, an Open is synthesized at the account's date of first use.
    Existing entries are simply maintained in the output list.

    Args:
      entries: A list of directives.
      unused_options_map: A parser options dict.
    Returns:
      A list of entries, possibly with more Open entries than before, and a
      list of errors (always empty).
    """
    opened_accounts = {entry.account
                       for entry in entries
                       if isinstance(entry, data.Open)}
    new_entries = []
    accounts_first, _ = getters.get_accounts_use_map(entries)
    for index, (account, date_first_used) in enumerate(
            sorted(accounts_first.items())):
        if account not in opened_accounts:
            meta = data.new_metadata('<auto_accounts>', index)
            new_entries.append(
                data.Open(meta, date_first_used, account, None, None))

    # Only merge and re-sort when something was actually inserted.
    if new_entries:
        new_entries.extend(entries)
        new_entries.sort(key=data.entry_sortkey)
    else:
        new_entries = entries

    return new_entries, []
def do_missing_open(filename, args):
    """Print out Open directives that are missing for the given input file.

    This can be useful during demos in order to quickly generate all the
    required Open directives without having to type them manually.

    Args:
      filename: A string, which consists in the filename.
      args: A tuple of the rest of arguments. We're expecting the first
        argument to be an integer as a string.
    """
    # Local imports keep this subcommand self-contained.
    from beancount.parser import printer
    from beancount.core import data
    from beancount.core import getters
    from beancount import loader

    entries, errors, options_map = loader.load_file(filename)

    # Get accounts usage and open directives.
    first_use_map, _ = getters.get_accounts_use_map(entries)
    open_close_map = getters.get_account_open_close(entries)

    # Synthesize an Open at the date of first use for each account with no
    # Open/Close directive.
    new_entries = []
    for account, first_use_date in first_use_map.items():
        if account not in open_close_map:
            new_entries.append(
                data.Open(data.new_metadata(filename, 0), first_use_date,
                          account, None, None))

    dcontext = options_map['dcontext']
    printer.print_entries(data.sorted(new_entries), dcontext)
def add_account(self, name, date=None, currency=None, initial_amount=None):
    """Open a new account, optionally padding in an initial balance.

    Args:
      name: The raw account name; formatted via self._format_account_name().
      date: The opening date; defaults to config.DEFAULT_DATE.
      currency: Optional single currency constraint for the account.
      initial_amount: Optional opening balance; requires `currency`.
    Returns:
      The index of the new account in self.accounts.
    """
    date = date or config.DEFAULT_DATE
    name = self._format_account_name(name)
    self.accounts.append(name)
    # A one-element currency-constraint list, or None if no currency given.
    currencies = currency and [currency]
    self.entries.append(
        bc.Open(self._get_meta(), date, name, currencies, booking=None))
    if initial_amount is not None and initial_amount != 0:
        assert currency is not None
        # Pad from the opening-balance equity account, then assert the
        # resulting balance so the Pad has something to fill.
        self.pad_balances.append(
            bc.Pad(self._get_meta(),
                   date=config.DEFAULT_PAD_DATE,
                   account=name,
                   source_account=config.OPENING_BALANCE_ACCOUNT))
        self.pad_balances.append(
            bc.Balance(self._get_meta(),
                       date=config.DEFAULT_BALANCE_DATE,
                       account=name,
                       amount=bc.Amount(initial_amount, currency),
                       tolerance=None,
                       diff_amount=None))
    return len(self.accounts) - 1  # index of the account on self.accounts
def create_sort_data(self):
    """Build a fixture of directives, deliberately out of date order, for
    testing entry sorting.

    Returns:
      A list of directives spanning three dates, with one simple posting
      attached to each Transaction.
    """
    account = 'Assets:Bank:Checking'
    date1 = date(2014, 1, 15)
    date2 = date(2014, 1, 18)
    date3 = date(2014, 1, 20)
    entries = [
        data.Transaction(data.new_metadata(".", 1100), date3, FLAG, None,
                         "Next day", None, None, []),
        data.Close(data.new_metadata(".", 1000), date2, account),
        # BUGFIX: the amount string used to contain a stray '"' character
        # ('200.00 USD"'), which is not a valid amount.
        data.Balance(data.new_metadata(".", 1001), date2, account,
                     A('200.00 USD'), None, None),
        # NOTE(review): Open.currencies is conventionally a list; the bare
        # 'USD' string is preserved here for fixture compatibility.
        data.Open(data.new_metadata(".", 1002), date2, account, 'USD', None),
        data.Transaction(data.new_metadata(".", 1009), date2, FLAG, None,
                         "Transaction 2", None, None, []),
        data.Transaction(data.new_metadata(".", 1008), date2, FLAG, None,
                         "Transaction 1", None, None, []),
        data.Transaction(data.new_metadata(".", 900), date1, FLAG, None,
                         "Previous day", None, None, []),
    ]
    for entry in entries:
        if isinstance(entry, data.Transaction):
            data.create_simple_posting(entry, 'Assets:Bank:Checking',
                                       '123.45', 'USD')
    return entries
def get_holdings_entries(entries, options_map): """Summarizes the entries to list of entries representing the final holdings.. This list includes the latest prices entries as well. This can be used to load a full snapshot of holdings without including the entire history. This is a way of summarizing a balance sheet in a way that filters away history. Args: entries: A list of directives. options_map: A dict of parsed options. Returns: A string, the entries to print out. """ # The entries will be created at the latest date, against an equity account. latest_date = entries[-1].date _, equity_account, _ = options.get_previous_accounts(options_map) # Get all the assets. holdings_list, _ = holdings.get_assets_holdings(entries, options_map) # Create synthetic entries for them. holdings_entries = [] for index, holding in enumerate(holdings_list): meta = data.new_metadata('report_holdings_print', index) entry = data.Transaction(meta, latest_date, flags.FLAG_SUMMARIZE, None, "", None, None, []) # Convert the holding to a position. pos = holdings.holding_to_position(holding) entry.postings.append( data.Posting(holding.account, pos.units, pos.cost, None, None, None)) cost = -convert.get_cost(pos) entry.postings.append( data.Posting(equity_account, cost, None, None, None, None)) holdings_entries.append(entry) # Get opening directives for all the accounts. used_accounts = {holding.account for holding in holdings_list} open_entries = summarize.get_open_entries(entries, latest_date) used_open_entries = [ open_entry for open_entry in open_entries if open_entry.account in used_accounts ] # Add an entry for the equity account we're using. meta = data.new_metadata('report_holdings_print', -1) used_open_entries.insert( 0, data.Open(meta, latest_date, equity_account, None, None)) # Get the latest price entries. price_entries = prices.get_last_price_entries(entries, None) return used_open_entries + holdings_entries + price_entries
def print_results(self):
    """Print the opening balances followed by the collected entries."""
    # NOTE(review): `open_accounts` is constructed but never passed to
    # print_entries below. It looks like it was meant to be printed ahead of
    # the opening balances; confirm intent before relying on the output.
    open_accounts = [
        bcdata.Open({}, datetime.date(1970, 1, 1), acct, None, None)
        for acct in sorted(self.initial_balances.keys())
    ]
    opening_balances = list(self.transfer_opening_balances())
    entries = self.entries
    beancount.parser.printer.print_entries(opening_balances + entries)
def Open(account, date):
    """Build a beancount Open directive for the given source account.

    The open date defaults to `date` but is moved back to the earliest
    posting date found among the account's splits, when any exist.
    """
    meta = meta_from(account, 'code description')
    name = account_name(account)
    commodity = None
    if account.commodity:
        commodity = [commodity_name(account.commodity)]
    post_dates = [split.transaction.post_date for split in account.splits]
    if post_dates:
        date = min(post_dates)
    return data.Open(meta, date, name, commodity, None)
def test_format_and_print_error(self):
    """format_error/print_error/print_errors all produce string output."""
    entry = data.Open(META, date(2014, 1, 15), 'Assets:Bank:Checking', [],
                      None)
    error = interpolate.BalanceError(META, "Example balance error", entry)

    # Formatting a single error returns a string.
    error_str = printer.format_error(error)
    self.assertTrue(isinstance(error_str, str))

    # Printing a single error writes string content to the stream.
    oss = io.StringIO()
    printer.print_error(error, oss)
    self.assertTrue(isinstance(oss.getvalue(), str))

    # Printing a list of errors also writes string content to the stream.
    oss = io.StringIO()
    printer.print_errors([error], oss)
    self.assertTrue(isinstance(oss.getvalue(), str))
def create_open_directives(new_accounts, entries):
    """Create Open directives for generated accounts that don't exist yet.

    Ensure that the accounts we're going to use to book the postings exist,
    by creating open entries for those that we generated that weren't already
    existing accounts.

    Args:
      new_accounts: An iterable of account name strings to ensure exist.
      entries: The existing list of directives, used to find already-opened
        accounts and the earliest date.
    Returns:
      A list of new Open directives, one per missing account.
    """
    # Robustness: no entries means no earliest date and nothing to open
    # against; previously this raised IndexError on entries[0].
    if not entries:
        return []
    meta = data.new_metadata('<zerosum>', 0)
    earliest_date = entries[0].date
    open_entries = getters.get_account_open_close(entries)
    new_open_entries = []
    for account_ in sorted(new_accounts):
        if account_ not in open_entries:
            meta = data.new_metadata(meta['filename'], 0)
            open_entry = data.Open(meta, earliest_date, account_, None, None)
            new_open_entries.append(open_entry)
    return new_open_entries
def generate_transactions(self):
    """Iterate through the GL Records, generate entries"""
    # Convert each group of GL records into transaction entries.
    for record_group in self.gl_records.values():
        self.entries.extend(self.entry_from_gl(record_group))
    # Emit an Open directive for every newly discovered account.
    open_directives = [
        data.Open(date=datetime.date(2010, 1, 1),
                  account=account_name,
                  meta={'filename': '', 'lineno': 0},
                  currencies=None,
                  booking=None)
        for account_name in sorted(self.new_accounts)
    ]
    self.entries.extend(open_directives)
def test_remove_account_postings(self):
    """remove_account_postings() strips postings to the named account while
    keeping every directive in the stream."""
    meta = data.new_metadata(".", 0)
    date = datetime.date.today()
    entry1 = data.Open(meta, date, 'Liabilities:US:CreditCard', None, None)
    entry2 = data.Open(meta, date, 'Equity:Rounding', None, None)

    # A transaction with a posting on the account to be removed.
    entry3 = data.Transaction(meta, date, FLAG, None, "Something", None,
                              None, [])
    data.create_simple_posting(entry3, 'Liabilities:US:CreditCard', '-50',
                               'USD')
    data.create_simple_posting(entry3, 'Equity:Rounding', '0.00123', 'USD')
    data.create_simple_posting(entry3, 'Expenses:Food:Restaurant', '50',
                               'USD')
    entry4 = data.Price(meta, date, 'HOOL', A('23 USD'))

    in_entries = [entry1, entry2, entry3, entry4]
    out_entries = data.remove_account_postings('Equity:Rounding', in_entries)
    # All four directives survive; only the Equity:Rounding posting inside
    # the transaction is gone.
    self.assertEqual(4, len(out_entries))
    self.assertEqual(
        ['Liabilities:US:CreditCard', 'Expenses:Food:Restaurant'],
        [posting.account for posting in out_entries[2].postings])
def expense_spread(entries, options_map):
    """Spread flagged expense postings over time via the spread account.

    Transactions whose postings carry SPREAD_KEY metadata are expanded with
    spread_posting() and rebooked against SPREAD_ACCOUNT; an Open directive
    for SPREAD_ACCOUNT is always prepended.

    Args:
      entries: A list of directives.
      options_map: A parser options dict (unused).
    Returns:
      A tuple of (transformed entries + generated spread entries, errors).
    """
    new_entries = []
    spread_entries = []
    meta = data.new_metadata('<spread_account>', 1)
    new_entries.append(
        data.Open(meta, datetime.date(2010, 1, 1), SPREAD_ACCOUNT, None,
                  None))
    for entry in entries:
        if isinstance(entry, data.Transaction):
            for posting in entry.postings:
                # BUGFIX: posting.meta can be None; guard before the
                # membership test. Also removed the dead `new_postings` local
                # that was created but never used.
                if posting.meta and SPREAD_KEY in posting.meta:
                    spread_entries.extend(spread_posting(entry, posting))
                    entry = replace_expenses_accounts(entry, SPREAD_ACCOUNT)
        # Every directive is forwarded, transformed or not, so the plugin
        # never drops non-Transaction entries.
        new_entries.append(entry)
    return new_entries + spread_entries, []
def convert_account(account):
    """Convert a GnuCash account into a beancount Open directive.

    The open date is the post date of the account's first split, or today's
    date when the account has no splits.

    Args:
      account: A GnuCash account object.
    Returns:
      A data.Open directive.
    """
    import datetime
    meta = {'description': account.GetDescription()}
    gnc_commodity = account.GetCommodity()
    if gnc_commodity is None:
        commodity = None
    else:
        commodity = [
            Converter.normalize_commodity(gnc_commodity.get_mnemonic())
        ]
    splits = account.GetSplitList()
    # BUGFIX: data.Open expects a datetime.date, not the '%Y-%m-%d' string
    # the previous strftime() call produced (a string date breaks sorting
    # and printing downstream).
    if not splits:
        date = datetime.date.today()
    else:
        date = splits[0].GetParent().GetDate()
        # NOTE(review): recent GnuCash bindings return a datetime here;
        # normalize to a date. Confirm against the binding version in use.
        if isinstance(date, datetime.datetime):
            date = date.date()
    name = Converter.convert_full_name(account)
    return data.Open(meta, date, name, commodity, None)
def extract_new_accounts(self,
                         statement: FlexStatement,
                         existing_entries: list = None):
    """Create Open directives for securities in the statement that have no
    Open entry yet.

    Args:
      statement: The IB FlexStatement whose SecuritiesInfo is scanned.
      existing_entries: Optional list of already-loaded directives; accounts
        already opened there are skipped.
    Returns:
      A list of new data.Open directives.
    """
    # NOTE(review): `existing` and `root_account` appear unused below.
    existing = {}
    root_account = self.accounts['root']
    existing_accounts = set()
    if existing_entries:
        for entry in existing_entries:
            if isinstance(entry, data.Open):
                existing_accounts.add(entry.account)
    results = []
    for obj in statement.SecuritiesInfo:
        symbol = self.clean_symbol(obj.symbol)
        account_name = self.account_for_symbol(statement, symbol)
        if account_name in existing_accounts:
            continue
        # Carry the security's identifying details as account metadata.
        meta = {
            'lineno': 0,
            'filename': '',
            'description': obj.description,
            'multiplier': obj.multiplier,
            'conid': obj.conid,
            'asset-type': obj.assetCategory.name.lower(),
        }
        if obj.cusip:
            meta['cusip'] = obj.cusip
        if obj.securityID:
            meta['security-id'] = obj.securityID
        entry = data.Open(
            date=datetime.date(2010, 1, 1),
            account=account_name,
            currencies=[symbol],
            meta=meta,
            booking=None,
        )
        results.append(entry)
    return results
def open_dit_accounts(entries, dit_component):
    """
    Minimally adapted from beancount.plugins.auto_accounts.
    """
    # Accounts that already have an explicit Open directive.
    already_open = {
        entry.account
        for entry in entries if isinstance(entry, data.Open)
    }
    first_use, _ = getters.get_accounts_use_map(entries)
    new_entries = []
    # Synthesize an Open at first use for unopened accounts that carry the
    # requested component.
    for index, (account, first_date) in enumerate(sorted(first_use.items())):
        if account in already_open:
            continue
        if not has_component(account, dit_component):
            continue
        meta = data.new_metadata(__name__, index)
        new_entries.append(data.Open(meta, first_date, account, None, None))
    return new_entries
def missing_open(filename):
    """Print Open directives missing in FILENAME.

    This can be useful during demos in order to quickly generate all the
    required Open directives without having to type them manually.
    """
    entries, errors, options_map = loader.load_file(filename)

    # Get accounts usage and open directives.
    first_use_map, _ = getters.get_accounts_use_map(entries)
    open_close_map = getters.get_account_open_close(entries)

    # Synthesize an Open at the date of first use for every account that has
    # no Open/Close directive.
    new_entries = []
    for account, first_use_date in first_use_map.items():
        if account not in open_close_map:
            new_entries.append(
                data.Open(data.new_metadata(filename, 0), first_use_date,
                          account, None, None))

    dcontext = options_map['dcontext']
    printer.print_entries(data.sorted(new_entries), dcontext)
def insert_currency_trading_postings(entries, options_map, config):
    """Insert currency trading postings.

    Args:
      entries: A list of directives.
      options_map: An options map (unused).
      config: The base account name for currency trading accounts.
    Returns:
      A tuple of the transformed entries (with Open directives for any newly
      created currency accounts prepended) and a list of errors.
    """
    base_account = config.strip()
    if not account.is_valid(base_account):
        base_account = DEFAULT_BASE_ACCOUNT

    errors = []
    new_entries = []
    new_accounts = set()
    for entry in entries:
        if isinstance(entry, Transaction):
            curmap, has_price = group_postings_by_weight_currency(entry)
            # Only multi-currency transactions involving a price conversion
            # need neutralizing postings.
            if has_price and len(curmap) > 1:
                new_postings = get_neutralizing_postings(
                    curmap, base_account, new_accounts)
                entry = entry._replace(postings=new_postings)
                if META_PROCESSED:
                    entry.meta[META_PROCESSED] = True
        new_entries.append(entry)

    # Robustness: only look up the earliest date when accounts were actually
    # created; previously entries[0].date raised IndexError on empty input.
    open_entries = []
    if new_accounts:
        earliest_date = entries[0].date
        open_entries = [
            data.Open(data.new_metadata('<currency_accounts>', index),
                      earliest_date, acc, None, None)
            for index, acc in enumerate(sorted(new_accounts))
        ]

    return open_entries + new_entries, errors
def share_postings(
    entries: List[Any], options_map: Dict[str, Any], config: str
) -> Tuple[List[Union[data.Open, data.Transaction]], List[None]]:
    """Share postings among a number of participants (see module docstring for details).

    Args:
      entries: A list of directives. We're interested only in the Transaction
        instances.
      options_map: A parser options dict (unused).
      config: The plugin configuration string: a whitespace-separated list of
        member names, or a sequence of them.
    Returns a tuple containing:
      * A list of entries, with potentially more accounts and potentially more
        postings with smaller amounts.
      * An empty list of errors.
    """
    # Validate and sanitize configuration.
    if isinstance(config, str):
        members = config.split()
    elif isinstance(config, (tuple, list)):
        members = config
    else:
        raise RuntimeError(
            "Invalid plugin configuration: configuration for share_postings "
            "should be a string or a sequence."
        )

    # Filter the entries and transform transactions.
    open_entries = []
    new_entries = []
    for entry in entries:
        if isinstance(entry, data.Open):
            sharees, shared_notation = get_sharing_info(entry.account, members)
            if sharees:
                # Create Open directives for shared accounts if necessary;
                # the original shared Open itself is dropped (note the
                # `continue`).
                for member in sharees:
                    open_date = entry.date
                    meta = data.new_metadata("<share_postings>", 0)
                    open_entries.append(
                        data.Open(
                            meta,
                            open_date,
                            entry.account.replace(shared_notation, member),
                            None,
                            None,
                        )
                    )
                continue

        if isinstance(entry, data.Transaction):
            new_postings = []
            for posting in entry.postings:
                sharees, shared_notation = get_sharing_info(posting.account, members)
                if not sharees:
                    new_postings.append(posting)
                    continue

                split_units = amount.Amount(
                    posting.units.number / len(sharees), posting.units.currency
                )

                for member in sharees:
                    subaccount = posting.account.replace(shared_notation, member)

                    # Ensure the modified postings are marked as
                    # automatically calculated, so that the resulting
                    # calculated amounts aren't used to affect inferred
                    # tolerances.
                    # BUGFIX: posting.meta may be None; guard the copy (same
                    # idiom as split_expenses elsewhere in this file).
                    meta = posting.meta.copy() if posting.meta else {}
                    meta[interpolate.AUTOMATIC_META] = True

                    # Add a new posting for each member, to an account
                    # with the name of this member.
                    if new_postings and subaccount == new_postings[-1].account:
                        ## Aggregate postings for the same member
                        new_amount = amount.Amount(
                            new_postings[-1].units.number + split_units.number,
                            posting.units.currency,
                        )
                        new_postings[-1] = posting._replace(
                            meta=meta,
                            account=subaccount,
                            units=new_amount,
                            cost=posting.cost,
                        )
                    else:
                        new_postings.append(
                            posting._replace(
                                meta=meta,
                                account=subaccount,
                                units=split_units,
                                cost=posting.cost,
                            )
                        )

            # Modify the entry in-place, replace its postings.
            entry = entry._replace(postings=new_postings)

        new_entries.append(entry)

    return open_entries + new_entries, []
def unwrap_entry(data: dict) -> bean.Directive:
    """Deserialize a {"type": ..., "entry": ...} dict into a beancount directive.

    Args:
      data: A dict with a "type" key naming the directive class and an
        "entry" key holding its fields.
    Returns:
      The corresponding beancount directive.
      NOTE(review): falls through and implicitly returns None for
      unrecognized type strings.
    """
    type, e = itemgetter("type", "entry")(data)
    meta = e.get("meta")
    date = parse_date(e["date"])
    if type == "Open":
        return bean.Open(
            meta,
            date,
            account=e["account"],
            currencies=e.get("currencies", []),
            booking=e.get("booking"),
        )
    if type == "Close":
        return bean.Close(meta, date, account=e["account"])
    if type == "Commodity":
        return bean.Commodity(meta, date, currency=e["currency"])
    if type == "Pad":
        return bean.Pad(meta, date, account=e["account"],
                        source_account=e["source_account"])
    if type == "Balance":
        return bean.Balance(
            meta,
            date,
            account=e["account"],
            amount=parse_amount(e["amount"]),
            tolerance=e.get("tolerance"),
            diff_amount=e.get("diff_amount"),
        )
    if type == "Transaction":
        return bean.Transaction(
            meta,
            date,
            flag=e["flag"],
            payee=e.get("payee"),
            narration=e["narration"],
            tags=set(e["tags"] if "tags" in e else []),
            links=set(e["links"] if "links" in e else []),
            postings=[parse_posting(p) for p in e.get("postings", [])],
        )
    if type == "Note":
        return bean.Note(meta, date, account=e["account"],
                         comment=e.get("comment", ""))
    if type == "Event":
        return bean.Event(meta, date, type=e["type"],
                          description=e["description"])
    if type == "Query":
        return bean.Query(meta, date, name=e["name"],
                          query_string=e["query_string"])
    if type == "Price":
        return bean.Price(meta, date, currency=e["currency"],
                          amount=parse_amount(e["amount"]))
    if type == "Document":
        return bean.Document(
            meta,
            date,
            account=e["account"],
            filename=e["filename"],
            tags=set(e["tags"] if "tags" in e else []),
            links=set(e["links"] if "links" in e else []),
        )
    if type == "Custom":
        return bean.Custom(meta, date, type=e["type"], values=e["values"])
def split_income(entries, options_map, config_str): """Split income transactions.""" # pylint: disable=not-callable,too-many-locals errors = [] new_entries = [] new_accounts = set() config = { 'income': options_map['name_income'], 'net_income': options_map['name_income'] + ':Net', 'tag': 'pretax', 'taxes': options_map['name_expenses'] + ':Taxes', } if config_str.strip(): try: expr = ast.literal_eval(config_str) config.update(expr) except (SyntaxError, ValueError): errors.append( SplitIncomeError( data.new_metadata(options_map['filename'], 0), "Syntax error in config: {}".format(config_str), None)) return entries, errors for entry in entries: if not isinstance(entry, data.Transaction) or not any( account.startswith(config['income']) for account in getters.get_entry_accounts(entry)): continue # The new entry containing the raw income and taxes. new_entry = copy.deepcopy(entry) new_entry = new_entry._replace( postings=[], tags=frozenset(set([config['tag']]) | entry.tags)) new_entries.append(new_entry) income = collections.defaultdict(Inventory) taxes = collections.defaultdict(Decimal) for posting in list(entry.postings): if posting.account.startswith(config['income']): new_entry.postings.append(posting) entry.postings.remove(posting) income[posting.account].add_amount(posting.units) elif re.match(config['taxes'], posting.account): new_entry.postings.append(posting) entry.postings.remove(posting) taxes[posting.units.currency] += posting.units.number for account, inv in income.items(): net_account = account.replace(config['income'], config['net_income']) if net_account not in new_accounts: new_accounts.add(net_account) new_entries.append( data.Open(data.new_metadata('<split_income>', 0), entry.date, net_account, None, None)) for pos in inv: amount = pos.units number = amount.number + taxes.pop(amount.currency, ZERO) data.create_simple_posting(entry, net_account, number, amount.currency) data.create_simple_posting(new_entry, net_account, -number, amount.currency) return 
entries + new_entries, errors
def internalize(entries, transfer_account, accounts_value, accounts_intflows,
                accounts_internalize=None):
    """Internalize internal flows that would be lost because booked against
    external flow accounts. This splits up entries that have accounts both in
    internal flows and external flows. A new set of entries are returned,
    along with a list of entries that were split and replaced by a pair of
    entries.

    Args:
      entries: A list of directives to process for internalization.
      transfer_account: A string, the name of an account to use for
        internalizing entries which need to be split between internal and
        external flows. A good default value would be an equity account,
        'Equity:Internalized' or something like that.
      accounts_value: A set of account name strings, the names of the asset
        accounts included in valuing the portfolio.
      accounts_intflows: A set of account name strings, the names of internal
        flow accounts (normally income and expenses) that aren't external
        flows.
      accounts_internalize: A set of account name strings to trigger explicit
        internalization of transactions with no value account. If a
        transaction is found that has only internal accounts and external
        accounts, the postings whose accounts are in this set of accounts
        will be internalized. This is a method that can be used to pull
        dividends into the portfolio when valuing portfolios without their
        cash component. See docstring and documentation for details. If
        specified, this set of accounts must be a subset of the internal
        flows accounts.
    Returns:
      A pair of the new list of internalized entries, including all the other
      entries, and a short list of just the original entries that were
      removed and replaced by pairs of entries.
    """
    # Verify that external flow entries only affect balance sheet accounts and
    # not income or expenses accounts (internal flows). We do this because we
    # want to ensure that all income and expenses are incurred against assets
    # that live within the assets group. An example of something we'd like to
    # avoid is an external flow paying for fees incurred within the account
    # that should diminish the returns of the related accounts. To fix this,
    # we split the entry into two entries, one without external flows against
    # a transfer account that we consider an assets account, and just the
    # external flows against this same transfer account.
    assert (isinstance(
        transfer_account,
        str)), ("Invalid transfer account: {}".format(transfer_account))

    if accounts_internalize and not (accounts_internalize <=
                                     accounts_intflows):
        raise ValueError(
            "Internalization accounts is not a subset of internal flows accounts."
        )

    new_entries = []
    replaced_entries = []
    index = 1
    for entry in entries:
        if not isinstance(entry, data.Transaction):
            new_entries.append(entry)
            continue

        # Break up postings into the three categories.
        postings_assets = []
        postings_intflows = []
        postings_extflows = []
        postings_internalize = []
        for posting in entry.postings:
            if posting.account in accounts_value:
                postings_list = postings_assets
            elif posting.account in accounts_intflows:
                postings_list = postings_intflows
            else:
                postings_list = postings_extflows
            postings_list.append(posting)

            if accounts_internalize and posting.account in accounts_internalize:
                postings_internalize.append(posting)

        # Check if the entry is to be internalized and split it up in two
        # entries and replace the entry if that's the case.
        if (postings_intflows and postings_extflows
                and (postings_assets or postings_internalize)):
            replaced_entries.append(entry)

            # We will attach a link to each of the split entries.
            link = LINK_FORMAT.format(index)
            index += 1

            # Calculate the weight of the balance to transfer.
            balance_transfer = inventory.Inventory()
            for posting in postings_extflows:
                balance_transfer.add_amount(
                    posting.position.get_weight(posting.price))

            prototype_entry = entry._replace(flag=flags.FLAG_RETURNS,
                                             links=(entry.links or set())
                                             | set([link]))

            # Create internal flows posting.
            postings_transfer_int = [
                data.Posting(transfer_account, position_, None, None, None)
                for position_ in balance_transfer.get_positions()
            ]
            new_entries.append(
                prototype_entry._replace(postings=(postings_assets +
                                                   postings_intflows +
                                                   postings_transfer_int)))

            # Create external flows posting.
            postings_transfer_ext = [
                data.Posting(transfer_account, -position_, None, None, None)
                for position_ in balance_transfer.get_positions()
            ]
            new_entries.append(
                prototype_entry._replace(postings=(postings_transfer_ext +
                                                   postings_extflows)))
        else:
            new_entries.append(entry)

    # The transfer account does not have an Open entry, insert one. (This is
    # just us being pedantic about Beancount requirements, this will not change
    # the returns, but if someone looks at internalized entries it produces a
    # correct set of entries you can load cleanly).
    open_close_map = getters.get_account_open_close(new_entries)
    if transfer_account not in open_close_map:
        open_transfer_entry = data.Open(
            data.new_metadata("beancount.projects.returns", 0),
            new_entries[0].date, transfer_account, None, None)
        new_entries.insert(0, open_transfer_entry)

    return new_entries, replaced_entries
def remote_accounts(entries, options_map):
    """* Remote Accounts

    Fetch a list of accounts and possible meta-data from a google sheet.
    Merge these with the current file's 'open' directive. Any "new" entries
    are dynamically injected and optionally burned to a local file.

    Accepts the following Bean based configuration:

    2018-06-14 custom "coolbeans" "accounts-workbook-url" "URL to Google Sheet"
    2018-06-14 custom "coolbeans" "accounts-sheet-name" "NameOfTab"
    2018-06-14 custom "coolbeans" "new-accounts-bean" "reports/new-accounts.bean"
    2018-06-14 custom "coolbeans" "google-apis" "~/.google-apis.json"

    secrets_filename = os.environ.get('GOOGLE_APIS', path.expanduser('~/.google-apis.json'))
    """
    settings = options_map['coolbeans']
    secrets_file = get_setting('google-apis', settings)
    connection = google_connect(secrets_file)

    # NOTE(review): new_accounts_path stays None when 'new-accounts-bean' is
    # unset, yet it is dereferenced below whenever new_entries is non-empty;
    # confirm the configuration always provides it.
    new_accounts_path = None
    new_accounts_file = get_setting('new-accounts-bean', settings)
    if new_accounts_file:
        new_accounts_path = pathlib.Path(new_accounts_file)

    workbook_url = get_setting('accounts-workbook-url', settings)
    sheet_name = get_setting('accounts-sheet-name', settings)
    if not workbook_url or not sheet_name:
        logger.error("Unable to configure Accounts Sync")
        return entries, []

    workbook = None
    if workbook_url:
        try:
            workbook = connection.open(workbook_url)
        except gspread.exceptions.SpreadsheetNotFound:
            logger.error(f"Unable to find Google Sheets '{workbook}'")
            available: typing.List[gspread.Worksheet] = connection.openall()
            logger.error(f"Possible {(a.title for a in available)}")
            raise ValueError(f"Invalid Google Sheets URL {workbook}")
    # else:
    #     # Use the first available sheet for these credentials:
    #     all_sheets = connection.openall()
    #     if all_sheets:
    #         sheet = all_sheets[0]
    #         logger.info(f"Connecting to google sheet '{sheet.title}'.")
    #     else:
    #         print("Credentials unable to find any authorized sheets.")
    #         sys.exit(3)

    sheet = safe_open_sheet(workbook, sheet_name)
    possible_accounts = load_accounts_from_sheet(sheet)
    DEBUG_CONTEXT['possible_accounts'] = possible_accounts

    # Index the sheet rows by account name.
    sheet_by_name = dict(
        (a['account'], a) for a in possible_accounts
    )
    # NOTE(review): last_row appears unused.
    last_row = len(possible_accounts) + 1
    append_to_sheet = []
    open_by_account = {}

    # Some Account Trees Should be Hidden, use a 'hidden': 1 Meta
    hidden_prefixes = set()
    for entry in entries:
        DEBUG_CONTEXT['entry'] = entry
        if not isinstance(entry, data.Open):
            continue
        hidden = entry.meta.get('hidden', 0)
        if bool(int(hidden)):
            hidden_prefixes.add(entry.account)

    # Make a List of Local Entries, not on the Sheet
    for entry in entries:
        DEBUG_CONTEXT['entry'] = entry
        if not isinstance(entry, data.Open):
            continue
        # Skip accounts under a hidden prefix (but still index them).
        match = False
        for hidden_name in hidden_prefixes:
            if entry.account.startswith(hidden_name):
                match = True
                break
        open_by_account[entry.account] = entry
        if match:
            continue
        if entry.account not in sheet_by_name:
            # Skip these things
            if entry.account.endswith("Unrealized"):
                continue
            new_account = {
                'account': entry.account,
                'currencies': ','.join(entry.currencies or []),
                'slug': entry.meta.get('slug', ''),
                'account_number': str(entry.meta.get('account_number', '')),
                'institution': entry.meta.get('institution', ''),
                'date': entry.date.strftime("%Y-%m-%d")
            }
            append_to_sheet.append(new_account)

    DEBUG_CONTEXT['open'] = open_by_account

    # Make a List of Entries on the Sheet but Not in our Books
    new_entries = []
    for account, record in sheet_by_name.items():
        DEBUG_CONTEXT['current'] = (account, record)
        if account in open_by_account:
            # This _might_ be a modified entry, in which case we should use
            # Meta Attributes set in the Sheet!
            open_entry: data.Open = open_by_account[account]
            sheet_entry: dict = record
            compare_fields = ('slug', 'account_number', 'institution',
                              'document_name', 'document_tab')
            for field in compare_fields:
                sheet_val = str(sheet_entry[field])
                if sheet_val and sheet_val != open_entry.meta.get(field, None):
                    open_entry.meta[field] = sheet_val
                    # Re-emit the modified entry so it is written back out.
                    if open_entry not in new_entries:
                        new_entries.append(open_entry)
            continue

        # logging.info(f"New Account {account} from sheet: {record}.")
        # noinspection PyBroadException
        try:
            record = dict(record)
            record.pop('account')
            currencies = record.pop('currencies', '')
            if currencies:
                currencies = re.split(r'\W+', currencies)
            datestr = record.pop('date', "2000-01-01") or "2000-01-01"
            y, m, d = datestr.split('-')
            open_date = datetime.date(year=int(y), month=int(m), day=int(d))
            # Any remaining non-empty sheet columns become metadata.
            meta = dict((k, str(v)) for (k, v) in record.items() if v)
            meta['lineno'] = 0
            meta['filename'] = ''
            entry = data.Open(
                account=account,
                currencies=currencies,
                date=open_date,
                meta=meta,
                booking=None
            )
            # We add this to the "live" system as well
            entries.append(entry)
            new_entries.append(entry)
        except Exception:
            logger.exception(f"Unable to create new entry for {account}")

    if new_entries:
        from beancount.parser import printer
        with new_accounts_path.open("w") as stream:
            printer.print_entries(new_entries, file=stream)
        logger.info(
            f"Wrote {len(new_entries)} new account(s) to {new_accounts_path}.")

    # Write all the entries back to the end of the sheet
    append_to_sheet.sort(key=lambda x: x['account'])
    header = sheet.row_values(1)
    rows = []
    for item in append_to_sheet:
        row = [str(item.get(f, '')) for f in header]
        rows.append(row)
    new_sheet = safe_open_sheet(workbook, "Accounts (Missing)")
    new_sheet.update([header] + rows)

    return entries, []
#!/usr/bin/env python3
"""Print Open directives for youqian category accounts missing from a file.

Usage: script.py [existing.bean]
When a filename is given, accounts already opened there are skipped.
"""
from datetime import date
import sys

from beancount import loader
from beancount.core import compare, data
from beancount.parser import printer

from youqianDict import CategoryAll

entries_existing = []
if len(sys.argv) > 1:
    filename = sys.argv[1]
    entries_existing, errors, options = loader.load_file(filename)

# BUGFIX: only Open directives carry an `account` attribute; iterating every
# loaded entry (e.g. Transactions) raised AttributeError. Use a set for O(1)
# membership tests.
accounts_existing = {
    entry.account
    for entry in entries_existing if isinstance(entry, data.Open)
}

entries_new = []
for account_youqian in sorted(set(CategoryAll.values())):
    if account_youqian not in accounts_existing:
        entries_new.append(
            data.Open(
                meta=None,
                booking=None,
                date=date(1970, 1, 1),
                account=account_youqian,
                currencies=["CNY"]
            )
        )

for entry in entries_new:
    printer.print_entry(entry)
def split_expenses(entries, options_map, config):
    """Split postings according to expenses (see module docstring for details).

    Args:
      entries: A list of directives. We're interested only in the Transaction
        instances.
      options_map: A parser options dict.
      config: The plugin configuration string (or sequence) naming the members
        to split expenses between.
    Returns:
      A list of entries, with potentially more accounts and potentially more
      postings with smaller amounts.
    """
    # Validate and sanitize configuration: accept a whitespace-separated
    # string of member names, or an already-split sequence.
    if isinstance(config, str):
        members = config.split()
    elif isinstance(config, (tuple, list)):
        members = config
    else:
        raise RuntimeError(
            "Invalid plugin configuration: configuration for split_expenses "
            "should be a string or a sequence.")

    acctypes = options.get_account_types(options_map)

    def is_expense_account(account):
        # True when the account's root type is the configured Expenses type.
        return account_types.get_account_type(account) == acctypes.expenses

    # A predicate to quickly identify if an account contains the name of a
    # member.
    is_individual_account = re.compile('|'.join(map(re.escape, members))).search

    # Existing and previously unseen accounts.
    new_accounts = set()

    # Filter the entries and transform transactions.
    new_entries = []
    for entry in entries:
        if isinstance(entry, data.Transaction):
            new_postings = []
            for posting in entry.postings:
                # Only split shared expense postings, i.e. expense accounts
                # whose name does not already mention a specific member.
                if (is_expense_account(posting.account) and
                    not is_individual_account(posting.account)):

                    # Split this posting into multiple postings.
                    # Note: the split amount is the Decimal division of the
                    # original; rounding residue is handled by interpolation.
                    split_units = amount.Amount(
                        posting.units.number / len(members),
                        posting.units.currency)

                    for member in members:
                        # Mark the account as new if never seen before.
                        subaccount = account.join(posting.account, member)
                        new_accounts.add(subaccount)

                        # Ensure the modified postings are marked as
                        # automatically calculated, so that the resulting
                        # calculated amounts aren't used to affect inferred
                        # tolerances.
                        meta = posting.meta.copy() if posting.meta else {}
                        meta[interpolate.AUTOMATIC_META] = True

                        # Add a new posting for each member, to a new account
                        # with the name of this member.
                        new_postings.append(
                            posting._replace(meta=meta,
                                             account=subaccount,
                                             units=split_units,
                                             cost=posting.cost))
                else:
                    new_postings.append(posting)

            # Modify the entry in-place, replace its postings.
            entry = entry._replace(postings=new_postings)

        new_entries.append(entry)

    # Create Open directives for new subaccounts if necessary.
    # NOTE(review): open_date assumes `entries` is non-empty and sorted;
    # with an empty input this raises IndexError — presumably never the case
    # when run as a plugin, but confirm against the loader contract.
    oc_map = getters.get_account_open_close(entries)
    open_date = entries[0].date
    meta = data.new_metadata('<split_expenses>', 0)
    open_entries = []
    for new_account in new_accounts:
        if new_account not in oc_map:
            entry = data.Open(meta, open_date, new_account, None, None)
            open_entries.append(entry)

    return open_entries + new_entries, []
def add_unrealized_gains(entries, options_map, subaccount=None):
    """Insert entries for unrealized capital gains.

    This function inserts entries that represent unrealized gains, at the end
    of the available history. It returns a new list of entries, with the new
    gains inserted. It replaces the account type with an entry in an income
    account. Optionally, it can book the gain in a subaccount of the original
    and income accounts.

    Args:
      entries: A list of data directives.
      options_map: A dict of options, that conforms to beancount.parser.options.
      subaccount: A string, and optional the name of a subaccount to create
        under an account to book the unrealized gain. If this is left to its
        default value, the gain is booked directly in the same account.
    Returns:
      A list of entries, which includes the new unrealized capital gains entries
      at the end, and a list of errors. The new list of entries is still sorted.
    """
    errors = []
    meta = data.new_metadata('<unrealized_gains>', 0)
    account_types = options.get_account_types(options_map)

    # Assert the subaccount name is in valid format.
    if subaccount:
        validation_account = account.join(account_types.assets, subaccount)
        if not account.is_valid(validation_account):
            errors.append(
                UnrealizedError(
                    meta,
                    "Invalid subaccount name: '{}'".format(subaccount),
                    None))
            return entries, errors

    if not entries:
        return (entries, errors)

    price_map = prices.build_price_map(entries)
    new_entries = []
    # Walk month by month, starting at the first month after our first
    # transaction and stopping at the month after the last one.
    date = date_utils.next_month(entries[0].date)
    last_month = date_utils.next_month(entries[-1].date)
    last_holdings_with_currencies = None
    while date <= last_month:
        date_entries, holdings_with_currencies, date_errors = add_unrealized_gains_at_date(
            entries, new_entries, account_types.income, price_map, date, meta,
            subaccount)
        new_entries.extend(date_entries)
        errors.extend(date_errors)

        if last_holdings_with_currencies:
            # Any (account, cost_currency, currency) seen last month but not
            # this month has been fully disposed of: zero out its accumulated
            # unrealized gains.
            for account_, cost_currency, currency in last_holdings_with_currencies - holdings_with_currencies:
                # Create a negation transaction specifically to mark that all gains have been realized
                if subaccount:
                    account_ = account.join(account_, subaccount)

                latest_unrealized_entry = find_previous_unrealized_transaction(
                    new_entries, account_, cost_currency, currency)
                if not latest_unrealized_entry:
                    continue
                entry = data.Transaction(
                    data.new_metadata(meta["filename"], lineno=999,
                                      kvlist={'prev_currency': currency}),
                    date, flags.FLAG_UNREALIZED, None,
                    'Clear unrealized gains/losses of {}'.format(currency),
                    set(), set(), [])

                # Negate the previous transaction because the unrealized gains
                # are now 0.  Only the first two postings are the gain/income
                # pair; any further postings are not part of the balance.
                for posting in latest_unrealized_entry.postings[:2]:
                    entry.postings.append(
                        data.Posting(posting.account, -posting.units,
                                     None, None, None, None))

                new_entries.append(entry)

        last_holdings_with_currencies = holdings_with_currencies
        date = date_utils.next_month(date)

    # Ensure that the accounts we're going to use to book the postings exist, by
    # creating open entries for those that we generated that weren't already
    # existing accounts.
    new_accounts = {posting.account
                    for entry in new_entries
                    for posting in entry.postings}
    open_entries = getters.get_account_open_close(entries)
    new_open_entries = []
    for index, account_ in enumerate(sorted(new_accounts)):
        if account_ not in open_entries:
            meta = data.new_metadata(meta["filename"], index)
            # Open at the date of the first generated entry so the account
            # exists before any unrealized-gain posting touches it.
            open_entry = data.Open(meta, new_entries[0].date, account_,
                                   None, None)
            new_open_entries.append(open_entry)

    return (entries + new_open_entries + new_entries, errors)
def add_unrealized_gains(entries, options_map, subaccount=None):
    """Insert entries for unrealized capital gains.

    This function inserts entries that represent unrealized gains, at the end
    of the available history. It returns a new list of entries, with the new
    gains inserted. It replaces the account type with an entry in an income
    account. Optionally, it can book the gain in a subaccount of the original
    and income accounts.

    Args:
      entries: A list of data directives.
      options_map: A dict of options, that conforms to beancount.parser.options.
      subaccount: A string, and optional the name of a subaccount to create
        under an account to book the unrealized gain. If this is left to its
        default value, the gain is booked directly in the same account.
    Returns:
      A list of entries, which includes the new unrealized capital gains entries
      at the end, and a list of errors. The new list of entries is still sorted.
    """
    errors = []
    meta = data.new_metadata('<unrealized_gains>', 0)
    account_types = options.get_account_types(options_map)

    # Assert the subaccount name is in valid format.
    if subaccount:
        validation_account = account.join(account_types.assets, subaccount)
        if not account.is_valid(validation_account):
            errors.append(
                UnrealizedError(meta,
                                "Invalid subaccount name: '{}'".format(subaccount),
                                None))
            return entries, errors

    if not entries:
        return (entries, errors)

    # Get the latest prices from the entries, and compute the final positions.
    price_map = prices.build_price_map(entries)
    holdings_list = holdings.get_final_holdings(entries, price_map=price_map)

    # Group positions by (account, cost, cost_currency).
    holdings_list = holdings.aggregate_holdings_by(
        holdings_list, lambda h: (h.account, h.currency, h.cost_currency))

    # Create transactions to account for each position.
    new_entries = []
    latest_date = entries[-1].date
    for index, holding in enumerate(holdings_list):
        # Skip positions held in their own currency (no gain to compute).
        if (holding.currency == holding.cost_currency or
            holding.cost_currency is None):
            continue

        # Note: since we're only considering positions held at cost, the
        # transaction that created the position *must* have created at least one
        # price point for that commodity, so we never expect for a price not to
        # be available, which is reasonable.
        if holding.price_number is None:
            # An entry without a price might indicate that this is a holding
            # resulting from leaked cost basis. {0ed05c502e63, b/16}
            if holding.number:
                errors.append(
                    UnrealizedError(meta,
                                    "A valid price for {h.currency}/{h.cost_currency} "
                                    "could not be found".format(h=holding), None))
            continue

        # Compute the PnL; if there is no profit or loss, we create a
        # corresponding entry anyway.
        pnl = holding.market_value - holding.book_value
        if holding.number == ZERO:
            # If the number of units sum to zero, the holdings should have been
            # zero.
            errors.append(
                UnrealizedError(
                    meta,
                    "Number of units of {} in {} in holdings sum to zero "
                    "for account {} and should not".format(
                        holding.currency, holding.cost_currency, holding.account),
                    None))
            continue

        # Compute the name of the accounts and add the requested subaccount name
        # if requested.
        asset_account = holding.account
        income_account = account.join(account_types.income,
                                      account.sans_root(holding.account))
        if subaccount:
            asset_account = account.join(asset_account, subaccount)
            income_account = account.join(income_account, subaccount)

        # Create a new transaction to account for this difference in gain.
        gain_loss_str = "gain" if pnl > ZERO else "loss"
        narration = ("Unrealized {} for {h.number} units of {h.currency} "
                     "(price: {h.price_number:.4f} {h.cost_currency} as of {h.price_date}, "
                     "average cost: {h.cost_number:.4f} {h.cost_currency})").format(
                         gain_loss_str, h=holding)
        entry = data.Transaction(
            data.new_metadata(meta["filename"], lineno=1000 + index),
            latest_date, flags.FLAG_UNREALIZED,
            None, narration, EMPTY_SET, EMPTY_SET, [])

        # Book this as income, converting the account name to be the same, but as income.
        # Note: this is a rather convenient but arbitrary choice--maybe it would be best to
        # let the user decide to what account to book it, but I don't have a nice way to let
        # the user specify this.
        #
        # Note: we never set a price because we don't want these to end up in Conversions.
        entry.postings.extend([
            data.Posting(
                asset_account,
                amount.Amount(pnl, holding.cost_currency),
                None, None, None, None),
            data.Posting(
                income_account,
                amount.Amount(-pnl, holding.cost_currency),
                None, None, None, None)
        ])

        new_entries.append(entry)

    # Ensure that the accounts we're going to use to book the postings exist, by
    # creating open entries for those that we generated that weren't already
    # existing accounts.
    new_accounts = {posting.account
                    for entry in new_entries
                    for posting in entry.postings}
    open_entries = getters.get_account_open_close(entries)
    new_open_entries = []
    # BUG FIX: the original reused the stale `index` left over from the
    # holdings loop above — a NameError when holdings_list is empty, and the
    # same (wrong) lineno for every Open otherwise. Enumerate here instead,
    # matching the sibling implementation of this function.
    for index, account_ in enumerate(sorted(new_accounts)):
        if account_ not in open_entries:
            meta = data.new_metadata(meta["filename"], index)
            open_entry = data.Open(meta, latest_date, account_, None, None)
            new_open_entries.append(open_entry)

    return (entries + new_open_entries + new_entries, errors)
def Open(account, date):
    """Build a beancount Open directive for *account*, effective on *date*.

    The directive's currency list is the account's commodity mnemonic when
    one is set, otherwise None (no currency constraint).
    """
    currencies = None
    if account.commodity:
        currencies = [account.commodity.mnemonic]
    return data.Open(meta_from(account, 'code description'),
                     date,
                     account_name(account),
                     currencies,
                     None)