def create_balance_entry(self, filename, date, balance):
    # Balance directives will be sorted in front of transactions, so there is
    # no need to have a line number to break ties.
    meta = data.new_metadata(filename, 0)
    balance_entry = data.Balance(meta, date, self.account,
                                 amount.Amount(balance, self.currency),
                                 None, None)
    return balance_entry
def extract(self, file):
    """Return extracted entries and errors."""
    rows = self.parse(file)
    rows, error_lineno = sort_rows(rows)
    new_entries = []
    if len(rows) == 0:
        return new_entries

    for index, row in enumerate(rows):
        posting = data.Posting(
            self.account, amount.Amount(row.amount, self.currency),
            None, None, None, None)
        # Use the final positional index rather than the lineno of the row
        # because bean-extract will sort the entries returned by its
        # importers; doing that using the original line number of the parsed
        # CSV row would undo all the effort we did to find their correct
        # chronological order.
        meta = data.new_metadata(file.name, index)
        payee = None
        narration = row.description
        entry = data.Transaction(meta, row.date, self.FLAG, payee, narration,
                                 data.EMPTY_SET, data.EMPTY_SET, [posting])
        new_entries.append(entry)

    # Extract balance, but only if we can trust it.
    if error_lineno is not None:
        logging.warning(
            '{}:{}: cannot reorder rows to agree with balance values'.format(
                file.name, error_lineno))
    elif self.first_day is None:
        # Create a single balance entry on the day following the last
        # transaction.
        last_row = rows[-1]
        date = last_row.date + datetime.timedelta(days=1)
        balance_entry = self.create_balance_entry(
            file.name, date, last_row.balance)
        new_entries.append(balance_entry)
    else:
        # Create monthly balance entries, starting from the most recent one.
        balance_date = periods.next(
            periods.greatest_start(rows[-1].date, first_day=self.first_day))
        for row in reversed(rows):
            if row.date < balance_date:
                new_entries.append(self.create_balance_entry(
                    file.name, balance_date, row.balance))
                balance_date = periods.prev(balance_date)

    return new_entries
def event(self, filename, lineno, date, event_type, description, kvlist):
    """Process an event directive.

    Args:
      filename: the current filename.
      lineno: the current line number.
      date: a datetime object.
      event_type: a str, the name of the event type.
      description: a str, the event value, the contents.
      kvlist: a list of KeyValue instances.
    Returns:
      A new Event object.
    """
    meta = new_metadata(filename, lineno, kvlist)
    return Event(meta, date, event_type, description)
def note(self, filename, lineno, date, account, comment, kvlist):
    """Process a note directive.

    Args:
      filename: The current filename.
      lineno: The current line number.
      date: A datetime object.
      account: A string, the account to attach the note to.
      comment: A str, the note's comment contents.
      kvlist: a list of KeyValue instances.
    Returns:
      A new Note object.
    """
    meta = new_metadata(filename, lineno, kvlist)
    return Note(meta, date, account, comment)
def pad(self, filename, lineno, date, account, source_account, kvlist):
    """Process a pad directive.

    Args:
      filename: The current filename.
      lineno: The current line number.
      date: A datetime object.
      account: A string, the account to be padded.
      source_account: A string, the account to pad from.
      kvlist: a list of KeyValue instances.
    Returns:
      A new Pad object.
    """
    meta = new_metadata(filename, lineno, kvlist)
    return Pad(meta, date, account, source_account)
def price(self, filename, lineno, date, currency, amount, kvlist):
    """Process a price directive.

    Args:
      filename: the current filename.
      lineno: the current line number.
      date: a datetime object.
      currency: the currency to be priced.
      amount: an instance of Amount, that is the price of the currency.
      kvlist: a list of KeyValue instances.
    Returns:
      A new Price object.
    """
    meta = new_metadata(filename, lineno, kvlist)
    return Price(meta, date, currency, amount)
def account(self, filename, lineno, account):
    """Check account name validity.

    Args:
      filename: the current filename.
      lineno: the current line number.
      account: a str, the account name.
    Returns:
      A string, the account name.
    """
    if not self.account_regexp.match(account):
        meta = new_metadata(filename, lineno)
        self.errors.append(
            ParserError(meta, "Invalid account name: {}".format(account),
                        None))
    # Intern account names. This should reduce memory usage a fair bit
    # because these strings are repeated liberally.
    return self.accounts.setdefault(account, account)
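# The setdefault() call above is a simple dict-based string-interning table:
# the first occurrence of each account name is stored, and every later lookup
# returns that original object instead of a new equal string. A minimal
# standalone sketch of the same idea (the names here are illustrative only):
intern_pool = {}

def intern_string(s):
    # Return the first-seen instance of any string equal to 's', so repeated
    # names share a single object rather than many equal copies.
    return intern_pool.setdefault(s, s)

a = intern_string("Assets:Bank:Checking")
b = intern_string("Assets:" + "Bank:Checking")
assert a is b  # the same object, not merely equal strings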
def custom(self, filename, lineno, date, dir_type, custom_values, kvlist):
    """Process a custom directive.

    Args:
      filename: the current filename.
      lineno: the current line number.
      date: a datetime object.
      dir_type: A string, a type for the custom directive being parsed.
      custom_values: A list of the various tokens seen on the same line.
      kvlist: a list of KeyValue instances.
    Returns:
      A new Custom object.
    """
    meta = new_metadata(filename, lineno, kvlist)
    return Custom(meta, date, dir_type, custom_values)
def query(self, filename, lineno, date, query_name, query_string, kvlist):
    """Process a query directive.

    Args:
      filename: the current filename.
      lineno: the current line number.
      date: a datetime object.
      query_name: a str, the name of the query.
      query_string: a str, the SQL query itself.
      kvlist: a list of KeyValue instances.
    Returns:
      A new Query object.
    """
    meta = new_metadata(filename, lineno, kvlist)
    return Query(meta, date, query_name, query_string)
def summarize(entries, date, account_opening):
    """Summarize all entries before a date by replacing them with summarization entries.

    This function replaces the transactions up to (and not including) the
    given date with opening balance transactions, one for each account. It
    returns new entries, all of the transactions before the given date having
    been replaced by a few summarization entries, one for each account.

    Notes:
    - Open entries are preserved for active accounts.
    - The last relevant price entry for each (base, quote) pair is preserved.
    - All other entries before the cutoff date are culled.

    Args:
      entries: A list of directives.
      date: A datetime.date instance, the cutoff date before which to
        summarize.
      account_opening: A string, the name of the source account to book
        summarization entries against.
    Returns:
      The function returns a list of new entries and the integer index at
      which the entries on or after the cutoff date begin.
    """
    # Compute balances at date.
    balances, index = balance_by_account(entries, date)

    # We need to insert the entries with a date previous to subsequent checks,
    # to ensure the open directives show up before any transaction.
    summarize_date = date - datetime.timedelta(days=1)

    # Create summarization / opening balance entries.
    summarizing_entries = create_entries_from_balances(
        balances, summarize_date, account_opening, True,
        data.new_metadata('<summarize>', 0), flags.FLAG_SUMMARIZE,
        "Opening balance for '{account}' (Summarization)")

    # Insert the last price entry for each commodity from before the date.
    price_entries = prices.get_last_price_entries(entries, date)

    # Gather the list of active open entries at date.
    open_entries = get_open_entries(entries, date)

    # Compute the entries before the date and preserve the entries after it.
    before_entries = open_entries + price_entries + summarizing_entries
    after_entries = entries[index:]

    # Return a new list of entries and the index that points after the
    # inserted entries.
    return (before_entries + after_entries), len(before_entries)
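# A hedged usage sketch for summarize() above, assuming a ledger loaded with
# beancount.loader; the file path is hypothetical.
from beancount import loader

entries, load_errors, options_map = loader.load_file('ledger.beancount')
entries, index = summarize(entries, datetime.date(2020, 1, 1),
                           'Equity:Opening-Balances')
# entries[:index] now holds the open/price/summarization directives;
# entries[index:] holds the directives on or after the cutoff date.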
def conversions(entries, conversion_account, conversion_currency, date=None):
    """Insert a conversion entry at date 'date' at the given account.

    Args:
      entries: A list of entries.
      conversion_account: A string, the account to book against.
      conversion_currency: A string, the transfer currency to use for zero
        prices on the conversion entry.
      date: The date before which to insert the conversion entry. The new
        entry will be inserted as the last entry of the date just previous
        to this date.
    Returns:
      A modified list of entries.
    """
    # Compute the balance at the given date.
    conversion_balance = interpolate.compute_entries_balance(entries, date=date)

    # Early exit if there is nothing to do.
    if conversion_balance.is_empty():
        return entries

    # Calculate the index and the date for the new entry. We want to store it
    # as the last transaction of the day before.
    if date is not None:
        index = bisect_key.bisect_left_with_key(entries, date,
                                                key=lambda entry: entry.date)
        last_date = date - datetime.timedelta(days=1)
    else:
        index = len(entries)
        last_date = entries[-1].date

    meta = data.new_metadata('<conversions>', -1)
    narration = 'Conversion for {}'.format(conversion_balance)
    conversion_entry = Transaction(meta, last_date, flags.FLAG_CONVERSIONS,
                                   None, narration, None, None, [])
    for position in conversion_balance.cost().get_positions():
        # Important note: Set the cost to zero here to maintain the balance
        # invariant. (This is the single place we cheat on the balance rule
        # in the entire system and this is necessary; see documentation on
        # Conversions.)
        price = amount.Amount(ZERO, conversion_currency)
        conversion_entry.postings.append(
            data.Posting(conversion_account, -position, price, None, None))

    # Make a copy of the list of entries and insert the new transaction
    # into it.
    new_entries = list(entries)
    new_entries.insert(index, conversion_entry)

    return new_entries
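# A hedged usage sketch for conversions() above: absorb the residual of all
# currency conversions before 2020-01-01 into an equity account. 'NOTHING'
# is used as the zero-price transfer currency, following the default of
# beancount's 'conversion_currency' option.
entries = conversions(entries, 'Equity:Conversions:Current', 'NOTHING',
                      datetime.date(2020, 1, 1))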
def extract(self, f):
    entries = []
    with open(f.name, newline="") as f:
        for index, row in enumerate(csv.DictReader(f, delimiter=",")):
            tx_date = parse(row["date"]).date()
            tx_desc = row["memo"].strip()
            tx_amt = row["amount"].strip()
            tx_payee = row["from"].strip()
            meta = data.new_metadata(f.name, index)
            tx = data.Transaction(
                meta=meta,
                date=tx_date,
                flag=flags.FLAG_OKAY,
                payee=tx_payee,
                narration=tx_desc,
                tags=set(),
                links=set(),
                postings=[],
            )
            if row["type"] == "expense":
                to = row["to"]
                tx_amt = -D(tx_amt)
            else:
                to = abbr.abbr[row["to"]]
            tx.postings.append(
                data.Posting(
                    abbr.abbr[row["from"]],
                    amount.Amount(-D(tx_amt), "TWD"),
                    None, None, None, None,
                ))
            tx.postings.append(
                data.Posting(to, None, None, None, None, None))
            entries.append(tx)
    return entries
def metadata(self, filepath, lineno, row):
    """Build transaction metadata dictionary.

    This method can be extended to add customized metadata entries based
    on the content of the data row.

    Args:
      filepath: Path to the file being imported.
      lineno: Line number of the data being processed.
      row: The data row being processed.
    Returns:
      A metadata dictionary.
    """
    return data.new_metadata(filepath, lineno)
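# A hedged sketch of extending the metadata() hook above: a subclass that
# copies a CSV column into each transaction's metadata. The base class name
# and the 'Reference' column are assumptions for illustration.
class CheckingImporter(Importer):

    def metadata(self, filepath, lineno, row):
        meta = super().metadata(filepath, lineno, row)
        # Keep the bank's reference number with the transaction.
        meta['reference'] = row['Reference']
        return meta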
def finalize(self):
    """Finalize the parser, check for final errors and return the triple.

    Returns:
      A triple of
        entries: A list of parsed directives, which may need completion.
        errors: A list of errors, hopefully empty.
        options_map: A dict of options.
    """
    # If the user left some tags unbalanced, issue an error.
    for tag in self.tags:
        meta = new_metadata(self.options['filename'], 0)
        self.errors.append(
            ParserError(meta, "Unbalanced tag: '{}'".format(tag), None))

    return (self.get_entries(), self.errors, self.get_options())
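# A hedged illustration of the unbalanced-tag error above. Given a ledger
# that pushes a tag and never pops it:
#
#     pushtag #trip-2019
#     2019-03-02 * "Taxi"
#       Expenses:Transport   10.00 USD
#       Assets:Cash
#     ; missing: poptag #trip-2019
#
# finalize() reports an "Unbalanced tag" error for trip-2019 at the end of
# parsing.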
def test_get_incomplete_postings_normal(self):
    meta = data.new_metadata(__file__, 0)

    # Test with a single auto-posting with a residual.
    entry = data.Transaction(
        meta, None, None, None, None, data.EMPTY_SET, data.EMPTY_SET, [
            P(None, "Assets:Bank:Checking", "105.50", "USD"),
            P(None, "Assets:Bank:Savings", "-115.50", "USD"),
            P(None, "Assets:Bank:Balancing", None, None),
        ])
    new_postings, has_inserted, errors, _, __ = (
        booking_simple.get_incomplete_postings(entry, self.OPTIONS_MAP))
    self.assertTrue(has_inserted)
    self.assertEqual(3, len(new_postings))
    self.assertEqual(0, len(errors))
    self.assertTrue(interpolate.AUTOMATIC_META in new_postings[2].meta)
def extract(self, f):
    entries = []
    with open(f.name) as f:
        for index, row in enumerate(csv.DictReader(f)):
            if self.isPayment(row) or self.isDone(row):
                continue
            meta = data.new_metadata(f.name, index)
            txn = self.extractRow(row, meta)
            entries.append(txn)
            self.addPaymentFor(txn, self.getDate(row), self.getAmt(row))
    entries.extend(self.payments.values())
    return entries
def Shopping(self, buy):
    # Let's go shopping!!
    Shoppingbag = []
    for idx, row in buy.iterrows():
        # continue  # debugging
        currency = row['currency']
        currency_IBcommision = row['ibCommissionCurrency']
        symbol = row['symbol']
        proceeds = amount.Amount(row['proceeds'].__round__(2), currency)
        commission = amount.Amount(row['ibCommission'].__round__(2),
                                   currency_IBcommision)
        quantity = amount.Amount(row['quantity'], symbol)
        price = amount.Amount(row['tradePrice'], currency)
        text = row['description']
        number_per = D(row['tradePrice'])
        currency_cost = currency

        cost = position.CostSpec(
            number_per=price.number,
            number_total=None,
            currency=currency,
            date=row['tradeDate'],
            label=None,
            merge=False)

        postings = [
            data.Posting(self.getAssetAccount(symbol), quantity, cost,
                         None, None, None),
            data.Posting(self.getLiquidityAccount(currency), proceeds,
                         None, None, None, None),
            data.Posting(self.getLiquidityAccount(currency_IBcommision),
                         commission, None, None, None, None),
            data.Posting(self.getFeesAccount(currency_IBcommision),
                         minus(commission), None, None, None, None),
        ]
        Shoppingbag.append(
            data.Transaction(
                data.new_metadata('Buy', 0),
                row['dateTime'].date(),
                self.flag,
                symbol,  # payee
                ' '.join(['BUY', quantity.to_string(), '@',
                          price.to_string()]),
                data.EMPTY_SET,
                data.EMPTY_SET,
                postings))
    return Shoppingbag
def validate_commodity_attr(entries, unused_options_map, config_str):
    """Check that all Commodity directives have a valid attribute.

    Args:
      entries: A list of directives.
      unused_options_map: An options map.
      config_str: A configuration string.
    Returns:
      A list of new errors, if any were found.
    """
    errors = []

    # pylint: disable=eval-used
    config_obj = eval(config_str, {}, {})
    if not isinstance(config_obj, dict):
        errors.append(
            ConfigError(
                data.new_metadata('<commodity_attr>', 0),
                "Invalid configuration for commodity_attr plugin; skipping.",
                None))
        return entries, errors
    validmap = {attr: frozenset(values) if values is not None else None
                for attr, values in config_obj.items()}

    for entry in entries:
        if not isinstance(entry, data.Commodity):
            continue
        for attr, values in validmap.items():
            value = entry.meta.get(attr, None)
            if value is None:
                errors.append(
                    CommodityError(
                        entry.meta,
                        "Missing attribute '{}' for Commodity directive {}".format(
                            attr, entry.currency),
                        None))
                continue
            if values and value not in values:
                errors.append(
                    CommodityError(
                        entry.meta,
                        "Invalid attribute '{}' for Commodity".format(value) +
                        " directive {}; valid options: {}".format(
                            entry.currency, ', '.join(values)),
                        None))
    return entries, errors
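# A hedged usage sketch for the commodity_attr plugin above, as its
# configuration might appear in a ledger; the attribute names and values
# are illustrative.
#
#     plugin "beancount.plugins.commodity_attr" "{
#         'sector': ['Technology', 'Financials', 'Energy'],
#         'name': None,
#     }"
#
# With this configuration, every Commodity directive must carry a 'sector'
# metadata value drawn from the list, and a 'name' value of any kind
# (None means any value is accepted, but the attribute must be present).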
def extract(self, f):
    entries = []
    with open(f.name) as f:
        for index, row in enumerate(csv.DictReader(f)):
            account = removeWhitespace(row['\ufeffAccount'])
            date = parse(row['Date']).date()
            desc = titlecase(row['Payee'])
            # Strip the leading currency symbol; outflows are booked as
            # negative amounts on the account.
            outflow = row['Outflow'][1:]
            inflow = row['Inflow'][1:]
            amt = amount.Amount(
                D('-' + outflow if outflow != '0.00' else inflow), 'USD')
            group = removeWhitespace(row['Category Group'])
            category = removeWhitespace(row['Category'])
            if desc.find("Transfer : ") != -1 and account != self.main_account:
                # Let other accounts handle transfers.
                to_account = removeWhitespace(
                    desc[11:].replace("Transfer : ", ""))
            elif group == "Inflow":
                to_account = "Equity:Opening-Balances"
            else:
                to_account = "Expenses:" + group + ":" + category
            meta = data.new_metadata(f.name, index)
            txn = data.Transaction(
                meta, date, flags.FLAG_OKAY, None, desc,
                data.EMPTY_SET, data.EMPTY_SET, [
                    data.Posting(account, amt, None, None, None, None),
                    data.Posting(to_account, -amt, None, None, None, None),
                ])
            entries.append(txn)
    return entries
def expense_spread(entries, options_map):
    new_entries = []
    spread_entries = []

    meta = data.new_metadata('<spread_account>', 1)
    new_entries.append(
        data.Open(meta, datetime.date(2010, 1, 1), SPREAD_ACCOUNT, None, None))

    for entry in entries:
        if isinstance(entry, data.Transaction):
            for posting in entry.postings:
                if SPREAD_KEY in posting.meta:
                    spread_entries.extend(spread_posting(entry, posting))
                    entry = replace_expenses_accounts(entry, SPREAD_ACCOUNT)
        new_entries.append(entry)

    return new_entries + spread_entries, []
def test_validate_documents_paths(self):
    date = datetime.date(2014, 3, 3)
    meta = data.new_metadata('<validation_test>', 0)
    entries = [
        data.Document(meta, date, 'Assets:Account1',
                      "/abs/path/to/something.pdf",
                      data.EMPTY_SET, data.EMPTY_SET),
        data.Document(meta, date, 'Assets:Account2',
                      "relative/something.pdf",
                      data.EMPTY_SET, data.EMPTY_SET),
        data.Document(meta, date, 'Assets:Account2',
                      "../something.pdf",
                      data.EMPTY_SET, data.EMPTY_SET),
        data.Document(meta, date, 'Assets:Account2',
                      "",
                      data.EMPTY_SET, data.EMPTY_SET),
    ]
    errors = validation.validate_documents_paths(entries, {})
    self.assertEqual(3, len(errors))
    self.assertEqual({'Assets:Account2'},
                     set(error.entry.account for error in errors))
def test_balance_with_large_amount(self):
    meta = data.new_metadata(__file__, 0)

    # Test with a single auto-posting with a residual.
    entry = data.Transaction(
        meta, None, None, None, None, data.EMPTY_SET, data.EMPTY_SET, [
            P(None, "Income:US:Anthem:InsurancePayments", "-275.81", "USD"),
            P(None, "Income:US:Anthem:InsurancePayments", "-23738.54", "USD"),
            P(None, "Assets:Bank:Checking", "24014.45", "USD"),
        ])
    new_postings, has_inserted, errors, _, __ = (
        booking_simple.get_incomplete_postings(entry, self.OPTIONS_MAP))
    self.assertFalse(has_inserted)
    self.assertEqual(3, len(new_postings))
    self.assertEqual(1 if ERRORS_ON_RESIDUAL else 0, len(errors))
def build_lexer_error(self, filename, lineno, message, exc_type=None):  # {0e31aeca3363}
    """Build a lexer error and append it to the list of pending errors.

    Args:
      filename: the current filename.
      lineno: the current line number.
      message: The message of the error.
      exc_type: An exception type, if an exception occurred.
    """
    if not isinstance(message, str):
        message = str(message)
    if exc_type is not None:
        message = '{}: {}'.format(exc_type.__name__, message)
    self.errors.append(
        LexerError(new_metadata(filename, lineno), message, None))
def close_zero(entries, options_map):
    default_currencies = options_map['operating_currency']
    errors = []
    currencies = {}
    new_entries = []
    for entry in entries:
        if isinstance(entry, Open):
            currencies[entry.account] = entry.currencies
        elif isinstance(entry, Close):
            for currency in currencies.get(entry.account, default_currencies):
                new_entry = Balance(new_metadata('<close_zero>', 0),
                                    entry.date + datetime.timedelta(days=1),
                                    entry.account,
                                    Amount(ZERO, currency),
                                    None, None)
                new_entries.append(new_entry)
        new_entries.append(entry)
    return new_entries, errors
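# A hedged sketch of the effect of close_zero above. Given:
#
#     2019-01-01 open  Assets:Checking  USD
#     2019-06-30 close Assets:Checking
#
# the plugin adds one zero-balance assertion per currency, dated the day
# after the close:
#
#     2019-07-01 balance Assets:Checking  0 USD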
def Forex(self, fx):
    # Returns beancount transactions for IBKR forex transactions.
    fxTransactions = []
    for idx, row in fx.iterrows():
        symbol = row['symbol']
        curr_prim, curr_sec = getForexCurrencies(symbol)
        currency_IBcommision = row['ibCommissionCurrency']
        proceeds = amount.Amount(row['proceeds'], curr_sec)
        quantity = amount.Amount(row['quantity'], curr_prim)
        price = amount.Amount(row['tradePrice'], curr_sec)
        commission = amount.Amount(row['ibCommission'], currency_IBcommision)
        buysell = row['buySell'].name

        cost = position.CostSpec(
            number_per=None,
            number_total=None,
            currency=None,
            date=None,
            label=None,
            merge=False)

        postings = [
            data.Posting(self.getLiquidityAccount(curr_prim), quantity,
                         None, price, None, None),
            data.Posting(self.getLiquidityAccount(curr_sec), proceeds,
                         None, None, None, None),
            data.Posting(self.getLiquidityAccount(currency_IBcommision),
                         commission, None, None, None, None),
            data.Posting(self.getFeesAccount(currency_IBcommision),
                         minus(commission), None, None, None, None),
        ]
        fxTransactions.append(
            data.Transaction(
                data.new_metadata('FX Transaction', 0),
                row['tradeDate'],
                self.flag,
                symbol,  # payee
                ' '.join([buysell, quantity.to_string(), '@',
                          price.to_string()]),
                data.EMPTY_SET,
                data.EMPTY_SET,
                postings))
    return fxTransactions
def parse(self):
    d = self.soup
    tables = d.select('#loopBand2>table>tr')
    currencies_count = int(len(tables) / 4)
    transactions = []
    for x in range(0, currencies_count):
        title = tables[x * 4]
        contents = tables[x * 4 + 3]
        currency = title.select('#fixBand29 td>table td')[1].text.strip()
        currency = self.get_currency(currency)
        bands = contents.select('#loopBand3>table>tr')
        for band in bands:
            tds = band.select(
                'td>table>tr>td #fixBand9>table>tr>td>table>tr>td')
            time = self.get_date(tds[1].text.strip())
            description = tds[3].text.strip()
            price = tds[4].text.strip()
            print("Importing {} at {}".format(description, time))
            account = get_account_by_guess(description, '', time)
            flag = "*"
            amount = float(price.replace(',', ''))
            if account == "Unknown":
                flag = "!"
            meta = {}
            meta = data.new_metadata(
                'beancount/core/testing.beancount', 12345, meta)
            entry = Transaction(meta, time, flag, description, None,
                                data.EMPTY_SET, data.EMPTY_SET, [])
            data.create_simple_posting(entry, account, price, currency)
            data.create_simple_posting(entry, Account民生, None, None)
            if not self.deduplicate.find_duplicate(entry, -amount, None,
                                                   Account民生):
                transactions.append(entry)
    self.deduplicate.apply_beans()
    return transactions
def open_dit_accounts(entries, dit_component):
    """Minimally adapted from beancount.plugins.auto_accounts."""
    opened_accounts = {entry.account
                       for entry in entries
                       if isinstance(entry, data.Open)}

    new_entries = []
    accounts_first, _ = getters.get_accounts_use_map(entries)
    for index, (account, date_first_used) in enumerate(
            sorted(accounts_first.items())):
        if ((account not in opened_accounts) and
                has_component(account, dit_component)):
            meta = data.new_metadata(__name__, index)
            new_entry = data.Open(meta, date_first_used, account, None, None)
            new_entries.append(new_entry)

    return new_entries
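# For reference, account.has_component matches whole account components, so
# the dit_component filter above will not match substrings:
#
#     has_component('Assets:Broker:DIT:Cash', 'DIT')  # -> True
#     has_component('Assets:DITto:Cash', 'DIT')       # -> False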
def test_budget_importer_decimal_amount(self):
    extracted_data = self.gi.extract(self.file)
    trans_date = datetime.datetime.fromtimestamp(1478171818)
    trans_account = 'Expenses:Groceries'
    trans_payee = 'Salad'
    trans_description = ''
    trans_amount = '2.89'
    meta = data.new_metadata(self.file.name, 1)
    self.assertEqual(
        generate_transaction(
            meta,
            trans_date,
            trans_payee,
            trans_description,
            trans_account,
            trans_amount,
            self.trans_second_posting_account,
        ),
        extracted_data[1])
def test_budget_importer_general_assets_extract(self):
    extracted_data = self.gi.extract(self.file)
    trans_date = datetime.datetime.fromtimestamp(1478171878)
    trans_account = 'Assets:Debt:Homer'
    trans_payee = 'Abuh'
    trans_description = 'Lollipop'
    trans_amount = '10'
    meta = data.new_metadata(self.file.name, 4)
    self.assertEqual(
        generate_transaction(
            meta,
            trans_date,
            trans_payee,
            trans_description,
            trans_account,
            trans_amount,
            self.trans_second_posting_account,
        ),
        extracted_data[4])
def test_budget_importer_general_expense_extract(self):
    extracted_data = self.gi.extract(self.file)
    trans_date = datetime.datetime.fromtimestamp(1477864184)
    trans_account = 'Expenses:Trips'
    trans_payee = 'NY'
    trans_description = 'awesome_description'
    trans_amount = '25'
    meta = data.new_metadata(self.file.name, 0)
    self.assertEqual(
        generate_transaction(
            meta,
            trans_date,
            trans_payee,
            trans_description,
            trans_account,
            trans_amount,
            self.trans_second_posting_account,
        ),
        extracted_data[0])
def extract(self, f):
    self._import_alias_rules()
    entries = []
    with open(f.name, 'r') as f:
        for index, row in enumerate(csv.reader(f, delimiter=';')):
            if index == 0:
                continue
            meta = data.new_metadata(f.name, index)
            trans_date = datetime.datetime.strptime(row[2], "%d/%m/%Y")
            extracted_account = self._extract_account(row[3])
            trans_account = extracted_account['account']
            trans_payee = extracted_account['payee']
            trans_description = extracted_account['description']
            trans_amount = float(row[4].replace(',', '.'))
            trans_second_posting_account = 'Assets:{}'.format(
                re.sub(r'Data-(.*).csv', r'\1', os.path.basename(f.name)))

            txn = data.Transaction(
                meta=meta,
                date=trans_date,
                flag=flags.FLAG_OKAY,
                payee=trans_payee,
                narration=trans_description,
                tags=set(),
                links=set(),
                postings=[],
            )
            txn.postings.append(
                data.Posting(
                    trans_account,
                    amount.Amount(round(-1 * D(trans_amount), 2), 'EUR'),
                    None, None, None, None))
            txn.postings.append(
                data.Posting(
                    trans_second_posting_account,
                    None, None, None, None, None))
            entries.append(txn)
    return entries
def extract(self, f):
    entries = []
    with open(f.name, 'r') as f:
        for index, row in enumerate(csv.reader(f)):
            meta = data.new_metadata(f.name, index)
            # The Budget timestamp has a millisecond component that we strip
            # ([:-3]) to get seconds before handing it to fromtimestamp().
            trans_date = datetime.datetime.fromtimestamp(float(row[0][:-3]))
            trans_account = row[1]
            if re.match('Debt:*', trans_account):
                trans_account = 'Assets:{}'.format(trans_account)
            else:
                trans_account = 'Expenses:{}'.format(trans_account)
            trans_payee = row[3].split(':')[0]
            trans_description = ' '.join(row[3].split(':')[1:])
            trans_amount = float(row[2]) / 100

            txn = data.Transaction(
                meta=meta,
                date=trans_date,
                flag=flags.FLAG_OKAY,
                payee=trans_payee,
                narration=trans_description,
                tags=set(),
                links=set(),
                postings=[],
            )
            txn.postings.append(
                data.Posting(
                    trans_account,
                    amount.Amount(round(-1 * D(trans_amount), 2), 'EUR'),
                    None, None, None, None))
            txn.postings.append(
                data.Posting('Assets:Cash', None, None, None, None, None))
            entries.append(txn)
    return entries
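# A small worked example of the timestamp handling above (the value is
# illustrative):
#
#     row[0]      == '1478171818123'  # milliseconds since the epoch
#     row[0][:-3] == '1478171818'     # seconds since the epoch
#     datetime.datetime.fromtimestamp(1478171818.0)
#                 # -> the corresponding local datetime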
def document(self, filename, lineno, date, account, document_filename, kvlist):
    """Process a document directive.

    Args:
      filename: the current filename.
      lineno: the current line number.
      date: a datetime object.
      account: an Account instance.
      document_filename: a str, the name of the document file.
      kvlist: a list of KeyValue instances.
    Returns:
      A new Document object.
    """
    meta = new_metadata(filename, lineno, kvlist)
    if not path.isabs(document_filename):
        document_filename = path.abspath(
            path.join(path.dirname(filename), document_filename))
    return Document(meta, date, account, document_filename)
def process_pair(pair, cleared_tag, cleared_links, same_day_merge):

    def tag_and_link(entry, cleared_link):
        tags = (entry.tags or set()) | {cleared_tag}
        links = (entry.links or set()) | {cleared_link}
        return entry._replace(tags=tags, links=links)

    def xform_posting(posting):
        return data.Posting(posting.account, -posting.units,
                            None, None, None, None)

    # The first in the pair should be the sender; the second, the receiver.
    if pair[0].posting.units < pair[1].posting.units:
        pair = (pair[1], pair[0])

    date = max(pair[0].txn.date, pair[1].txn.date)

    if pair[0].txn.narration == pair[1].txn.narration:
        narration = pair[0].txn.narration
    else:
        narration = '{} / {}'.format(pair[0].txn.narration,
                                     pair[1].txn.narration)

    if pair[0].txn.payee is None:
        payee = pair[1].txn.payee
    elif pair[1].txn.payee is None:
        payee = pair[0].txn.payee
    elif pair[0].txn.payee == pair[1].txn.payee:
        payee = pair[0].txn.payee
    else:
        payee = '{} / {}'.format(pair[0].txn.payee, pair[1].txn.payee)

    if same_day_merge and is_pair_mergeable(pair):
        # Merge the two transactions.
        meta = pair[0].txn.meta
        flag = pair[0].txn.flag
        tags = ((pair[0].txn.tags or set()) |
                (pair[1].txn.tags or set()) | {cleared_tag})
        links = (((pair[0].txn.links or set()) |
                  (pair[1].txn.links or set())) or data.EMPTY_SET)
        postings = ([posting for posting in pair[0].txn.postings
                     if posting is not pair[0].posting] +
                    [posting for posting in pair[1].txn.postings
                     if posting is not pair[1].posting])
        new_entry = data.Transaction(
            meta, date, flag, payee, narration, tags, links, postings)
        return (new_entry, )

    # Make sure the connecting entry will be shown between the two existing
    # ones when looking at the list of entries for their common link.
    lineno = int((pair[0].txn.meta.get('lineno', 0) +
                  pair[1].txn.meta.get('lineno', 0)) / 2)
    meta = data.new_metadata(__name__, lineno)
    cleared_link = next(cleared_links)
    new_entry = data.Transaction(
        meta, date, flags.FLAG_OKAY, payee, narration,
        {cleared_tag}, {cleared_link},
        [xform_posting(pair[0].posting), xform_posting(pair[1].posting)])
    return (tag_and_link(pair[0].txn, cleared_link),
            tag_and_link(pair[1].txn, cleared_link),
            new_entry)
def add_unrealized_gains(entries, options_map, subaccount=None):
    """Insert entries for unrealized capital gains.

    This function inserts entries that represent unrealized gains, at the end
    of the available history. It returns a new list of entries, with the new
    gains inserted. The income side is booked to an account derived by
    replacing the account's type with the income account type. Optionally, it
    can book the gain in a subaccount of the original and income accounts.

    Args:
      entries: A list of data directives.
      options_map: A dict of options, that conforms to beancount.parser.options.
      subaccount: A string, the optional name of a subaccount to create under
        an account to book the unrealized gain. If this is left to its default
        value, the gain is booked directly in the same account.
    Returns:
      A list of entries, which includes the new unrealized capital gains
      entries at the end, and a list of errors. The new list of entries is
      still sorted.
    """
    errors = []
    meta = data.new_metadata('<unrealized_gains>', 0)

    account_types = options.get_account_types(options_map)

    # Assert the subaccount name is in valid format.
    if subaccount:
        validation_account = account.join(account_types.assets, subaccount)
        if not account.is_valid(validation_account):
            errors.append(
                UnrealizedError(
                    meta,
                    "Invalid subaccount name: '{}'".format(subaccount),
                    None))
            return entries, errors

    if not entries:
        return (entries, errors)

    # Group positions by (account, cost, cost_currency).
    price_map = prices.build_price_map(entries)
    new_entries = []

    # Start at the first month after our first transaction.
    date = date_utils.next_month(entries[0].date)
    last_month = date_utils.next_month(entries[-1].date)
    last_holdings_with_currencies = None
    while date <= last_month:
        date_entries, holdings_with_currencies, date_errors = (
            add_unrealized_gains_at_date(
                entries, new_entries, account_types.income, price_map, date,
                meta, subaccount))
        new_entries.extend(date_entries)
        errors.extend(date_errors)

        if last_holdings_with_currencies:
            for account_, cost_currency, currency in (
                    last_holdings_with_currencies - holdings_with_currencies):
                # Create a negation transaction specifically to mark that all
                # gains have been realized.
                if subaccount:
                    account_ = account.join(account_, subaccount)

                latest_unrealized_entry = find_previous_unrealized_transaction(
                    new_entries, account_, cost_currency, currency)
                if not latest_unrealized_entry:
                    continue
                entry = data.Transaction(
                    data.new_metadata(meta["filename"], lineno=999,
                                      kvlist={'prev_currency': currency}),
                    date, flags.FLAG_UNREALIZED, None,
                    'Clear unrealized gains/losses of {}'.format(currency),
                    set(), set(), [])

                # Negate the previous transaction because the unrealized
                # gains are now zero.
                for posting in latest_unrealized_entry.postings[:2]:
                    entry.postings.append(
                        data.Posting(posting.account, -posting.units,
                                     None, None, None, None))
                new_entries.append(entry)

        last_holdings_with_currencies = holdings_with_currencies
        date = date_utils.next_month(date)

    # Ensure that the accounts we're going to use to book the postings exist,
    # by creating open entries for those that we generated that weren't
    # already existing accounts.
    new_accounts = {posting.account
                    for entry in new_entries
                    for posting in entry.postings}
    open_entries = getters.get_account_open_close(entries)
    new_open_entries = []
    for index, account_ in enumerate(sorted(new_accounts)):
        if account_ not in open_entries:
            meta = data.new_metadata(meta["filename"], index)
            open_entry = data.Open(meta, new_entries[0].date, account_,
                                   None, None)
            new_open_entries.append(open_entry)

    return (entries + new_open_entries + new_entries, errors)
def __init__(self, *args, **kwargs):
    entries_filename = kwargs.pop('entries_filename', '<config>')
    self.source = data.new_metadata(entries_filename, 0)
    super(ArgumentParser, self).__init__(*args, **kwargs)
def add_unrealized_gains_at_date(entries, unrealized_entries,
                                 income_account_type, price_map, date,
                                 meta, subaccount):
    """Insert/remove entries for unrealized capital gains.

    This function takes a list of entries and a date and creates a set of
    unrealized gains transactions, negating previous unrealized gains
    transactions within the same account.

    Args:
      entries: A list of data directives.
      unrealized_entries: A list of previously generated unrealized
        transactions.
      income_account_type: The income account type.
      price_map: A price map returned by prices.build_price_map.
      date: The effective date to generate the unrealized transactions for.
      meta: meta.
      subaccount: A string, the optional name of a subaccount to create under
        an account to book the unrealized gain. If this is left to its default
        value, the gain is booked directly in the same account.
    Returns:
      A list of newly created unrealized transactions, the set of holdings
      seen, and a list of errors.
    """
    errors = []

    entries_truncated = summarize.truncate(entries, date + ONEDAY)

    holdings_list = holdings.get_final_holdings(entries_truncated,
                                                price_map=price_map,
                                                date=date)

    # Group positions by (account, cost, cost_currency).
    holdings_list = holdings.aggregate_holdings_by(
        holdings_list, lambda h: (h.account, h.currency, h.cost_currency))

    holdings_with_currencies = set()

    # Create transactions to account for each position.
    new_entries = []
    for index, holding in enumerate(holdings_list):
        if (holding.currency == holding.cost_currency or
                holding.cost_currency is None):
            continue

        # Note: since we're only considering positions held at cost, the
        # transaction that created the position *must* have created at least
        # one price point for that commodity, so we never expect for a price
        # not to be available, which is reasonable.
        if holding.price_number is None:
            # An entry without a price might indicate that this is a holding
            # resulting from leaked cost basis. {0ed05c502e63, b/16}
            if holding.number:
                errors.append(
                    UnrealizedError(
                        meta,
                        "A valid price for {h.currency}/{h.cost_currency} "
                        "could not be found".format(h=holding), None))
            continue

        # Compute the PnL; if there is no profit or loss, we create a
        # corresponding entry anyway.
        pnl = holding.market_value - holding.book_value
        if holding.number == ZERO:
            # If the number of units sum to zero, the holdings should have
            # been zero.
            errors.append(
                UnrealizedError(
                    meta,
                    "Number of units of {} in {} in holdings sum to zero "
                    "for account {} and should not".format(
                        holding.currency, holding.cost_currency,
                        holding.account), None))
            continue

        # Compute the name of the accounts and add the requested subaccount
        # name if requested.
        asset_account = holding.account
        income_account = account.join(income_account_type,
                                      account.sans_root(holding.account))
        if subaccount:
            asset_account = account.join(asset_account, subaccount)
            income_account = account.join(income_account, subaccount)

        holdings_with_currencies.add(
            (holding.account, holding.cost_currency, holding.currency))

        # Find the previous unrealized gain entry to negate and decide if we
        # should create a new posting.
        latest_unrealized_entry = find_previous_unrealized_transaction(
            unrealized_entries, asset_account, holding.cost_currency,
            holding.currency)

        # Don't create a new transaction if our last one hasn't changed.
        if (latest_unrealized_entry and
                pnl == latest_unrealized_entry.postings[0].units.number):
            continue

        # Don't bother creating a blank unrealized transaction if none
        # existed.
        if pnl == ZERO and not latest_unrealized_entry:
            continue

        relative_pnl = pnl
        if latest_unrealized_entry:
            relative_pnl = (
                pnl - latest_unrealized_entry.postings[0].units.number)

        # Create a new transaction to account for this difference in gain.
        gain_loss_str = "gain" if relative_pnl > ZERO else "loss"
        narration = ("Unrealized {} for {h.number} units of {h.currency} "
                     "(price: {h.price_number:.4f} {h.cost_currency} as of "
                     "{h.price_date}, average cost: {h.cost_number:.4f} "
                     "{h.cost_currency})").format(gain_loss_str, h=holding)
        entry = data.Transaction(
            data.new_metadata(meta["filename"], lineno=1000 + index,
                              kvlist={'prev_currency': holding.currency}),
            date, flags.FLAG_UNREALIZED, None, narration, set(), set(), [])

        # Book this as income, converting the account name to be the same,
        # but as income. Note: this is a rather convenient but arbitrary
        # choice; maybe it would be best to let the user decide to what
        # account to book it, but I don't have a nice way to let the user
        # specify this.
        #
        # Note: we never set a price because we don't want these to end up in
        # Conversions.
        entry.postings.extend([
            data.Posting(
                asset_account,
                amount.Amount(pnl, holding.cost_currency),
                None, None, None, None),
            data.Posting(
                income_account,
                amount.Amount(-pnl, holding.cost_currency),
                None, None, None, None),
        ])
        if latest_unrealized_entry:
            for posting in latest_unrealized_entry.postings[:2]:
                entry.postings.append(
                    data.Posting(posting.account, -posting.units,
                                 None, None, None, None))
        new_entries.append(entry)

    return new_entries, holdings_with_currencies, errors