def load_csv_and_prices(holdings_filename, prices_filename, currency):
    """Load the holdings and prices from filenames and convert to a common currency.

    Args:
      holdings_filename: A string, the name of a CSV file containing the list of Holdings.
      prices_filename: A string, the name of a Beancount file containing price directives.
      currency: A string, the target currency to convert all the holdings to.
    Returns:
      Two lists of holdings: a list in the original currencies, and a list all
      converted to the target currency.
    """
    # Load the price database.
    # Generate with "bean-query LEDGER print_prices"
    price_entries, errors, options_map = loader.load(prices_filename)
    price_map = prices.build_price_map(price_entries)

    # Load the holdings list.
    # Generate with "bean-query LEDGER holdings"
    mixed_holdings_list = list(
        holdings_reports.load_from_csv(open(holdings_filename)))

    # Convert all the amounts to a common currency (otherwise summing market
    # values makes no sense).
    holdings_list = holdings.convert_to_currency(price_map, currency,
                                                 mixed_holdings_list)

    return mixed_holdings_list, holdings_list
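
# Hedged usage sketch for load_csv_and_prices() above. The filenames are
# hypothetical; this assumes "holdings.csv" was exported from the ledger and
# that "prices.beancount" holds the matching price directives.
mixed, converted = load_csv_and_prices('holdings.csv', 'prices.beancount', 'USD')
for holding in converted:
    print(holding.account, holding.market_value)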
def load_file(self):
    """Load the main file and all included files and set attributes."""
    # use the internal function to disable cache
    if not self._is_encrypted:
        # pylint: disable=protected-access
        self.all_entries, self.errors, self.options = \
            loader._load([(self.beancount_file_path, True)],
                         None, None, None)
        include_path = os.path.dirname(self.beancount_file_path)
        self._watcher.update(self.options['include'], [
            os.path.join(include_path, path)
            for path in self.options['documents']
        ])
    else:
        self.all_entries, self.errors, self.options = \
            loader.load_file(self.beancount_file_path)

    self.price_map = prices.build_price_map(self.all_entries)
    self.account_types = get_account_types(self.options)

    self.all_root_account = realization.realize(self.all_entries,
                                                self.account_types)

    if self.options['render_commas']:
        self._format_string = '{:,f}'
        self._default_format_string = '{:,.2f}'
    else:
        self._format_string = '{:f}'
        self._default_format_string = '{:.2f}'

    self.fava_options, errors = parse_options(
        filter_type(self.all_entries, Custom))
    self.errors.extend(errors)

    for mod in MODULES:
        getattr(self, mod).load_file()

    self.filter(True)
def test_get_final_holdings_with_prices(self, entries, _, __):
    """
    2013-01-01 open Assets:Account1
    2013-01-01 open Assets:Account2
    2013-01-01 open Assets:Account3
    2013-01-01 open Assets:Cash
    2013-01-01 open Equity:Unknown

    2013-04-05 *
      Equity:Unknown
      Assets:Cash          50000 USD

    2013-04-01 *
      Assets:Account1      15 HOOL {518.73 USD}
      Assets:Cash

    2013-06-01 price HOOL  578.02 USD
    """
    price_map = prices.build_price_map(entries)
    holdings_list = holdings.get_final_holdings(entries,
                                                ('Assets', 'Liabilities'),
                                                price_map)

    holdings_list = sorted(map(tuple, holdings_list))

    expected_values = [
        ('Assets:Account1', D('15'), 'HOOL', D('518.73'), 'USD',
         D('7780.95'), D('8670.30'), D('578.02'), datetime.date(2013, 6, 1)),
        ('Assets:Cash', D('42219.05'), 'USD', None, 'USD',
         D('42219.05'), D('42219.05'), None, None),
        # Notice no Equity account.
    ]
    self.assertEqual(expected_values, holdings_list)
def main():
    parser = argparse.ArgumentParser(description=__doc__.strip())
    parser.add_argument('filename', help='Filename')
    args = parser.parse_args()

    entries, errors, options_map = loader.load_file(args.filename)
    price_map = prices.build_price_map(entries)
    # commodity_map = getters.get_commodity_map(entries, options_map)
    # ticker_info = getters.get_values_meta(commodity_map, 'name', 'ticker', 'quote')
    # pprint(price_map)
    # print('Fetching:')

    # Report price jumps larger than this percentage per day (the computation
    # below normalizes the change by the number of days elapsed).
    diff_threshold = 2.00
    for (base, quote), rates in sorted(price_map.items()):
        if is_option(base) or is_option(quote):
            continue
        if not rates:
            continue
        print(base, quote)
        riter = iter(rates)
        prev_date, prev_rate = next(riter)
        for date, rate in riter:
            days = (date - prev_date).days
            diff_pct = (float(rate / prev_rate) - 1.0) / days * 100
            if abs(diff_pct) > diff_threshold:
                print(' {:3d} {:.5f} {:6.2f}'.format(days, rate, diff_pct))
            prev_date, prev_rate = date, rate
def compute_timeline_and_returns(entries, options_map, transfer_account,
                                 accounts_value, accounts_internal,
                                 accounts_internalize=None,
                                 date_begin=None, date_end=None):
    """Compute a timeline and the returns of a portfolio of accounts.

    Args:
      entries: A list of directives that may affect the account.
      options_map: A dict of parsed options.
      transfer_account: A string, the name of an account to use for internalizing
        entries which need to be split between internal and external flows. A good
        default value would be an equity account, 'Equity:Internalized' or
        something like that.
      accounts_value: A set of account name strings, the names of the asset
        accounts included in valuing the portfolio.
      accounts_internal: A set of account name strings, the names of internal
        flow accounts (normally income and expenses) that aren't external flows.
      accounts_internalize: A set of account name strings used to force
        internalization. See internalize() for details.
      date_begin: A datetime.date instance, the beginning date of the period to
        compute returns over.
      date_end: A datetime.date instance, the end date of the period to compute
        returns over.
    Returns:
      A pair of
        returns: A dict of currency -> float total returns.
        dates: A pair of (date_first, date_last) datetime.date instances.
    """
    timeline = create_timeline(entries, options_map, transfer_account,
                               accounts_value, accounts_internal,
                               accounts_internalize)
    price_map = prices.build_price_map(entries)
    return compute_returns(timeline, price_map, date_begin, date_end)
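
# Hedged usage sketch for compute_timeline_and_returns(). The account names are
# illustrative, create_timeline()/compute_returns() are assumed to be the
# helpers defined alongside this function, and the pair return shape follows
# the docstring above.
returns, (date_first, date_last) = compute_timeline_and_returns(
    entries, options_map, 'Equity:Internalized',
    accounts_value={'Assets:US:Invest:Cash', 'Assets:US:Invest:HOOL'},
    accounts_internal={'Income:US:Invest:Dividends'})
print(returns, date_first, date_last)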
def bucketize(vbalance, base_currency, entries):
    price_map = prices.build_price_map(entries)
    commodity_map = getters.get_commodity_map(entries)

    # Main part: put each commodity's value into asset buckets.
    asset_buckets = defaultdict(int)
    for pos in vbalance.get_positions():
        amount = convert.convert_position(pos, base_currency, price_map)
        if amount.number < 0:
            # print("Warning: skipping negative balance:", pos)  # TODO
            continue
        if amount.currency == pos.units.currency and amount.currency != base_currency:
            sys.stderr.write(
                "Error: unable to convert {} to base currency {} "
                "(Missing price directive?)\n".format(pos, base_currency))
            sys.exit(1)
        commodity = pos.units.currency
        metas = commodity_map[commodity].meta
        unallocated = Decimal('100')
        for meta in metas:
            if meta.startswith('asset_allocation_'):
                asset_buckets[meta[len('asset_allocation_'):]] += \
                    amount.number * (metas[meta] / 100)
                unallocated -= metas[meta]
        if unallocated:
            print("Warning: {} asset_allocation_* metadata does not add up "
                  "to 100%. Padding with 'unknown'.".format(commodity))
            asset_buckets['unknown'] += amount.number * (unallocated / 100)
    return asset_buckets
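
# A minimal sketch (with a made-up ledger) of the commodity metadata convention
# bucketize() consumes: each asset_allocation_<bucket> value is a percentage,
# and the percentages for one commodity should sum to 100. This assumes the
# same beancount version and get_commodity_map() call as bucketize() above.
from beancount import loader
from beancount.core import getters

entries, errors, options_map = loader.load_string("""
2020-01-01 commodity VBAL
  asset_allocation_equity: 60
  asset_allocation_bond: 40
""")
print(getters.get_commodity_map(entries)['VBAL'].meta)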
def test_build_price_map(self, entries, _, __):
    """
    2013-06-01 price USD 1.10 CAD

    ;; Try some prices at the same date.
    2013-06-02 price USD 1.11 CAD
    2013-06-02 price USD 1.12 CAD
    2013-06-02 price USD 1.13 CAD

    ;; One after too.
    2013-06-03 price USD 1.14 CAD

    ;; Try a few inverse prices.
    2013-06-05 price CAD 0.86956 USD
    2013-06-06 price CAD 0.86207 USD
    """
    price_map = prices.build_price_map(entries)

    self.assertEqual(2, len(price_map))
    self.assertEqual(set([('USD', 'CAD'), ('CAD', 'USD')]),
                     set(price_map.keys()))

    values = price_map[('USD', 'CAD')]
    expected = [(datetime.date(2013, 6, 1), D('1.10')),
                (datetime.date(2013, 6, 2), D('1.13')),
                (datetime.date(2013, 6, 3), D('1.14')),
                (datetime.date(2013, 6, 5), D('1.15')),
                (datetime.date(2013, 6, 6), D('1.16'))]
    for (exp_date, exp_value), (act_date, act_value) in zip(expected, values):
        self.assertEqual(exp_date, act_date)
        self.assertEqual(exp_value, act_value.quantize(D('0.01')))

    self.assertEqual(5, len(price_map[('CAD', 'USD')]))
def get_assets_holdings(entries, options_map, currency=None):
    """Return holdings for all assets and liabilities.

    Args:
      entries: A list of directives.
      options_map: A dict of parsed options.
      currency: If specified, a string, the target currency to convert all
        holding values to.
    Returns:
      A list of Holding instances and a price-map.
    """
    # Compute a price map, to perform conversions.
    price_map = prices.build_price_map(entries)

    # Get the list of holdings.
    account_types = options.get_account_types(options_map)
    holdings_list = holdings.get_final_holdings(entries,
                                                (account_types.assets,
                                                 account_types.liabilities),
                                                price_map)

    # Convert holdings to a unified currency.
    if currency:
        holdings_list = holdings.convert_to_currency(price_map, currency,
                                                     holdings_list)

    return holdings_list, price_map
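
# Hedged usage sketch for get_assets_holdings(); assumes `entries` and
# `options_map` were produced by loader.load_file().
holdings_list, price_map = get_assets_holdings(entries, options_map, 'USD')
for holding in holdings_list:
    print(holding.account, holding.number, holding.currency, holding.market_value)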
def load_file(self) -> None:
    """Load the main file and all included files and set attributes."""
    # use the internal function to disable cache
    if not self._is_encrypted:
        # pylint: disable=protected-access
        self.all_entries, self.errors, self.options = _load(
            [(self.beancount_file_path, True)], None, None, None)
    else:
        self.all_entries, self.errors, self.options = load_file(
            self.beancount_file_path)

    self.get_filtered.cache_clear()

    self.account_types = get_account_types(self.options)
    self.price_map = build_price_map(self.all_entries)
    self.all_root_account = realization.realize(self.all_entries,
                                                self.account_types)

    self.all_entries_by_type = group_entries_by_type(self.all_entries)

    self.accounts = AccountDict()
    for open_entry in self.all_entries_by_type.Open:
        self.accounts.setdefault(open_entry.account).meta = open_entry.meta
    for close in self.all_entries_by_type.Close:
        self.accounts.setdefault(close.account).close_date = close.date

    self.fava_options, errors = parse_options(
        self.all_entries_by_type.Custom)
    self.errors.extend(errors)

    if not self._is_encrypted:
        self._watcher.update(*self.paths_to_watch())

    for mod in MODULES:
        getattr(self, mod).load_file()
def main():
    logging.basicConfig(level=logging.INFO,
                        format='%(levelname)-8s: %(message)s')
    parser = argparse.ArgumentParser(description=__doc__.strip())
    parser.add_argument('filename', help='Beancount ledger filename')
    args, pipeline_args = parser.parse_known_args()

    # Read the ledger.
    logging.info("Reading ledger.")
    t1 = time.time()
    entries, errors, options_map = loader.load_file(args.filename)
    # beam.Row() accepts keyword arguments only; name the fields explicitly so
    # the SQL below can refer to the 'account' column.
    postings = (beam.Row(account=posting.account,
                         number=posting.units.number,
                         currency=posting.units.currency)
                for entry in data.filter_txns(entries)
                for posting in entry.postings)
    price_map = prices.build_price_map(entries)
    t2 = time.time()
    logging.info("Read ledger in %.1fsecs.", t2 - t1)

    with CreatePipeline(pipeline_args) as pipeline:
        _ = (pipeline
             | beam.Create(postings)
             | SqlTransform("""
                 SELECT account FROM PCOLLECTION
               """, dialect="zetasql")
             | beam.Map(print))
def __init__(self, entries, options_map, currency):
    self.entries = entries
    self.options_map = options_map
    self.currency = currency

    if self.currency:
        self.etype = "envelope" + self.currency
    else:
        self.etype = "envelope"

    self.start_date, self.budget_accounts, self.mappings, \
        self.income_accounts = self._find_envelop_settings()

    if not self.currency:
        self.currency = self._find_currency(options_map)

    decimal_precision = '0.00'
    self.Q = Decimal(decimal_precision)

    # Compute start of period.
    # TODO get start date from journal
    today = datetime.date.today()
    self.date_start = datetime.datetime.strptime(self.start_date,
                                                 '%Y-%m').date()
    # TODO should be able to assert errors

    # Compute end of period.
    self.date_end = datetime.date(today.year, today.month, today.day)

    self.price_map = prices.build_price_map(entries)
    self.acctypes = options.get_account_types(options_map)
def test_get_all_prices(self, entries, _, __):
    """
    2013-06-01 price USD 1.01 CAD
    2013-06-03 price USD 1.03 CAD
    2013-06-05 price USD 1.05 CAD
    2013-06-07 price USD 1.07 CAD
    2013-06-09 price USD 1.09 CAD
    2013-06-11 price USD 1.11 CAD
    """
    price_map = prices.build_price_map(entries)
    price_list = prices.get_all_prices(price_map, ('USD', 'CAD'))
    expected = [(datetime.date(2013, 6, 1), D('1.01')),
                (datetime.date(2013, 6, 3), D('1.03')),
                (datetime.date(2013, 6, 5), D('1.05')),
                (datetime.date(2013, 6, 7), D('1.07')),
                (datetime.date(2013, 6, 9), D('1.09')),
                (datetime.date(2013, 6, 11), D('1.11'))]
    self.assertEqual(expected, price_list)

    inv_price_list = prices.get_all_prices(price_map, ('CAD', 'USD'))
    self.assertEqual(len(price_list), len(inv_price_list))

    # Test not found.
    with self.assertRaises(KeyError):
        prices.get_all_prices(price_map, ('EWJ', 'JPY'))
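
# A self-contained sketch of the price-map query API exercised by the test
# above; the two price directives here are made up for illustration.
from beancount import loader
from beancount.core import prices

entries, errors, options_map = loader.load_string("""
2013-06-01 price USD 1.01 CAD
2013-06-11 price USD 1.11 CAD
""")
price_map = prices.build_price_map(entries)
print(prices.get_all_prices(price_map, ('USD', 'CAD')))    # Every (date, rate) pair.
print(prices.get_latest_price(price_map, ('USD', 'CAD')))  # The most recent pair.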
def region(filename, region, conversion):
    """Print out a list of transactions within REGION and compute balances.

    The REGION argument is either a start:end line numbers tuple or a
    filename:start:end triplet to indicate a region in a ledger file
    included from the main input file.
    """
    search_filename, first_lineno, last_lineno = region
    if search_filename is None:
        search_filename = filename
    entries, errors, options_map = loader.load_file(filename)

    # Find all the entries in the region. (To be clear, this isn't like the
    # 'linked' command; none of the links are followed.)
    region_entries = [
        entry for entry in data.filter_txns(entries)
        if (entry.meta['filename'] == search_filename and
            first_lineno <= entry.meta['lineno'] <= last_lineno)
    ]

    price_map = (prices.build_price_map(entries)
                 if conversion == 'value' else None)
    render_mini_balances(region_entries, options_map, conversion, price_map)
def test_build_price_map_zero_prices(self, entries, _, __):
    """
    1999-12-27 commodity EFA
    2010-10-01 price EFA 57.53 EFA
    2010-11-01 price EFA 0 EFA
    2011-03-01 price EFA 60.69 EFA
    """
    price_map = prices.build_price_map(entries)
    self.assertIsNotNone(price_map)
def main_saved_for_later():
    # Generate price reports.
    output_prices = path.join(args.output, "prices")
    reports.generate_price_pages(account_data_map,
                                 prices.build_price_map(entries),
                                 output_prices)

    # Output required price directives (to be filled in the source ledger by
    # fetching prices).
    reports.write_price_directives(path.join(output_prices, "prices.beancount"),
                                   pricer, args.days_price_threshold)
def forward_method(self, entries, errors, options_map, file, fwdfunc=value):
    account_types = options.get_account_types(options_map)
    real_root = realization.realize(entries, account_types)
    price_map = prices.build_price_map(entries)
    # Note: When we forward, use the latest date (None).
    return fwdfunc(self, real_root, price_map, None, options_map, file)
def get_prices_table(entries: data.Entries, main_currency: str) -> Table:
    """Enumerate all the prices seen."""
    price_map = prices.build_price_map(entries)
    header = ['currency', 'cost_currency', 'price_file']
    rows = []
    for base_quote in price_map.keys():
        _, price = prices.get_latest_price(price_map, base_quote)
        if price is None:
            continue
        base, quote = base_quote
        rows.append([base, quote, price.quantize(PRICE_Q)])
    return Table(header, rows)
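
# Hedged usage sketch for get_prices_table(); assumes `entries` came from
# loader.load_file() and that Table is the simple (header, rows) container
# defined alongside this helper.
table = get_prices_table(entries, 'USD')
print(table.header)
for row in table.rows:
    print(row)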
def render_beancount(self, entries, errors, options_map, file):
    dcontext = options_map['dcontext']
    price_map = prices.build_price_map(entries)
    meta = data.new_metadata('<report_prices_db>', 0)
    for base_quote in price_map.forward_pairs:
        price_list = price_map[base_quote]
        base, quote = base_quote
        for date, price in price_list:
            entry = data.Price(meta, date, base, amount.Amount(price, quote))
            file.write(printer.format_entry(entry, dcontext))
        file.write('\n')
def get_rates_table(entries: data.Entries, currencies: Set[str],
                    main_currency: str) -> Table:
    """Enumerate all the exchange rates."""
    price_map = prices.build_price_map(entries)
    header = ['cost_currency', 'rate_file']
    rows = []
    for currency in currencies:
        _, rate = prices.get_latest_price(price_map,
                                          (currency, main_currency))
        if rate is None:
            continue
        rows.append([currency, rate.quantize(PRICE_Q)])
    return Table(header, rows)
def test_lookup_price_and_inverse(self, entries, _, __):
    """
    2013-06-01 price USD 1.01 CAD
    """
    price_map = prices.build_price_map(entries)

    # Ensure that the forward exception includes the forward detail.
    try:
        prices._lookup_price_and_inverse(price_map, ('EUR', 'USD'))
        self.fail("Exception not raised")
    except KeyError as exc:
        self.assertRegex(str(exc), "('EUR', 'USD')")
def do_region(filename, args, conversion=None):
    """Print out a list of transactions in a region and balances.

    Args:
      filename: A string, the name of the ledger file.
      args: A tuple of the rest of arguments. We're expecting the first argument
        to be a string which contains either a lineno integer or a
        (filename:)?lineno:lineno combination (which can be used if the location
        is not in the top-level file).
      conversion: A string, one of None, 'value', or 'cost'; if set, convert
        balances output to market value (or cost).
    """
    # Parse the arguments, get the line number.
    if len(args) != 1:
        raise SystemExit("Missing line number or link argument.")
    location_spec = args[0]

    # Load the input file.
    entries, errors, options_map = loader.load_file(filename)

    # Parse the argument as a line number or a
    # "<filename>:<lineno>:<lineno>" spec to pull context from, with
    # optional filename and optional last line number.
    #
    # If a filename is not provided, the ledger's top-level filename is used
    # (this is the common case). An explicit filename is used to get context
    # in included files.
    #
    # If a single line number is provided, the closest transaction is
    # selected. If an interval of line numbers is provided, all transactions
    # whose first line is inside the interval are selected.
    match = re.match(r"(?:(.+?):)?(\d+):(\d+)$", location_spec)
    if not match:
        raise SystemExit("Invalid line number or link format for region.")
    included_filename, first_line, last_line = match.groups()
    search_filename = (path.abspath(included_filename)
                       if included_filename else
                       options_map['filename'])
    lineno = int(first_line)
    last_lineno = int(last_line)

    # Find all the entries in the region. (To be clear, this isn't like the
    # 'linked' command; none of the links are followed.)
    region_entries = [
        entry for entry in data.filter_txns(entries)
        if (entry.meta['filename'] == search_filename and
            lineno <= entry.meta['lineno'] <= last_lineno)
    ]

    price_map = (prices.build_price_map(entries)
                 if conversion == 'value' else None)
    render_mini_balances(region_entries, options_map, conversion, price_map)
def create_row_context(entries, options_map):
    """Create the context container which we will use to evaluate rows."""
    context = RowContext()
    context.balance = inventory.Inventory()

    # Initialize some global properties for use by some of the accessors.
    context.options_map = options_map
    context.account_types = options.get_account_types(options_map)
    context.open_close_map = getters.get_account_open_close(entries)
    context.commodity_map = getters.get_commodity_directives(entries)
    context.price_map = prices.build_price_map(entries)

    return context
def build_price_map_util(date_currency_price_tuples):
    """Build a partial price-map just for testing.

    Args:
      date_currency_price_tuples: A list of (datetime.date, currency-string,
        price-Amount) tuples to fill in the database with.
    Returns:
      A price_map, as per build_price_map().
    """
    return prices.build_price_map([
        data.Price(None, date, currency, price)
        for date, currency, price in date_currency_price_tuples
    ])
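
# Hedged usage sketch for build_price_map_util(); the dates and rates below are
# made up for illustration.
import datetime
from beancount.core import prices
from beancount.core.amount import Amount
from beancount.core.number import D

price_map = build_price_map_util([
    (datetime.date(2013, 6, 1), 'USD', Amount(D('1.10'), 'CAD')),
    (datetime.date(2013, 6, 2), 'USD', Amount(D('1.12'), 'CAD')),
])
print(prices.get_latest_price(price_map, ('USD', 'CAD')))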
def test_project_missing(self, entries, _, __):
    """
    2013-06-15 price HOOL 1000.00 USD
    2013-07-01 price USD 1.12 CAD
    2013-07-15 price HOOL 1100.00 USD
    """
    price_map = prices.build_price_map(entries)
    new_price_map = prices.project(price_map, "USD", "CAD")

    # Check that there haven't been conversions before a price was
    # available. {b2b23353275d}
    self.assertEqual([(datetime.date(2013, 7, 15), D('1232.0000'))],
                     prices.get_all_prices(new_price_map, ("HOOL", "CAD")))
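
# A self-contained sketch of prices.project(), which synthesizes quote-currency
# prices: HOOL/USD on 2013-07-15 (1100.00) times USD/CAD (1.12) yields the
# HOOL/CAD price of 1232.0000 asserted above. The ledger text is illustrative.
from beancount import loader
from beancount.core import prices

entries, errors, options_map = loader.load_string("""
2013-07-01 price USD 1.12 CAD
2013-07-15 price HOOL 1100.00 USD
""")
cad_map = prices.project(prices.build_price_map(entries), "USD", "CAD")
print(prices.get_all_prices(cad_map, ("HOOL", "CAD")))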
def test_get_price(self, entries, _, __):
    """
    2013-06-01 price USD 1.00 CAD
    2013-06-10 price USD 1.50 CAD
    2013-07-01 price USD 2.00 CAD
    """
    price_map = prices.build_price_map(entries)

    date, price = prices.get_price(price_map, 'USD/CAD',
                                   datetime.date(2013, 5, 15))
    self.assertEqual(None, price)
    self.assertEqual(None, date)

    date, price = prices.get_price(price_map, 'USD/CAD',
                                   datetime.date(2013, 6, 1))
    self.assertEqual(D('1.00'), price)
    self.assertEqual(datetime.date(2013, 6, 1), date)

    date, price = prices.get_price(price_map, 'USD/CAD',
                                   datetime.date(2013, 6, 5))
    self.assertEqual(D('1.00'), price)
    self.assertEqual(datetime.date(2013, 6, 1), date)

    date, price = prices.get_price(price_map, 'USD/CAD',
                                   datetime.date(2013, 6, 10))
    self.assertEqual(D('1.50'), price)
    self.assertEqual(datetime.date(2013, 6, 10), date)

    date, price = prices.get_price(price_map, 'USD/CAD',
                                   datetime.date(2013, 6, 20))
    self.assertEqual(D('1.50'), price)
    self.assertEqual(datetime.date(2013, 6, 10), date)

    date, price = prices.get_price(price_map, 'USD/CAD',
                                   datetime.date(2013, 7, 1))
    self.assertEqual(D('2.00'), price)
    self.assertEqual(datetime.date(2013, 7, 1), date)

    date, price = prices.get_price(price_map, 'USD/CAD',
                                   datetime.date(2013, 7, 15))
    self.assertEqual(D('2.00'), price)
    self.assertEqual(datetime.date(2013, 7, 1), date)

    # With no date, should devolve to get_latest_price().
    date, price = prices.get_price(price_map, 'USD/CAD', None)
    self.assertEqual(D('2.00'), price)
    self.assertEqual(datetime.date(2013, 7, 1), date)

    # Test not found.
    result = prices.get_price(price_map, ('EWJ', 'JPY'))
    self.assertEqual((None, None), result)
def load_file(self) -> None:
    """Load the main file and all included files and set attributes."""
    # use the internal function to disable cache
    if not self._is_encrypted:
        # pylint: disable=protected-access
        self.all_entries, self.errors, self.options = loader._load(
            [(self.beancount_file_path, True)], None, None, None
        )
    else:
        self.all_entries, self.errors, self.options = loader.load_file(
            self.beancount_file_path
        )

    self.account_types = get_account_types(self.options)
    self.price_map = build_price_map(self.all_entries)
    self.all_root_account = realization.realize(
        self.all_entries, self.account_types
    )

    entries_by_type: DefaultDict[
        Type[Directive], Entries
    ] = collections.defaultdict(list)
    for entry in self.all_entries:
        entries_by_type[type(entry)].append(entry)
    self.all_entries_by_type = entries_by_type

    self.accounts = AccountDict()
    for entry in entries_by_type[Open]:
        self.accounts.setdefault(
            cast(Open, entry).account
        ).meta = entry.meta
    for entry in entries_by_type[Close]:
        self.accounts.setdefault(
            cast(Close, entry).account
        ).close_date = entry.date

    self.fava_options, errors = parse_options(
        cast(List[Custom], entries_by_type[Custom])
    )
    self.errors.extend(errors)

    if not self._is_encrypted:
        self._watcher.update(*self.paths_to_watch())

    for mod in MODULES:
        getattr(self, mod).load_file()

    self.filters = Filters(self.options, self.fava_options)
    self.filter(True)
def test_get_latest_price(self, entries, _, __):
    """
    2013-06-01 price USD 1.01 CAD
    2013-06-09 price USD 1.09 CAD
    2013-06-11 price USD 1.11 CAD
    """
    price_map = prices.build_price_map(entries)
    price_list = prices.get_latest_price(price_map, ('USD', 'CAD'))
    expected = (datetime.date(2013, 6, 11), D('1.11'))
    self.assertEqual(expected, price_list)

    # Test not found.
    result = prices.get_latest_price(price_map, ('EWJ', 'JPY'))
    self.assertEqual((None, None), result)
def test_project_collisions(self, entries, _, __):
    """
    2013-06-01 price USD 1.12 CAD
    2013-06-15 price HOOL 1000.00 USD
    2013-06-15 price HOOL 1125.00 CAD
    """
    price_map = prices.build_price_map(entries)
    new_price_map = prices.project(price_map, "USD", "CAD")

    # Check that the original prices in the database were not overridden.
    # See {97a5703ac517}.
    self.assertEqual([(datetime.date(2013, 6, 15), D('1120.0000')),
                      (datetime.date(2013, 6, 15), D('1125.00'))],
                     prices.get_all_prices(new_price_map, ("HOOL", "CAD")))
def wrapper(*posargs, **kwargs):
    filename = app.args.filename
    if loader.needs_refresh(app.options):
        logging.info('Reloading...')

        # Save the source for later, to render.
        with open(filename, encoding='utf8') as f:
            app.source = f.read()

        # Parse the beancount file.
        entries, errors, options_map = loader.load_file(filename)

        # Print out the list of errors.
        if errors:
            # pylint: disable=unsupported-assignment-operation
            request.params['render_overlay'] = True
            print(',----------------------------------------------------------------')
            printer.print_errors(errors, file=sys.stdout)
            print('`----------------------------------------------------------------')

        # Save globals in the global app.
        app.entries = entries
        app.errors = errors
        app.options = options_map

        app.account_types = options.get_account_types(options_map)

        # Pre-compute the price database.
        app.price_map = prices.build_price_map(entries)

        # Pre-compute the list of active years.
        app.active_years = list(getters.get_active_years(entries))

        # Reset the view cache.
        app.views.clear()
    else:
        # For now, the overlay is a link to the errors page. Always render
        # it on the right when there are errors.
        if app.errors:
            # pylint: disable=unsupported-assignment-operation
            request.params['render_overlay'] = True

    return callback(*posargs, **kwargs)
def get_date_rates(self, entries):
    if not self.args.commodity:
        self.parser.error(
            "Commodity pair must be specified (in BASE/QUOTE format)")
    if not re.match('{ccy}/{ccy}$'.format(ccy=amount.CURRENCY_RE),
                    self.args.commodity):
        self.parser.error(
            ('Invalid commodity pair "{}"; '
             'must be in BASE/QUOTE format').format(self.args.commodity))
    price_map = prices.build_price_map(entries)
    try:
        date_rates = prices.get_all_prices(price_map, self.args.commodity)
    except KeyError:
        self.parser.error(
            "Commodity not present in database: {}".format(
                self.args.commodity))
    return date_rates
def load_file(self):
    """Load the main file and all included files and set attributes."""
    # use the internal function to disable cache
    if not self._is_encrypted:
        # pylint: disable=protected-access
        self.all_entries, self.errors, self.options = \
            loader._load([(self.beancount_file_path, True)],
                         None, None, None)
        self.account_types = get_account_types(self.options)
        self._watcher.update(*self.paths_to_watch())
    else:
        self.all_entries, self.errors, self.options = \
            loader.load_file(self.beancount_file_path)
        self.account_types = get_account_types(self.options)

    self.price_map = prices.build_price_map(self.all_entries)
    self.all_root_account = realization.realize(self.all_entries,
                                                self.account_types)

    entries_by_type = collections.defaultdict(list)
    for entry in self.all_entries:
        entries_by_type[type(entry)].append(entry)
    self.all_entries_by_type = entries_by_type

    self.accounts = _AccountDict()
    for entry in entries_by_type[Open]:
        self.accounts.setdefault(entry.account).meta = entry.meta
    for entry in entries_by_type[Close]:
        self.accounts.setdefault(entry.account).close_date = entry.date

    self.fava_options, errors = parse_options(entries_by_type[Custom])
    self.errors.extend(errors)

    for mod in MODULES:
        getattr(self, mod).load_file()

    self._filters = {
        'account': AccountFilter(self.options, self.fava_options),
        'filter': AdvancedFilter(self.options, self.fava_options),
        'time': TimeFilter(self.options, self.fava_options),
    }

    self.filter(True)