Example No. 1
    def test_get_final_holdings_with_prices(self, entries, _, __):
        """
        2013-01-01 open Assets:Account1
        2013-01-01 open Assets:Account2
        2013-01-01 open Assets:Account3
        2013-01-01 open Assets:Cash
        2013-01-01 open Equity:Unknown

        2013-04-05 *
          Equity:Unknown
          Assets:Cash			50000 USD

        2013-04-01 *
          Assets:Account1             15 HOOL {518.73 USD}
          Assets:Cash

        2013-06-01 price HOOL  578.02 USD

        """
        price_map = prices.build_price_map(entries)
        holdings_list = holdings.get_final_holdings(entries,
                                                    ('Assets', 'Liabilities'),
                                                    price_map)

        holdings_list = sorted(map(tuple, holdings_list))
        expected_values = [
            ('Assets:Account1', D('15'), 'HOOL', D('518.73'), 'USD',
             D('7780.95'), D('8670.30'),
             D('578.02'), datetime.date(2013, 6, 1)),
            ('Assets:Cash', D('42219.05'), 'USD', None, 'USD',
             D('42219.05'), D('42219.05'), None, None),
            # Notice no Equity account.
        ]
        self.assertEqual(expected_values, holdings_list)
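
Every example on this page follows the same two-step pattern: load a ledger with beancount's loader, then build a price map from the resulting entries. A minimal sketch of that shared setup (the ledger path is a placeholder, not a file referenced anywhere above):

from beancount import loader
from beancount.core import prices

# 'ledger.beancount' is a placeholder path used only for illustration.
entries, errors, options_map = loader.load_file('ledger.beancount')

# The price map is a dict keyed by (base, quote) pairs; build it once and reuse it.
price_map = prices.build_price_map(entries)
print(len(entries), 'entries,', len(errors), 'errors,', len(price_map), 'price pairs')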
Example No. 2
    def load_file(self):
        """Load self.beancount_file_path and compute things that are independent
        of how the entries might be filtered later"""
        with open(self.beancount_file_path, encoding='utf8') as f:
            self.source = f.read()

        self.entries, self._errors, self.options = loader.load_file(self.beancount_file_path)
        self.all_entries = self.entries
        self.price_map = prices.build_price_map(self.all_entries)

        self.title = self.options['title']

        self.errors = []
        for error in self._errors:
            self.errors.append({
                'file': error.source['filename'],
                'line': error.source['lineno'],
                'error': error.message,
                'entry': error.entry  # TODO render entry
            })

        self.active_years = list(getters.get_active_years(self.all_entries))
        self.active_tags = list(getters.get_all_tags(self.all_entries))

        self.account_types = options.get_account_types(self.options)
        self.real_accounts = realization.realize(self.entries, self.account_types)
        self.all_accounts = self._account_components()
Example No. 3
    def load_file(self):
        """Load the main file and all included files and set attributes."""
        # use the internal function to disable cache
        if not self._is_encrypted:
            # pylint: disable=protected-access
            self.all_entries, self.errors, self.options = \
                loader._load([(self.beancount_file_path, True)],
                             None, None, None)
            include_path = os.path.dirname(self.beancount_file_path)
            self._watcher.update(self.options['include'], [
                os.path.join(include_path, path)
                for path in self.options['documents']
            ])
        else:
            self.all_entries, self.errors, self.options = \
                loader.load_file(self.beancount_file_path)
        self.price_map = prices.build_price_map(self.all_entries)
        self.account_types = get_account_types(self.options)
        self.all_root_account = realization.realize(self.all_entries,
                                                    self.account_types)
        if self.options['render_commas']:
            self._format_string = '{:,f}'
            self._default_format_string = '{:,.2f}'
        else:
            self._format_string = '{:f}'
            self._default_format_string = '{:.2f}'

        self.fava_options, errors = parse_options(
            filter_type(self.all_entries, Custom))
        self.errors.extend(errors)

        for mod in MODULES:
            getattr(self, mod).load_file()

        self.filter(True)
Example No. 4
    def load_file(self):
        """Load self.beancount_file_path and compute things that are independent
        of how the entries might be filtered later"""
        self.all_entries, self.errors, self.options = \
            loader.load_file(self.beancount_file_path)
        self.price_map = prices.build_price_map(self.all_entries)
        self.account_types = options.get_account_types(self.options)

        self.title = self.options['title']
        if self.options['render_commas']:
            self.format_string = '{:,f}'
            self.default_format_string = '{:,.2f}'
        else:
            self.format_string = '{:f}'
            self.default_format_string = '{:.2f}'

        self.active_years = list(getters.get_active_years(self.all_entries))
        self.active_tags = list(getters.get_all_tags(self.all_entries))
        self.active_payees = list(getters.get_all_payees(self.all_entries))

        self.queries = _filter_entries_by_type(self.all_entries, Query)

        self.all_root_account = realization.realize(self.all_entries,
                                                    self.account_types)
        self.all_accounts = _list_accounts(self.all_root_account)
        self.all_accounts_leaf_only = _list_accounts(self.all_root_account,
                                                     leaf_only=True)

        self.sidebar_links = _sidebar_links(self.all_entries)

        self._apply_filters()

        self.budgets = Budgets(self.entries)
        self.errors.extend(self.budgets.errors)
Example No. 5
def get_matching_entries(entries, options_map, query):
    query_text = 'SELECT * ' + query
    parser = query_parser.Parser()
    parsed_query = parser.parse(query_text)
    c_from = None
    if parsed_query.from_clause:
        c_from = query_compile.compile_from(parsed_query.from_clause, query_env.FilterEntriesEnvironment())
    c_where = None
    if parsed_query.where_clause:
        c_where = query_compile.compile_expression(parsed_query.where_clause, query_env.FilterPostingsEnvironment())

    # Figure out if we need to compute balance.
    balance = None
    if c_where and query_execute.uses_balance_column(c_where):
        balance = inventory.Inventory()

    context = query_execute.RowContext()
    context.balance = balance

    # Initialize some global properties for use by some of the accessors.
    context.options_map = options_map
    context.account_types = options.get_account_types(options_map)
    context.open_close_map = getters.get_account_open_close(entries)
    #context.commodity_map = getters.get_commodity_map(entries)
    context.price_map = prices.build_price_map(entries) 

    if c_from is not None:
        filtered_entries = query_execute.filter_entries(c_from, entries, options_map)
    else:
        filtered_entries = entries
    return filtered_entries
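
A possible way to drive get_matching_entries, assuming a placeholder ledger path and a query fragment in bean-query FROM syntax (the function prefixes it with 'SELECT * '):

from beancount import loader

entries, _, options_map = loader.load_file('ledger.beancount')  # placeholder path
matching = get_matching_entries(entries, options_map, 'FROM year = 2016')
print(len(matching), 'entries matched the FROM clause')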
Example No. 6
def get_assets_holdings(entries, options_map, currency=None):
    """Return holdings for all assets and liabilities.

    Args:
      entries: A list of directives.
      options_map: A dict of parsed options.
      currency: If specified, a string, the target currency to convert all
        holding values to.
    Returns:
      A list of Holding instances and a price-map.
    """
    # Compute a price map, to perform conversions.
    price_map = prices.build_price_map(entries)

    # Get the list of holdings.
    account_types = options.get_account_types(options_map)
    holdings_list = holdings.get_final_holdings(entries,
                                                (account_types.assets,
                                                 account_types.liabilities),
                                                price_map)

    # Convert holdings to a unified currency.
    if currency:
        holdings_list = holdings.convert_to_currency(price_map, currency, holdings_list)

    return holdings_list, price_map
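
A short usage sketch for get_assets_holdings, assuming a loaded ledger and USD as the target currency; the printed fields are standard Holding attributes:

from beancount import loader

entries, _, options_map = loader.load_file('ledger.beancount')  # placeholder path
holdings_list, price_map = get_assets_holdings(entries, options_map, currency='USD')
for holding in holdings_list:
    # market_value is expressed in the requested target currency.
    print(holding.account, holding.number, holding.currency, holding.market_value)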
Example No. 7
    def load_file(self, beancount_file_path=None):
        """Load self.beancount_file_path and compute things that are independent
        of how the entries might be filtered later"""
        if beancount_file_path:
            self.beancount_file_path = beancount_file_path

        self.all_entries, self.errors, self.options = \
            loader.load_file(self.beancount_file_path)
        self.price_map = prices.build_price_map(self.all_entries)
        self.account_types = options.get_account_types(self.options)

        self.title = self.options['title']
        if self.options['render_commas']:
            self.format_string = '{:,f}'
            self.default_format_string = '{:,.2f}'
        else:
            self.format_string = '{:f}'
            self.default_format_string = '{:.2f}'
        self.dcontext = self.options['dcontext']

        self.active_years = list(getters.get_active_years(self.all_entries))
        self.active_tags = list(getters.get_all_tags(self.all_entries))
        self.active_payees = list(getters.get_all_payees(self.all_entries))

        self.queries = self._entries_filter_type(self.all_entries, Query)

        self.all_root_account = realization.realize(self.all_entries,
                                                    self.account_types)
        self.all_accounts = self._all_accounts()
        self.all_accounts_leaf_only = self._all_accounts(leaf_only=True)

        self._apply_filters()
Example No. 8
def test_holdings_at_dates(load_doc):
    """
    plugin "auto_accounts"

    2016-01-01 *
      Equity:Unknown
      Assets:Cash			5000 USD

    2016-01-02 *
      Assets:Account1             15 HOOL {123 USD}
      Assets:Cash

    2016-01-03 *
      Assets:Account1             10 HOOL {130 USD}
      Assets:Cash
    """
    entries, errors, options = load_doc
    price_map = prices.build_price_map(entries)
    dates = [
        datetime.date(2016, 1, 1),
        datetime.date(2016, 1, 2),
        datetime.date(2016, 1, 3),
        datetime.date(2016, 1, 4),
    ]
    number_of_holdings = list(
        map(len, list(holdings_at_dates(entries, dates, price_map, options))))
    assert number_of_holdings == [0, 1, 2, 3]
Example No. 9
def load_csv_and_prices(holdings_filename, prices_filename, currency):
    """Load the holdings and prices from filenames and convert to a common currency.

    Args:
      holdings_filename: A string, the name of a CSV file containing the list of Holdings.
      prices_filename: A string, the name of a Beancount file containing price directives.
      currency: A string, the target currency to convert all the holdings to.
    Returns:
      Two lists of holdings: a list in the original currencies, and a list all
      converted to the target currency.
    """
    # Load the price database.
    # Generate with "bean-query LEDGER print_prices"
    price_entries, errors, options_map = loader.load(prices_filename)
    price_map = prices.build_price_map(price_entries)

    # Load the holdings list.
    # Generate with "bean-query LEDGER holdings"
    mixed_holdings_list = list(
        holdings_reports.load_from_csv(open(holdings_filename)))

    # Convert all the amounts to a common currency (otherwise summing market
    # values makes no sense).
    holdings_list = holdings.convert_to_currency(price_map, currency,
                                                 mixed_holdings_list)

    return mixed_holdings_list, holdings_list
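
A sketch of calling this helper with purely hypothetical file names, just to show the expected inputs and the two returned lists:

# Both file names below are placeholders for illustration.
mixed, converted = load_csv_and_prices('holdings.csv', 'prices.beancount', 'USD')
print(len(mixed), 'holdings read,', len(converted), 'converted to USD')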
Example No. 10
def test_holdings_with_prices(load_doc):
    """
    2013-04-05 *
      Equity:Unknown
      Assets:Cash			50000 USD

    2013-04-01 *
      Assets:Account1             15 HOOL {518.73 USD}
      Assets:Cash

    2013-06-01 price HOOL  578.02 USD

    """
    entries, _, _ = load_doc
    price_map = prices.build_price_map(entries)

    holdings = get_final_holdings(entries, ('Assets',), price_map)
    holdings = sorted(map(tuple, holdings))

    assert holdings == [
        ('Assets:Account1', A('15 HOOL'),
         Cost(D('518.73'), 'USD', datetime.date(2013, 4, 1),
              None), D('578.02'), None, None),
        ('Assets:Cash', A('42219.05 USD'), None, None, None, None),
    ]
Example No. 11
    def test_get_all_prices(self, entries, _, __):
        """
        2013-06-01 price  USD  1.01 CAD
        2013-06-03 price  USD  1.03 CAD
        2013-06-05 price  USD  1.05 CAD
        2013-06-07 price  USD  1.07 CAD
        2013-06-09 price  USD  1.09 CAD
        2013-06-11 price  USD  1.11 CAD
        """
        price_map = prices.build_price_map(entries)
        price_list = prices.get_all_prices(price_map, ('USD', 'CAD'))
        expected = [(datetime.date(2013, 6, 1), D('1.01')),
                    (datetime.date(2013, 6, 3), D('1.03')),
                    (datetime.date(2013, 6, 5), D('1.05')),
                    (datetime.date(2013, 6, 7), D('1.07')),
                    (datetime.date(2013, 6, 9), D('1.09')),
                    (datetime.date(2013, 6, 11), D('1.11'))]
        self.assertEqual(expected, price_list)

        inv_price_list = prices.get_all_prices(price_map, ('CAD', 'USD'))
        self.assertEqual(len(price_list), len(inv_price_list))

        # Test not found.
        with self.assertRaises(KeyError):
            prices.get_all_prices(price_map, ('EWJ', 'JPY'))
Example No. 12
    def test_build_price_map(self, entries, _, __):
        """
        2013-06-01 price  USD  1.10 CAD

        ;; Try some prices at the same date.
        2013-06-02 price  USD  1.11 CAD
        2013-06-02 price  USD  1.12 CAD
        2013-06-02 price  USD  1.13 CAD

        ;; One after too.
        2013-06-03 price  USD  1.14 CAD

        ;; Try a few inverse prices.
        2013-06-05 price  CAD  0.86956 USD
        2013-06-06 price  CAD  0.86207 USD
        """
        price_map = prices.build_price_map(entries)

        self.assertEqual(2, len(price_map))
        self.assertEqual(set([('USD', 'CAD'), ('CAD', 'USD')]),
                         set(price_map.keys()))

        values = price_map[('USD', 'CAD')]
        expected = [(datetime.date(2013, 6, 1), D('1.10')),
                    (datetime.date(2013, 6, 2), D('1.13')),
                    (datetime.date(2013, 6, 3), D('1.14')),
                    (datetime.date(2013, 6, 5), D('1.15')),
                    (datetime.date(2013, 6, 6), D('1.16'))]
        for (exp_date, exp_value), (act_date,
                                    act_value) in zip(expected, values):
            self.assertEqual(exp_date, act_date)
            self.assertEqual(exp_value, act_value.quantize(D('0.01')))

        self.assertEqual(5, len(price_map[('CAD', 'USD')]))
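
The assertions above depend on the shape of the price map: a dict keyed by (base, quote) currency pairs whose values are date-sorted lists of (date, Decimal) points, with inverted pairs added automatically. A small sketch that inspects that structure, assuming a ledger with at least one price directive:

from beancount import loader
from beancount.core import prices

entries, _, _ = loader.load_file('ledger.beancount')  # placeholder path
price_map = prices.build_price_map(entries)

for base_quote in sorted(price_map.forward_pairs):
    # get_latest_price returns the most recent (date, rate) point for the pair.
    date, rate = prices.get_latest_price(price_map, base_quote)
    print('{}/{}: {} as of {}'.format(base_quote[0], base_quote[1], rate, date))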
Example No. 13
    def load_file(self):
        """Load self.beancount_file_path and compute things that are independent
        of how the entries might be filtered later"""
        self.all_entries, self.errors, self.options = \
            loader.load_file(self.beancount_file_path)
        self.price_map = prices.build_price_map(self.all_entries)
        self.account_types = options.get_account_types(self.options)

        self.title = self.options['title']
        if self.options['render_commas']:
            self.format_string = '{:,f}'
            self.default_format_string = '{:,.2f}'
        else:
            self.format_string = '{:f}'
            self.default_format_string = '{:.2f}'

        self.active_years = list(getters.get_active_years(self.all_entries))
        self.active_tags = list(getters.get_all_tags(self.all_entries))
        self.active_payees = list(getters.get_all_payees(self.all_entries))

        self.queries = _filter_entries_by_type(self.all_entries, Query)

        self.all_root_account = realization.realize(self.all_entries,
                                                    self.account_types)
        self.all_accounts = _list_accounts(self.all_root_account)
        self.all_accounts_leaf_only = _list_accounts(
            self.all_root_account, leaf_only=True)

        self.sidebar_links = _sidebar_links(self.all_entries)

        self._apply_filters()

        self.budgets = Budgets(self.entries)
        self.errors.extend(self.budgets.errors)
Example No. 14
def compute_returns_with_regexp(entries,
                                options_map,
                                transfer_account,
                                assets_regexp,
                                intflows_regexp,
                                internalize_regexp=None,
                                date_begin=None,
                                date_end=None):
    """Compute the returns of a portfolio of accounts defined by a regular expression.

    Args:
      entries: A list of directives.
      options_map: An options dict as produced by the loader.
      transfer_account: A string, the name of an account to use for internalizing entries
        which need to be split between internal and external flows.
      assets_regexp: A regular expression string that matches names of asset accounts to
        value for the portfolio.
      intflows_regexp: A regular expression string that matches names of accounts considered
        internal flows to the portfolio (typically income and expenses accounts).
      internalize_regexp: A regular expression string that matches names of accounts
        to force internalization of. See internalize() for details.
      date_begin: A datetime.date instance, the beginning date of the period to compute
        returns over.
      date_end: A datetime.date instance, the end date of the period to compute returns
        over.
    Returns:
      See compute_returns().
    """
    acc_types = options.get_account_types(options_map)
    price_map = prices.build_price_map(entries)

    # Fetch the matching entries and figure out account name groups.
    matching_entries, (accounts_value, accounts_intflows, accounts_extflows,
                       accounts_internalize) = find_matching(
                           entries, acc_types, assets_regexp, intflows_regexp,
                           internalize_regexp)

    logging.info('Asset accounts:')
    for account in sorted(accounts_value):
        logging.info('  %s', account)

    logging.info('Internal flows:')
    for account in sorted(accounts_intflows):
        logging.info('  %s', account)

    logging.info('External flows:')
    for account in sorted(accounts_extflows):
        logging.info('  %s', account)
    logging.info('')

    if accounts_internalize:
        logging.info('Explicitly internalized accounts:')
        for account in sorted(accounts_internalize):
            logging.info('  %s', account)
        logging.info('')

    return compute_returns(entries, transfer_account, accounts_value,
                           accounts_intflows, accounts_internalize, price_map,
                           date_begin, date_end)
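
A hedged sketch of invoking this function; the account regexps, transfer account, and date range below are illustrative only, and the return value is whatever compute_returns() produces:

import datetime
from beancount import loader

entries, _, options_map = loader.load_file('ledger.beancount')  # placeholder path
returns = compute_returns_with_regexp(
    entries, options_map,
    transfer_account='Assets:Internalized',          # illustrative account name
    assets_regexp='Assets:US:Broker',                # illustrative regexps
    intflows_regexp='(Income|Expenses):US:Broker',
    date_begin=datetime.date(2013, 1, 1),
    date_end=datetime.date(2014, 1, 1))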
Example No. 15
    def render_beancount(self, entries, errors, options_map, file):
        dcontext = options_map['dcontext']
        price_map = prices.build_price_map(entries)
        meta = data.new_metadata('<report_prices_db>', 0)
        for base_quote in price_map.forward_pairs:
            price_list = price_map[base_quote]
            base, quote = base_quote
            for date, price in price_list:
                entry = data.Price(meta, date, base, amount.Amount(price, quote))
                file.write(printer.format_entry(entry, dcontext))
            file.write('\n')
Example No. 16
    def test_convert_amount(self, entries, _, __):
        """
        2013-07-01 price  USD  1.20 CAD
        """
        price_map = prices.build_price_map(entries)
        self.assertEqual(A('120 CAD'),
                         prices.convert_amount(price_map, 'CAD', A('100 USD')))
        self.assertEqual(A('100 CAD'),
                         prices.convert_amount(price_map, 'CAD', A('100 CAD')))
        self.assertEqual(None,
                         prices.convert_amount(price_map, 'EUR', A('100 USD')))
Example No. 17
    def test_lookup_price_and_inverse(self, entries, _, __):
        """
        2013-06-01 price  USD  1.01 CAD
        """
        price_map = prices.build_price_map(entries)

        # Ensure that the forward exception includes the forward detail.
        try:
            prices._lookup_price_and_inverse(price_map, ('EUR', 'USD'))
            self.fail("Exception not raised")
        except KeyError as exc:
            self.assertTrue(re.search("('EUR', 'USD')", str(exc)))
Example No. 18
    def load_file(self):
        """Load self.beancount_file_path and compute things that are independent
        of how the entries might be filtered later"""
        # use the internal function to disable cache
        if not self.is_encrypted:
            self.all_entries, self.errors, self.options = \
                loader._load([(self.beancount_file_path, True)],
                             None, None, None)
            include_path = os.path.dirname(self.beancount_file_path)
            self.watcher.update(self.options['include'], [
                os.path.join(include_path, path)
                for path in self.options['documents']])
        else:
            self.all_entries, self.errors, self.options = \
                loader.load_file(self.beancount_file_path)
        self.price_map = prices.build_price_map(self.all_entries)
        self.account_types = options.get_account_types(self.options)

        self.title = self.options['title']
        if self.options['render_commas']:
            self._format_string = '{:,f}'
            self._default_format_string = '{:,.2f}'
        else:
            self._format_string = '{:f}'
            self._default_format_string = '{:.2f}'

        self.active_years = list(getters.get_active_years(self.all_entries))
        self.active_tags = list(getters.get_all_tags(self.all_entries))
        self.active_payees = list(getters.get_all_payees(self.all_entries))

        self.queries = _filter_entries_by_type(self.all_entries, Query)
        self.custom_entries = _filter_entries_by_type(self.all_entries, Custom)

        self.all_root_account = realization.realize(self.all_entries,
                                                    self.account_types)
        self.all_accounts = _list_accounts(self.all_root_account)
        self.all_accounts_active = _list_accounts(
            self.all_root_account, active_only=True)

        self.fava_options, errors = parse_options(self.custom_entries)
        self.errors.extend(errors)

        self.sidebar_links = _sidebar_links(self.custom_entries)

        self.upcoming_events = _upcoming_events(
            self.all_entries, self.fava_options['upcoming-events'])

        self.budgets, errors = parse_budgets(self.custom_entries)
        self.errors.extend(errors)

        self._apply_filters()
Example No. 19
    def test_get_price(self, entries, _, __):
        """
        2013-06-01 price  USD  1.00 CAD
        2013-06-10 price  USD  1.50 CAD
        2013-07-01 price  USD  2.00 CAD
        """
        price_map = prices.build_price_map(entries)

        date, price = prices.get_price(price_map, 'USD/CAD',
                                       datetime.date(2013, 5, 15))
        self.assertEqual(None, price)
        self.assertEqual(None, date)

        date, price = prices.get_price(price_map, 'USD/CAD',
                                       datetime.date(2013, 6, 1))
        self.assertEqual(D('1.00'), price)
        self.assertEqual(datetime.date(2013, 6, 1), date)

        date, price = prices.get_price(price_map, 'USD/CAD',
                                       datetime.date(2013, 6, 5))
        self.assertEqual(D('1.00'), price)
        self.assertEqual(datetime.date(2013, 6, 1), date)

        date, price = prices.get_price(price_map, 'USD/CAD',
                                       datetime.date(2013, 6, 10))
        self.assertEqual(D('1.50'), price)
        self.assertEqual(datetime.date(2013, 6, 10), date)

        date, price = prices.get_price(price_map, 'USD/CAD',
                                       datetime.date(2013, 6, 20))
        self.assertEqual(D('1.50'), price)
        self.assertEqual(datetime.date(2013, 6, 10), date)

        date, price = prices.get_price(price_map, 'USD/CAD',
                                       datetime.date(2013, 7, 1))
        self.assertEqual(D('2.00'), price)
        self.assertEqual(datetime.date(2013, 7, 1), date)

        date, price = prices.get_price(price_map, 'USD/CAD',
                                       datetime.date(2013, 7, 15))
        self.assertEqual(D('2.00'), price)
        self.assertEqual(datetime.date(2013, 7, 1), date)

        # With no date, this should devolve to get_latest_price().
        date, price = prices.get_price(price_map, 'USD/CAD', None)
        self.assertEqual(D('2.00'), price)
        self.assertEqual(datetime.date(2013, 7, 1), date)

        # Test not found.
        result = prices.get_price(price_map, ('EWJ', 'JPY'))
        self.assertEqual((None, None), result)
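
Outside of tests, the same lookups can be combined for ad-hoc conversions; a minimal sketch, assuming the ledger contains USD/CAD price directives:

import datetime
from decimal import Decimal

from beancount import loader
from beancount.core import prices
from beancount.core.amount import Amount

entries, _, _ = loader.load_file('ledger.beancount')  # placeholder path
price_map = prices.build_price_map(entries)

# Rate on or before a given date (falls back to the latest earlier price point).
date, rate = prices.get_price(price_map, ('USD', 'CAD'), datetime.date(2013, 6, 20))

# Convert an amount at the latest known rate; returns None when no rate exists.
converted = prices.convert_amount(price_map, 'CAD', Amount(Decimal('100'), 'USD'))
print(date, rate, converted)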
Example No. 20
def test_holdings_zero_position(load_doc):
    """
    2012-07-02 ! "I received 1 new share in dividend, without paying"
      Assets:Stocks:NYA 1 NYA {0 EUR}
      Income:Dividends:NYA -0 EUR

    2014-11-13 balance Assets:Stocks:NYA 1 NYA
    """
    entries, _, _ = load_doc
    price_map = prices.build_price_map(entries)
    holdings = get_final_holdings(entries, ('Assets', 'Liabilities'),
                                  price_map)
    assert len(holdings) == 1
    assert holdings[0].cost.currency == 'EUR'
Example No. 21
    def test_get_latest_price(self, entries, _, __):
        """
        2013-06-01 price  USD  1.01 CAD
        2013-06-09 price  USD  1.09 CAD
        2013-06-11 price  USD  1.11 CAD
        """
        price_map = prices.build_price_map(entries)
        price_list = prices.get_latest_price(price_map, ('USD', 'CAD'))
        expected = (datetime.date(2013, 6, 11), D('1.11'))
        self.assertEqual(expected, price_list)

        # Test not found.
        result = prices.get_latest_price(price_map, ('EWJ', 'JPY'))
        self.assertEqual((None, None), result)
Example No. 23
    def get_date_rates(self, entries):
        if not self.args.commodity:
            self.parser.error("Commodity pair must be specified (in BASE/QUOTE format)")
        if not re.match('{ccy}/{ccy}$'.format(ccy=amount.CURRENCY_RE),
                        self.args.commodity):
            self.parser.error(('Invalid commodity pair "{}"; '
                               'must be in BASE/QUOTE format').format(self.args.commodity))
        price_map = prices.build_price_map(entries)
        try:
            date_rates = prices.get_all_prices(price_map, self.args.commodity)
        except KeyError:
            self.parser.error(
                "Commodity not present in database: {}".format(self.args.commodity))
        return date_rates
Example No. 24
    def load_file(self):
        """Load self.beancount_file_path and compute things that are independent
        of how the entries might be filtered later"""
        # use the internal function to disable cache
        if not self.is_encrypted:
            self.all_entries, self.errors, self.options = loader._load(
                [(self.beancount_file_path, True)], None, None, None
            )
            include_path = os.path.dirname(self.beancount_file_path)
            self.watcher.update(
                self.options["include"], [os.path.join(include_path, path) for path in self.options["documents"]]
            )
        else:
            self.all_entries, self.errors, self.options = loader.load_file(self.beancount_file_path)
        self.price_map = prices.build_price_map(self.all_entries)
        self.account_types = options.get_account_types(self.options)

        self.title = self.options["title"]
        if self.options["render_commas"]:
            self._format_string = "{:,f}"
            self._default_format_string = "{:,.2f}"
        else:
            self._format_string = "{:f}"
            self._default_format_string = "{:.2f}"

        self.active_years = list(getters.get_active_years(self.all_entries))
        self.active_tags = list(getters.get_all_tags(self.all_entries))
        self.active_payees = list(getters.get_all_payees(self.all_entries))

        self.queries = _filter_entries_by_type(self.all_entries, Query)
        self.custom_entries = _filter_entries_by_type(self.all_entries, Custom)

        self.all_root_account = realization.realize(self.all_entries, self.account_types)
        self.all_accounts = _list_accounts(self.all_root_account)
        self.all_accounts_active = _list_accounts(self.all_root_account, active_only=True)

        self.fava_options, errors = parse_options(self.custom_entries)
        self.errors.extend(errors)

        self.sidebar_links = _sidebar_links(self.custom_entries)

        self.upcoming_events = _upcoming_events(self.all_entries, self.fava_options["upcoming-events"])

        self.budgets, errors = parse_budgets(self.custom_entries)
        self.errors.extend(errors)

        self._apply_filters()
Example No. 25
    def wrapper(*posargs, **kwargs):
        filename = app.args.filename
        mtime = path.getmtime(filename)
        if mtime > app.last_mtime:
            app.last_mtime = mtime

            logging.info('Reloading...')

            # Save the source for later, to render.
            with open(filename, encoding='utf8') as f:
                app.source = f.read()

            # Parse the beancount file.
            entries, errors, options_map = loader.load_file(filename)

            # Print out the list of errors.
            if errors:
                request.params['render_overlay'] = True
                print(
                    ',----------------------------------------------------------------'
                )
                printer.print_errors(errors, file=sys.stdout)
                print(
                    '`----------------------------------------------------------------'
                )

            # Save globals in the global app.
            app.entries = entries
            app.errors = errors
            app.options = options_map
            app.account_types = options.get_account_types(options_map)

            # Pre-compute the price database.
            app.price_map = prices.build_price_map(app.entries)

            # Reset the view cache.
            app.views.clear()

        else:
            # For now, the overlay is a link to the errors page. Always render
            # it on the right when there are errors.
            if app.errors:
                request.params['render_overlay'] = True

        return callback(*posargs, **kwargs)
Example No. 26
    def test_get_final_holdings__zero_position(self, entries, _, __):
        """
        1970-01-01 open Assets:Stocks:NYA
        1970-01-01 open Expenses:Financial:Commissions
        1970-01-01 open Assets:Current
        1970-01-01 open Income:Dividends:NYA

        2012-07-02 ! "I received 1 new share in dividend, without paying"
          Assets:Stocks:NYA 1 NYA {0 EUR}
          Income:Dividends:NYA -0 EUR

        2014-11-13 balance Assets:Stocks:NYA 1 NYA
        """
        price_map = prices.build_price_map(entries)
        holdings_list = holdings.get_final_holdings(entries,
                                                    ('Assets', 'Liabilities'),
                                                    price_map)
        self.assertEqual(1, len(holdings_list))
        self.assertEqual('EUR', holdings_list[0].cost_currency)
Example No. 27
    def setUp(self, entries, _, __):
        """
        2013-06-01 price  USD  1.01 CAD
        2013-06-05 price  USD  1.05 CAD
        2013-06-06 price  USD  1.06 CAD
        2013-06-07 price  USD  1.07 CAD
        2013-06-10 price  USD  1.10 CAD

        2013-06-01 price  HOOL  101.00 USD
        2013-06-05 price  HOOL  105.00 USD
        2013-06-06 price  HOOL  106.00 USD
        2013-06-07 price  HOOL  107.00 USD
        2013-06-10 price  HOOL  110.00 USD

        2013-06-01 price  AAPL  91.00 USD
        2013-06-05 price  AAPL  95.00 USD
        2013-06-06 price  AAPL  96.00 USD
        2013-06-07 price  AAPL  97.00 USD
        2013-06-10 price  AAPL  90.00 USD
        """
        self.price_map = prices.build_price_map(entries)
Example No. 28
    def test_ordering_same_date(self, entries, _, __):
        """
        ;; The last one to appear in the file should be selected.
        2013-06-02 price  USD  1.13 CAD
        2013-06-02 price  USD  1.12 CAD
        2013-06-02 price  USD  1.11 CAD
        """
        price_map = prices.build_price_map(entries)

        self.assertEqual(2, len(price_map))
        self.assertEqual(set([('USD', 'CAD'), ('CAD', 'USD')]),
                         set(price_map.keys()))

        values = price_map[('USD', 'CAD')]
        expected = [(datetime.date(2013, 6, 2), D('1.11'))]
        for (exp_date, exp_value), (act_date,
                                    act_value) in zip(expected, values):
            self.assertEqual(exp_date, act_date)
            self.assertEqual(exp_value, act_value.quantize(D('0.01')))

        self.assertEqual(1, len(price_map[('CAD', 'USD')]))
Example No. 29
    def load_file(self, beancount_file_path=None):
        """Load self.beancount_file_path and compute things that are independent
        of how the entries might be filtered later"""
        if beancount_file_path:
            self.beancount_file_path = beancount_file_path

        self.all_entries, self.errors, self.options = \
            loader.load_file(self.beancount_file_path)
        self.price_map = prices.build_price_map(self.all_entries)
        self.account_types = options.get_account_types(self.options)

        self.title = self.options['title']
        if self.options['render_commas']:
            self.format_string = '{:,f}'
            self.default_format_string = '{:,.2f}'
        else:
            self.format_string = '{:f}'
            self.default_format_string = '{:.2f}'

        self.active_years = list(getters.get_active_years(self.all_entries))
        self.active_tags = list(getters.get_all_tags(self.all_entries))
        self.active_payees = list(getters.get_all_payees(self.all_entries))

        self.queries = self._entries_filter_type(self.all_entries, Query)

        self.all_root_account = realization.realize(self.all_entries,
                                                    self.account_types)
        self.all_accounts = self._all_accounts()
        self.all_accounts_leaf_only = self._all_accounts(leaf_only=True)

        self.sidebar_link_entries = [entry for entry in self.all_entries
                                     if isinstance(entry, Custom) and
                                     entry.type == 'fava-sidebar-link']

        self._apply_filters()

        self.budgets = Budgets(self.entries)
        self.errors.extend(self.budgets.errors)
Example No. 30
    def load_file(self):
        """Load self.beancount_file_path and compute things that are independent
        of how the entries might be filtered later"""

        self.entries, self._errors, self.options = loader.load_file(self.beancount_file_path)
        self.all_entries = self.entries
        self.price_map = prices.build_price_map(self.all_entries)
        self.account_types = options.get_account_types(self.options)

        self.title = self.options['title']

        self.errors = []
        for error in self._errors:
            self.errors.append({
                'file': error.source['filename'],
                'line': error.source['lineno'],
                'error': error.message
            })

        self.active_years = list(getters.get_active_years(self.all_entries))
        self.active_tags = list(getters.get_all_tags(self.all_entries))
        self.active_payees = list(getters.get_all_payees(self.all_entries))
        self.apply_filters()
Example No. 31
    def load_file(self):
        """Load self.beancount_file_path and compute things that are independent
        of how the entries might be filtered later"""

        self.entries, self._errors, self.options = loader.load_file(
            self.beancount_file_path)
        self.all_entries = self.entries
        self.price_map = prices.build_price_map(self.all_entries)
        self.account_types = options.get_account_types(self.options)

        self.title = self.options['title']

        self.errors = []
        for error in self._errors:
            self.errors.append({
                'file': error.source['filename'],
                'line': error.source['lineno'],
                'error': error.message
            })

        self.active_years = list(getters.get_active_years(self.all_entries))
        self.active_tags = list(getters.get_all_tags(self.all_entries))
        self.active_payees = list(getters.get_all_payees(self.all_entries))
        self.apply_filters()
Example No. 32
def test_holdings_with_prices(load_doc):
    """
    2013-04-05 *
      Equity:Unknown
      Assets:Cash			50000 USD

    2013-04-01 *
      Assets:Account1             15 HOOL {518.73 USD}
      Assets:Cash

    2013-06-01 price HOOL  578.02 USD

    """
    entries, _, _ = load_doc
    price_map = prices.build_price_map(entries)

    holdings = get_final_holdings(entries, ('Assets',), price_map)
    holdings = sorted(map(tuple, holdings))

    assert holdings == [
        ('Assets:Account1', A('15 HOOL'), Cost(D('518.73'), 'USD', None, None),
         D('578.02'), None, None),
        ('Assets:Cash', A('42219.05 USD'), None, None, None, None),
    ]
Example No. 33
def execute_query(query, entries, options_map):
    """Given a compiled select statement, execute the query.

    Args:
      query: An instance of a query_compile.Query
      entries: A list of directives.
      options_map: The parser's options map.
    Returns:
      A pair of:
        result_types: A list of (name, data-type) item pairs.
        result_rows: A list of ResultRow tuples of length and types described by
          'result_types'.
    """
    # Filter the entries using the WHERE clause.
    filt_entries = (filter_entries(query.c_from, entries, options_map)
                    if query.c_from is not None else entries)

    # Figure out the result types that describe what we return.
    result_types = [(target.name, target.c_expr.dtype)
                    for target in query.c_targets if target.name is not None]

    # Create a class for each final result.
    # pylint: disable=invalid-name
    ResultRow = collections.namedtuple(
        'ResultRow',
        [target.name for target in query.c_targets if target.name is not None])

    # Pre-compute lists of the expressions to evaluate.
    group_indexes = (set(query.group_indexes) if query.group_indexes
                     is not None else query.group_indexes)

    # Indexes of the columns for result rows and order rows.
    result_indexes = [
        index for index, c_target in enumerate(query.c_targets)
        if c_target.name
    ]
    order_indexes = query.order_indexes

    # Figure out if we need to compute balance.
    balance = None
    if any(
            uses_balance_column(c_expr) for c_expr in
            itertools.chain([c_target.c_expr for c_target in query.c_targets],
                            [query.c_where] if query.c_where else [])):
        balance = inventory.Inventory()

    # Create the context container which we will use to evaluate rows.
    context = RowContext()
    context.balance = balance

    # Initialize some global properties for use by some of the accessors.
    context.options_map = options_map
    context.account_types = options.get_account_types(options_map)
    context.open_close_map = getters.get_account_open_close(entries)
    context.price_map = prices.build_price_map(entries)

    # Dispatch between the non-aggregated queries and aggregated queries.
    c_where = query.c_where
    schwartz_rows = []
    if query.group_indexes is None:
        # This is a non-aggregated query.

        # Precompute a list of expressions to be evaluated, and of indexes
        # within it for the result rows and the order keys.
        c_target_exprs = [c_target.c_expr for c_target in query.c_targets]

        # Iterate over all the postings once and produce schwartzian rows.
        for entry in filt_entries:
            if isinstance(entry, data.Transaction):
                context.entry = entry
                for posting in entry.postings:
                    context.posting = posting
                    if c_where is None or c_where(context):
                        # Compute the balance.
                        if balance is not None:
                            balance.add_position(posting.position)

                        # Evaluate all the values.
                        values = [c_expr(context) for c_expr in c_target_exprs]

                        # Compute result and sort-key objects.
                        result = ResultRow._make(values[index]
                                                 for index in result_indexes)
                        sortkey = (tuple(values[index]
                                         for index in order_indexes)
                                   if order_indexes is not None else None)
                        schwartz_rows.append((sortkey, result))
    else:
        # This is an aggregated query.

        # Precompute lists of non-aggregate and aggregate expressions to
        # evaluate. For aggregate targets, we hunt down the aggregate
        # sub-expressions to evaluate, to avoid recursion during iteration.
        c_nonaggregate_exprs = []
        c_aggregate_exprs = []
        for index, c_target in enumerate(query.c_targets):
            c_expr = c_target.c_expr
            if index in group_indexes:
                c_nonaggregate_exprs.append(c_expr)
            else:
                _, aggregate_exprs = query_compile.get_columns_and_aggregates(
                    c_expr)
                c_aggregate_exprs.extend(aggregate_exprs)
        # Note: it is possible that there are no aggregates to compute here. You could
        # have all columns be non-aggregates and group-by the entire list of columns.

        # Pre-allocate handles in aggregation nodes.
        allocator = Allocator()
        for c_expr in c_aggregate_exprs:
            c_expr.allocate(allocator)

        # Iterate over all the postings to evaluate the aggregates.
        agg_store = {}
        for entry in filt_entries:
            if isinstance(entry, data.Transaction):
                context.entry = entry
                for posting in entry.postings:
                    context.posting = posting
                    if c_where is None or c_where(context):
                        # Compute the balance.
                        if balance is not None:
                            balance.add_position(posting.position)

                        # Compute the non-aggregate expressions.
                        row_key = tuple(
                            c_expr(context) for c_expr in c_nonaggregate_exprs)

                        # Get an appropriate store for the unique key of this row.
                        try:
                            store = agg_store[row_key]
                        except KeyError:
                            # This is a row; create a new store.
                            store = allocator.create_store()
                            for c_expr in c_aggregate_exprs:
                                c_expr.initialize(store)
                            agg_store[row_key] = store

                        # Update the aggregate expressions.
                        for c_expr in c_aggregate_exprs:
                            c_expr.update(store, context)

        # Iterate over all the aggregations to produce the schwartzian rows.
        for key, store in agg_store.items():
            key_iter = iter(key)
            values = []

            # Finalize the store.
            for c_expr in c_aggregate_exprs:
                c_expr.finalize(store)
            context.store = store

            for index, c_target in enumerate(query.c_targets):
                if index in group_indexes:
                    value = next(key_iter)
                else:
                    value = c_target.c_expr(context)
                values.append(value)

            # Compute result and sort-key objects.
            result = ResultRow._make(values[index] for index in result_indexes)
            sortkey = (tuple(values[index] for index in order_indexes)
                       if order_indexes is not None else None)
            schwartz_rows.append((sortkey, result))

    # Order results if requested.
    if order_indexes is not None:
        schwartz_rows.sort(key=lambda x: x[0],
                           reverse=(query.ordering == 'DESC'))

    # Extract final results, in sorted order at this point.
    result_rows = [x[1] for x in schwartz_rows]

    # Apply distinct.
    if query.distinct:
        result_rows = list(misc_utils.uniquify(result_rows))

    # Apply limit.
    if query.limit is not None:
        result_rows = result_rows[:query.limit]

    # Flatten inventories if requested.
    if query.flatten:
        result_types, result_rows = flatten_results(result_types, result_rows)

    return (result_types, result_rows)
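
execute_query is usually reached through the higher-level helper in beancount.query rather than called directly; a short sketch of that entry point, with an illustrative BQL statement and a placeholder ledger path:

from beancount import loader
from beancount.query import query

entries, _, options_map = loader.load_file('ledger.beancount')  # placeholder path
rtypes, rrows = query.run_query(
    entries, options_map,
    'SELECT account, sum(position) WHERE account ~ "Assets" GROUP BY account')
for row in rrows:
    print(row)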
Example No. 34
def add_unrealized_gains(entries, options_map, subaccount=None):
    """Insert entries for unrealized capital gains.

    This function inserts entries that represent unrealized gains, at the end of
    the available history. It returns a new list of entries, with the new gains
    inserted. It replaces the account type with an entry in an income account.
    Optionally, it can book the gain in a subaccount of the original and income
    accounts.

    Args:
      entries: A list of data directives.
      options_map: A dict of options that conforms to beancount.parser.options.
      subaccount: A string, the optional name of a subaccount to create
        under an account to book the unrealized gain. If this is left to its
        default value, the gain is booked directly in the same account.
    Returns:
      A list of entries, which includes the new unrealized capital gains entries
      at the end, and a list of errors. The new list of entries is still sorted.
    """
    errors = []
    meta = data.new_metadata('<unrealized_gains>', 0)

    account_types = options.get_account_types(options_map)

    # Assert the subaccount name is in valid format.
    if subaccount:
        validation_account = account.join(account_types.assets, subaccount)
        if not account.is_valid(validation_account):
            errors.append(
                UnrealizedError(
                    meta, "Invalid subaccount name: '{}'".format(subaccount),
                    None))
            return entries, errors

    if not entries:
        return (entries, errors)

    # Group positions by (account, cost, cost_currency).
    price_map = prices.build_price_map(entries)

    new_entries = []

    # Start at the first month after our first transaction
    date = date_utils.next_month(entries[0].date)
    last_month = date_utils.next_month(entries[-1].date)
    last_holdings_with_currencies = None
    while date <= last_month:
        date_entries, holdings_with_currencies, date_errors = add_unrealized_gains_at_date(
            entries, new_entries, account_types.income, price_map, date, meta,
            subaccount)
        new_entries.extend(date_entries)
        errors.extend(date_errors)

        if last_holdings_with_currencies:
            for account_, cost_currency, currency in last_holdings_with_currencies - holdings_with_currencies:
                # Create a negation transaction specifically to mark that all gains have been realized
                if subaccount:
                    account_ = account.join(account_, subaccount)

                latest_unrealized_entry = find_previous_unrealized_transaction(
                    new_entries, account_, cost_currency, currency)
                if not latest_unrealized_entry:
                    continue
                entry = data.Transaction(
                    data.new_metadata(meta["filename"],
                                      lineno=999,
                                      kvlist={'prev_currency': currency}),
                    date, flags.FLAG_UNREALIZED, None,
                    'Clear unrealized gains/losses of {}'.format(currency),
                    set(), set(), [])

                # Negate the previous transaction because the unrealized gains are now 0
                for posting in latest_unrealized_entry.postings[:2]:
                    entry.postings.append(
                        data.Posting(posting.account, -posting.units, None,
                                     None, None, None))
                new_entries.append(entry)

        last_holdings_with_currencies = holdings_with_currencies
        date = date_utils.next_month(date)

    # Ensure that the accounts we're going to use to book the postings exist, by
    # creating open entries for those that we generated that weren't already
    # existing accounts.
    new_accounts = {
        posting.account
        for entry in new_entries for posting in entry.postings
    }
    open_entries = getters.get_account_open_close(entries)
    new_open_entries = []
    for index, account_ in enumerate(sorted(new_accounts)):
        if account_ not in open_entries:
            meta = data.new_metadata(meta["filename"], index)
            open_entry = data.Open(meta, new_entries[0].date, account_, None,
                                   None)
            new_open_entries.append(open_entry)

    return (entries + new_open_entries + new_entries, errors)
Example No. 35
def add_unrealized_gains(entries, options_map, subaccount=None):
    """Insert entries for unrealized capital gains.

    This function inserts entries that represent unrealized gains, at the end of
    the available history. It returns a new list of entries, with the new gains
    inserted. It replaces the account type with an entry in an income account.
    Optionally, it can book the gain in a subaccount of the original and income
    accounts.

    Args:
      entries: A list of data directives.
      options_map: A dict of options that conforms to beancount.parser.options.
      subaccount: A string, the optional name of a subaccount to create
        under an account to book the unrealized gain. If this is left to its
        default value, the gain is booked directly in the same account.
    Returns:
      A list of entries, which includes the new unrealized capital gains entries
      at the end, and a list of errors. The new list of entries is still sorted.
    """
    errors = []
    meta = data.new_metadata('<unrealized_gains>', 0)

    account_types = options.get_account_types(options_map)

    # Assert the subaccount name is in valid format.
    if subaccount:
        validation_account = account.join(account_types.assets, subaccount)
        if not account.is_valid(validation_account):
            errors.append(
                UnrealizedError(
                    meta, "Invalid subaccount name: '{}'".format(subaccount),
                    None))
            return entries, errors

    if not entries:
        return (entries, errors)

    # Compute the latest prices and the list of holdings held at cost.
    price_map = prices.build_price_map(entries)
    holdings_list = holdings.get_final_holdings(entries, price_map=price_map)

    # Group positions by (account, cost, cost_currency).
    holdings_list = holdings.aggregate_holdings_by(
        holdings_list, lambda h: (h.account, h.currency, h.cost_currency))

    # Create transactions to account for each position.
    new_entries = []
    latest_date = entries[-1].date
    for index, holding in enumerate(holdings_list):
        if (holding.currency == holding.cost_currency
                or holding.cost_currency is None):
            continue

        # Note: since we're only considering positions held at cost, the
        # transaction that created the position *must* have created at least one
        # price point for that commodity, so we never expect for a price not to
        # be available, which is reasonable.
        if holding.price_number is None:
            # An entry without a price might indicate that this is a holding
            # resulting from leaked cost basis. {0ed05c502e63, b/16}
            if holding.number:
                errors.append(
                    UnrealizedError(
                        meta,
                        "A valid price for {h.currency}/{h.cost_currency} "
                        "could not be found".format(h=holding), None))
            continue

        # Compute the PnL; if there is no profit or loss, we create a
        # corresponding entry anyway.
        pnl = holding.market_value - holding.book_value
        if holding.number == ZERO:
            # If the number of units sum to zero, the holdings should have been
            # zero.
            errors.append(
                UnrealizedError(
                    meta,
                    "Number of units of {} in {} in holdings sum to zero "
                    "for account {} and should not".format(
                        holding.currency, holding.cost_currency,
                        holding.account), None))
            continue

        # Compute the name of the accounts and add the requested subaccount name
        # if requested.
        asset_account = holding.account
        income_account = account.join(account_types.income,
                                      account.sans_root(holding.account))
        if subaccount:
            asset_account = account.join(asset_account, subaccount)
            income_account = account.join(income_account, subaccount)

        # Create a new transaction to account for this difference in gain.
        gain_loss_str = "gain" if pnl > ZERO else "loss"
        narration = (
            "Unrealized {} for {h.number} units of {h.currency} "
            "(price: {h.price_number:.4f} {h.cost_currency} as of {h.price_date}, "
            "average cost: {h.cost_number:.4f} {h.cost_currency})").format(
                gain_loss_str, h=holding)
        entry = data.Transaction(
            data.new_metadata(meta["filename"],
                              lineno=1000 + index), latest_date,
            flags.FLAG_UNREALIZED, None, narration, None, None, [])

        # Book this as income, converting the account name to be the same, but as income.
        # Note: this is a rather convenient but arbitrary choice--maybe it would be best
        # to let the user decide which account to book it to, but I don't have a nice way
        # to let the user specify this.
        #
        # Note: we never set a price because we don't want these to end up in Conversions.
        entry.postings.extend([
            data.Posting(
                asset_account,
                position.Position(
                    position.Lot(holding.cost_currency, None, None), pnl),
                None, None, None),
            data.Posting(
                income_account,
                position.Position(
                    position.Lot(holding.cost_currency, None, None), -pnl),
                None, None, None)
        ])

        new_entries.append(entry)

    # Ensure that the accounts we're going to use to book the postings exist, by
    # creating open entries for those that we generated that weren't already
    # existing accounts.
    new_accounts = {
        posting.account
        for entry in new_entries for posting in entry.postings
    }
    open_entries = getters.get_account_open_close(entries)
    new_open_entries = []
    for index, account_ in enumerate(sorted(new_accounts)):
        if account_ not in open_entries:
            meta = data.new_metadata(meta["filename"], index)
            open_entry = data.Open(meta, latest_date, account_, None, None)
            new_open_entries.append(open_entry)

    return (entries + new_open_entries + new_entries, errors)
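
A sketch of exercising this plugin function directly, assuming a loaded ledger; 'Unrealized' is an illustrative subaccount name. In practice the same behavior is usually enabled from the ledger itself via a plugin directive rather than by calling the function by hand.

from beancount import loader

entries, errors, options_map = loader.load_file('ledger.beancount')  # placeholder path
new_entries, plugin_errors = add_unrealized_gains(entries, options_map, subaccount='Unrealized')
print(len(new_entries) - len(entries), 'entries added for unrealized gains')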
Example No. 36
    def generate_table(self, entries, errors, options_map):
        price_map = prices.build_price_map(entries)
        return table.create_table(
            [(base_quote,)
             for base_quote in sorted(price_map.forward_pairs)],
            [(0, "Base/Quote", self.formatter.render_commodity)])
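
# --- Illustrative sketch (added): the same base/quote listing as the method
# above, but without the report/table machinery; it relies only on
# prices.build_price_map() and the price map's forward_pairs attribute.
def list_commodity_pairs(entries):
    """Return sorted 'BASE/QUOTE' strings for all price pairs found in the entries."""
    from beancount.core import prices  # the same module the method above uses

    price_map = prices.build_price_map(entries)
    return ['{}/{}'.format(base, quote)
            for base, quote in sorted(price_map.forward_pairs)]
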
def add_unrealized_gains(entries, options_map, subaccount=None):
    """Insert entries for unrealized capital gains.

    This function inserts entries that represent unrealized gains, at the end of
    the available history. It returns a new list of entries, with the new gains
    inserted. The income leg's account name is derived by replacing the asset
    account's root (its account type) with the income account type. Optionally,
    the gain can be booked in a subaccount of both the original and the income
    accounts.

    Args:
      entries: A list of data directives.
      options_map: A dict of options that conforms to beancount.parser.options.
      subaccount: A string, the optional name of a subaccount to create under
        each account in which to book the unrealized gain. If this is left to
        its default value of None, the gain is booked directly in the account
        itself.
    Returns:
      A list of entries, which includes the new unrealized capital gains entries
      at the end, and a list of errors. The new list of entries is still sorted.
    """
    errors = []
    meta = data.new_metadata('<unrealized_gains>', 0)

    account_types = options.get_account_types(options_map)

    # Assert that the subaccount name is in a valid format.
    if subaccount:
        validation_account = account.join(account_types.assets, subaccount)
        if not account.is_valid(validation_account):
            errors.append(
                UnrealizedError(meta,
                                "Invalid subaccount name: '{}'".format(subaccount),
                                None))
            return entries, errors

    if not entries:
        return (entries, errors)

    # Build the price map used to value the holdings at each date.
    price_map = prices.build_price_map(entries)

    new_entries = []

    # Start at the first month after our first transaction
    date = date_utils.next_month(entries[0].date)
    last_month = date_utils.next_month(entries[-1].date)
    last_holdings_with_currencies = None
    while date <= last_month:
        date_entries, holdings_with_currencies, date_errors = add_unrealized_gains_at_date(
            entries, new_entries, account_types.income, price_map, date, meta,
            subaccount)
        new_entries.extend(date_entries)
        errors.extend(date_errors)

        if last_holdings_with_currencies:
            for account_, cost_currency, currency in last_holdings_with_currencies - holdings_with_currencies:
                # Create a negation transaction specifically to mark that all gains have been realized
                if subaccount:
                    account_ = account.join(account_, subaccount)

                latest_unrealized_entry = find_previous_unrealized_transaction(new_entries, account_, cost_currency, currency)
                if not latest_unrealized_entry:
                    continue
                entry = data.Transaction(
                    data.new_metadata(meta["filename"], lineno=999,
                                      kvlist={'prev_currency': currency}),
                    date, flags.FLAG_UNREALIZED, None,
                    'Clear unrealized gains/losses of {}'.format(currency),
                    set(), set(), [])

                # Negate the previous transaction because the unrealized gains are now zero.
                for posting in latest_unrealized_entry.postings[:2]:
                    entry.postings.append(
                        data.Posting(
                            posting.account,
                            -posting.units,
                            None,
                            None,
                            None,
                            None))
                new_entries.append(entry)

        last_holdings_with_currencies = holdings_with_currencies
        date = date_utils.next_month(date)

    # Ensure that the accounts we're going to use to book the postings exist, by
    # creating Open entries for any generated account that isn't already open.
    new_accounts = {posting.account
                    for entry in new_entries
                    for posting in entry.postings}
    open_entries = getters.get_account_open_close(entries)
    new_open_entries = []
    for index, account_ in enumerate(sorted(new_accounts)):
        if account_ not in open_entries:
            meta = data.new_metadata(meta["filename"], index)
            open_entry = data.Open(meta, new_entries[0].date, account_, None, None)
            new_open_entries.append(open_entry)

    return (entries + new_open_entries + new_entries, errors)
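
# --- Illustrative helper sketch (added) ---
# The monthly loop above relies on date_utils.next_month(), assumed to return the
# first day of the month following the given date. A plain-datetime equivalent,
# for reference only:
import datetime

def _next_month(date):
    """Return the first day of the month after `date`, e.g. 2023-01-15 -> 2023-02-01."""
    if date.month == 12:
        return datetime.date(date.year + 1, 1, 1)
    return datetime.date(date.year, date.month + 1, 1)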
Exemplo n.º 38
0
def compute_returns(entries,
                    transfer_account,
                    accounts_value,
                    accounts_intflows,
                    accounts_internalize=None,
                    price_map=None,
                    date_begin=None,
                    date_end=None):
    """Compute the returns of a portfolio of accounts.

    Args:
      entries: A list of directives that may affect the account.
      transfer_account: A string, the name of an account to use for internalizing entries
        which need to be split between internal and external flows. A good default is an
        equity account such as 'Equity:Internalized'.
      accounts_value: A set of account name strings, the names of the asset accounts
        included in valuing the portfolio.
      accounts_intflows: A set of account name strings, the names of internal flow
        accounts (normally income and expenses) that aren't external flows.
      accounts_internalize: A set of account name strings used to force internalization.
        See internalize() for details.
      price_map: An instance of PriceMap as computed by prices.build_price_map(). If left
        to its default value of None, we derive the price_map from the entries themselves.
      date_begin: A datetime.date instance, the beginning date of the period to compute
        returns over.
      date_end: A datetime.date instance, the end date of the period to compute returns
        over.
    Returns:
      A triple of
        returns: A dict of currency -> float total returns.
        dates: A pair of (date_first, date_last) datetime.date instances.
        internalized_entries: A short list of the entries that were required to be split
          up in order to internalize their flows. (This is mostly returned for use by
          tests; you can otherwise safely discard it.)
    """
    if not accounts_value:
        raise ValueError(
            "Cannot calculate returns without assets accounts to value")

    if price_map is None:
        price_map = prices.build_price_map(entries)

    # Remove unrealized entries, if any are found. (Note that unrealized gains
    # inserted only at the end of the list of entries have no effect, because this
    # module never creates a period after them. This may change in the future.)
    entries = [
        entry for entry in entries
        if not (isinstance(entry, data.Transaction)
                and entry.flag == flags.FLAG_UNREALIZED)
    ]

    # Internalize entries with internal/external flows.
    entries, internalized_entries = internalize(entries, transfer_account,
                                                accounts_value,
                                                accounts_intflows,
                                                accounts_internalize)
    accounts_value.add(transfer_account)

    # Segment the entries, splitting at entries with external flow and computing
    # the balances before and after. This returns all such periods with the
    # balances at their beginning and end.
    periods, portfolio_entries = segment_periods(entries, accounts_value,
                                                 accounts_intflows, date_begin,
                                                 date_end)

    # From the period balances, compute the returns.
    logging.info("Calculating period returns.")
    logging.info("")
    all_returns = []
    for (period_begin, period_end, balance_begin, balance_end) in periods:
        period_returns, mktvalues = compute_period_returns(
            period_begin, period_end, balance_begin, balance_end, price_map)
        mktvalue_begin, mktvalue_end = mktvalues
        all_returns.append(period_returns)

        try:
            annual_returns = (annualize_returns(period_returns, period_begin,
                                                period_end)
                              if period_end != period_begin else {})
        except OverflowError:
            annual_returns = 'OVERFLOW'

        logging.info("From %s to %s", period_begin, period_end)
        logging.info("  Begin %s => %s", balance_begin.units(), mktvalue_begin)
        logging.info("  End   %s => %s", balance_end.units(), mktvalue_end)
        logging.info("  Returns     %s", period_returns)
        logging.info("  Annualized  %s", annual_returns)
        logging.info("")

    # Compound the piecewise period returns into total returns. Note that we
    # have to be careful to handle all of the available currencies.
    currencies = set(currency for returns in all_returns
                     for currency in returns.keys())
    total_returns = {}
    for currency in currencies:
        total_return = 1.
        for returns in all_returns:
            total_return *= returns.get(currency, 1.)
        total_returns[currency] = total_return
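    # For example, period returns of 1.02 and 1.05 in the same currency compound
    # to a total return of 1.02 * 1.05 = 1.071, i.e. +7.1% over the whole span.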

    date_first = periods[0][0]
    date_last = periods[-1][1]
    return total_returns, (date_first, date_last), internalized_entries
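
# --- Illustrative usage sketch (added; account names are assumptions) ---
# Computing total returns for a small investment portfolio, treating dividends
# and commissions as internal flows. The ledger path and the account names below
# are placeholders and must match the accounts in the loaded file.
def _demo_compute_returns(filename='ledger.beancount'):
    from beancount import loader

    entries, load_errors, options_map = loader.load_file(filename)
    total_returns, (date_first, date_last), internalized = compute_returns(
        entries,
        'Equity:Internalized',
        accounts_value={'Assets:US:Invest:Cash', 'Assets:US:Invest:HOOL'},
        accounts_intflows={'Income:US:Invest:Dividends',
                           'Expenses:Financial:Commissions'})
    # total_returns maps each currency to the product of its period returns,
    # e.g. {'USD': 1.083} would mean an 8.3% total return between the two dates.
    return total_returns, (date_first, date_last)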