def test_lifetimes_different_currencies(self, entries, errors, _):
    """
    2000-01-01 open Assets:US:Invest:Cash USD
    2000-01-01 open Assets:US:Invest:AAPL AAPL
    2000-01-01 open Assets:US:Invest:CSCO CSCO
    2000-01-01 open Assets:US:Invest:INTL INTL
    2000-01-01 open Assets:US:Invest:IBM IBM
    2000-01-01 open Income:US:Invest:PnL
    2000-01-01 open Assets:US:Bank:Checking USD

    2000-01-02 * "Deposit"
      Assets:US:Bank:Checking   -10000 USD
      Assets:US:Invest:Cash      10000 USD

    2001-02-10 * "Buy shares"
      Assets:US:Invest:AAPL   10 AAPL {43.00 USD}
      Assets:US:Invest:CSCO   10 CSCO {21.00 USD}
      Assets:US:Invest:INTL   10 INTL {75.00 USD}
      Assets:US:Invest:IBM    10 IBM {16.00 USD}
      Assets:US:Invest:Cash

    2001-07-20 * "Sell AAPL"
      Assets:US:Invest:AAPL  -10 AAPL {43.00 USD}
      Assets:US:Invest:Cash  500.00 USD
      Income:US:Invest:PnL

    2001-07-21 * "Sell CSCO"
      Assets:US:Invest:CSCO  -10 CSCO {21.00 USD}
      Assets:US:Invest:Cash  300.00 USD
      Income:US:Invest:PnL

    2001-07-22 * "Sell INTL"
      Assets:US:Invest:INTL  -10 INTL {75.00 USD}
      Assets:US:Invest:Cash  800.00 USD
      Income:US:Invest:PnL

    2001-07-23 * "Sell IBM"
      Assets:US:Invest:IBM   -10 IBM {16.00 USD}
      Assets:US:Invest:Cash  200.00 USD
      Income:US:Invest:PnL
    """
    self.assertFalse(errors)
    # Each commodity's lifetime closes on the day after its last sale;
    # plain USD stays open forever.
    expected = {
        ('USD', None): [(datetime.date(2000, 1, 2), None)],
        ('AAPL', 'USD'): [(datetime.date(2001, 2, 10),
                           datetime.date(2001, 7, 21))],
        ('CSCO', 'USD'): [(datetime.date(2001, 2, 10),
                           datetime.date(2001, 7, 22))],
        ('INTL', 'USD'): [(datetime.date(2001, 2, 10),
                           datetime.date(2001, 7, 23))],
        ('IBM', 'USD'): [(datetime.date(2001, 2, 10),
                          datetime.date(2001, 7, 24))],
    }
    self.assertEqual(expected, lifetimes.get_commodity_lifetimes(entries))
def test_lifetimes_closed_open(self, entries, errors, _):
    """
    2000-01-01 open Assets:US:Invest:Cash USD
    2000-01-01 open Assets:US:Invest:AAPL AAPL
    2000-01-01 open Income:US:Invest:PnL
    2000-01-01 open Assets:US:Bank:Checking USD

    2000-01-02 * "Deposit"
      Assets:US:Bank:Checking  -10000 USD
      Assets:US:Invest:Cash     10000 USD

    2001-03-10 * "Buy Apple"
      Assets:US:Invest:AAPL  10 AAPL {43.40 USD}
      Assets:US:Invest:Cash

    2001-08-10 * "Sell some Apple - some will remain"
      Assets:US:Invest:AAPL  -8 AAPL {43.40 USD}
      Assets:US:Invest:Cash  360.00 USD
      Income:US:Invest:PnL

    2001-12-10 * "Sell remaining Apple - this completes the interval"
      Assets:US:Invest:AAPL  -2 AAPL {43.40 USD}
      Assets:US:Invest:Cash  96.00 USD
      Income:US:Invest:PnL

    2002-02-10 * "Buy Apple again - this begins a new interval"
      Assets:US:Invest:AAPL  5 AAPL {48.00 USD}
      Assets:US:Invest:Cash

    2002-06-10 * "Sell Apple again - this ends it"
      Assets:US:Invest:AAPL  -5 AAPL {48.00 USD}
      Assets:US:Invest:Cash  260.00 USD
      Income:US:Invest:PnL

    2003-04-10 * "Buy Apple - keep this open"
      Assets:US:Invest:AAPL  7 AAPL {50.00 USD}
      Assets:US:Invest:Cash
    """
    self.assertFalse(errors)
    # AAPL produces two closed intervals (each ending the day after the
    # position went to zero) plus one still-open interval.
    aapl_intervals = [
        (datetime.date(2001, 3, 10), datetime.date(2001, 12, 11)),
        (datetime.date(2002, 2, 10), datetime.date(2002, 6, 11)),
        (datetime.date(2003, 4, 10), None),
    ]
    self.assertEqual(
        {
            ('USD', None): [(datetime.date(2000, 1, 2), None)],
            ('AAPL', 'USD'): aapl_intervals,
        },
        lifetimes.get_commodity_lifetimes(entries))
def render_text(self, entries, errors, options_map, file):
    """Write one line per commodity pair listing its lifetime intervals."""
    pair_lifetimes = lifetimes.get_commodity_lifetimes(entries)
    if self.args.compress_days:
        pair_lifetimes = lifetimes.compress_lifetimes_days(
            pair_lifetimes, self.args.compress_days)

    # Human-readable label per pair: "BASE/QUOTE", or just "BASE" when
    # there is no quote currency.
    labels = {}
    for pair in pair_lifetimes:
        base, quote = pair
        labels[pair] = '{}/{}'.format(base, quote) if quote else base
    label_width = max(len(label) for label in labels.values())

    # Order output by the start date of the first interval, then by pair.
    ordered_pairs = sorted(pair_lifetimes.items(),
                           key=lambda item: (item[1][0][0], item[0]))
    for pair, intervals in ordered_pairs:
        spans = ' / '.join('{} - {}'.format(begin, end or '')
                           for begin, end in intervals)
        file.write('{:{width}}: {}\n'.format(labels[pair], spans,
                                             width=label_width))
def test_lifetimes_cross_accounts(self, entries, errors, _):
    """
    2000-01-01 open Assets:US:InvestA:Cash USD
    2000-01-01 open Assets:US:InvestA:AAPL AAPL
    2000-01-01 open Income:US:InvestA:PnL
    2000-01-01 open Assets:US:InvestB:Cash USD
    2000-01-01 open Assets:US:InvestB:AAPL AAPL
    2000-01-01 open Income:US:InvestB:PnL
    2000-01-01 open Assets:US:Bank:Checking USD

    2000-01-02 * "Deposit"
      Assets:US:Bank:Checking  -10000 USD
      Assets:US:InvestA:Cash     5000 USD
      Assets:US:InvestB:Cash     5000 USD

    2001-03-10 * "Buy Apple - in first account"
      Assets:US:InvestA:AAPL  10 AAPL {43.40 USD}
      Assets:US:InvestA:Cash

    2001-05-10 * "Buy Apple - in second account"
      Assets:US:InvestB:AAPL  10 AAPL {44.10 USD}
      Assets:US:InvestB:Cash

    2001-06-10 * "Sell Apple - in first account, resulting position is zero"
      Assets:US:InvestA:AAPL  -10 AAPL {43.40 USD}
      Assets:US:InvestA:Cash  500.00 USD
      Income:US:InvestA:PnL

    2001-06-11 balance Assets:US:InvestA:AAPL   0 AAPL

    2001-09-10 * "Sell Apple - in second account, this is the last AAPL position"
      Assets:US:InvestB:AAPL  -10 AAPL {44.10 USD}
      Assets:US:InvestB:Cash  500.00 USD
      Income:US:InvestB:PnL

    2001-09-11 balance Assets:US:InvestB:AAPL   0 AAPL
    """
    self.assertFalse(errors)
    # The AAPL lifetime is a single interval spanning both accounts: it
    # only closes once the second account's position also reaches zero.
    expected = {
        ('AAPL', 'USD'): [(datetime.date(2001, 3, 10),
                           datetime.date(2001, 9, 11))],
        ('USD', None): [(datetime.date(2000, 1, 2), None)],
    }
    self.assertEqual(expected, lifetimes.get_commodity_lifetimes(entries))
def get_price_jobs_up_to_date(entries,
                              date_last=None,
                              inactive=False,
                              undeclared_source=None,
                              update_rate='weekday',
                              compress_days=1):
    """Get a list of trailing prices to fetch from a stream of entries.

    The list of dates runs from the latest available price up to the latest date.

    Args:
      entries: list of Beancount entries
      date_last: The date up to where to find prices to as an exclusive range end.
      inactive: Include currencies with no balance at the given date. The default
        is to only include those currencies which have a non-zero balance.
      undeclared_source: A string, the name of the default source module to use to
        pull prices for commodities without a price source metadata on their
        Commodity directive declaration.
      update_rate: A string, one of 'daily', 'weekday' or 'weekly', selecting how
        often a price job is generated within each lifetime interval.
      compress_days: An integer; lifetime intervals separated by fewer unused days
        than this are merged (only applied when 'inactive' is False).
    Returns:
      A list of DatedPrice instances.
    """
    price_map = prices.build_price_map(entries)

    # Find the list of declared currencies, and from it build a mapping for
    # tickers for each (base, quote) pair. This is the only place tickers
    # appear.
    declared_triples = find_currencies_declared(entries, date_last)
    currency_map = {(base, quote): psources
                    for base, quote, psources in declared_triples}

    # Compute the initial list of currencies to consider.
    if undeclared_source:
        # Use the full set of possible currencies: anything held at cost,
        # converted, or priced anywhere in the ledger.
        cur_at_cost = find_prices.find_currencies_at_cost(entries)
        cur_converted = find_prices.find_currencies_converted(entries, date_last)
        cur_priced = find_prices.find_currencies_priced(entries, date_last)
        currencies = cur_at_cost | cur_converted | cur_priced
        log_currency_list("Currency held at cost", cur_at_cost)
        log_currency_list("Currency converted", cur_converted)
        log_currency_list("Currency priced", cur_priced)
        default_source = import_source(undeclared_source)
    else:
        # Use the currencies from the Commodity directives.
        currencies = set(currency_map.keys())
        default_source = None

    log_currency_list("Currencies in primary list", currencies)

    # By default, restrict to only the currencies with non-zero balances
    # up to the given date.
    # Also, find the earliest start date to fetch prices from.
    # Look at both latest prices and start dates.
    lifetimes_map = lifetimes.get_commodity_lifetimes(entries)
    commodity_map = getters.get_commodity_directives(entries)
    # NOTE(review): these two accumulators are never read or written below;
    # they look like leftovers from an earlier revision — confirm and remove.
    price_start_dates = {}
    stale_currencies = set()

    if inactive:
        for base_quote in currencies:
            if lifetimes_map[base_quote]:
                # Use first date from lifetime: keep only the earliest start
                # and treat the pair as open-ended.
                lifetimes_map[base_quote] = [(lifetimes_map[base_quote][0][0],
                                              None)]
            else:
                # Insert never active commodities into lifetimes.
                # Start from date of currency directive.
                base, _ = base_quote
                commodity_entry = commodity_map.get(base, None)
                # NOTE(review): if no Commodity directive exists for 'base',
                # commodity_entry is None and '.date' raises AttributeError —
                # confirm callers guarantee a directive for each such currency.
                lifetimes_map[base_quote] = [(commodity_entry.date, None)]
    else:
        # Compress any lifetimes based on compress_days.
        lifetimes_map = lifetimes.compress_lifetimes_days(lifetimes_map,
                                                          compress_days)

    # Trim lifetimes based on latest price dates: only dates after the most
    # recent known price (and before date_last) need fetching.
    for base_quote in lifetimes_map:
        intervals = lifetimes_map[base_quote]
        result = prices.get_latest_price(price_map, base_quote)
        if (result is None or result[0] is None):
            # No price known at all: fetch over the whole lifetime.
            lifetimes_map[base_quote] = \
                lifetimes.trim_intervals(intervals, None, date_last)
        else:
            latest_price_date = result[0]
            date_first = latest_price_date + datetime.timedelta(days=1)
            # NOTE(review): this comparison assumes date_last is not None,
            # despite the None default — confirm callers always pass a date.
            if date_first < date_last:
                lifetimes_map[base_quote] = \
                    lifetimes.trim_intervals(intervals,
                                             date_first, date_last)
            else:
                # We don't need to update if we're already up to date.
                lifetimes_map[base_quote] = []

    # Remove currency pairs we can't fetch any prices for: without a default
    # source, only declared pairs (present in currency_map) are fetchable.
    if not default_source:
        keys = list(lifetimes_map.keys())
        for key in keys:
            if not currency_map.get(key, None):
                del lifetimes_map[key]

    # Create price jobs based on fetch rate.
    if update_rate == 'daily':
        required_prices = lifetimes.required_daily_prices(
            lifetimes_map, date_last, weekdays_only=False)
    elif update_rate == 'weekday':
        required_prices = lifetimes.required_daily_prices(
            lifetimes_map, date_last, weekdays_only=True)
    elif update_rate == 'weekly':
        required_prices = lifetimes.required_weekly_prices(
            lifetimes_map, date_last)
    else:
        raise ValueError('Invalid Update Rate')

    jobs = []
    # Build up the list of jobs to fetch prices for, falling back to the
    # default source for pairs without declared price sources.
    for key in required_prices:
        date, base, quote = key
        psources = currency_map.get((base, quote), None)
        if not psources:
            psources = [PriceSource(default_source, base, False)]
        jobs.append(DatedPrice(base, quote, date, psources))
    return sorted(jobs)
def main():
    """Generate Price directives for dates missing from the ledger and print them.

    Loads the ledger named on the command line, computes the (compressed)
    lifetimes of all commodity pairs, and prints a zero-amount Price directive
    for every required weekly price date that has no Price entry yet.
    """
    # PEP 8: one import per line; also, a named 'def' instead of assigning a
    # lambda (E731) so tracebacks show a useful function name.
    import argparse
    import logging

    def parse_date(string):
        """Parse a date string into a datetime.date."""
        return parse_datetime(string).date()

    logging.basicConfig(level=logging.INFO,
                        format='%(levelname)-8s: %(message)s')
    parser = argparse.ArgumentParser(description=__doc__.strip())
    parser.add_argument('filename', help='Ledger filename')
    parser.add_argument('-c', '--compress-days', action='store',
                        type=int, default=60,
                        help="The number of unused days to ignore.")
    parser.add_argument('-m', '--min-date', action='store',
                        type=parse_date, default=None,
                        help="The minimum date to consider")
    args = parser.parse_args()

    # Load the ledger.
    entries, errors, options_map = loader.load_file(args.filename)

    # Build a map of existing price entries keyed by (date, base, quote).
    price_map = {}
    for entry in entries:
        if isinstance(entry, data.Price):
            key = (entry.date, entry.currency, entry.amount.currency)
            price_map[key] = entry

    # Compute the lifetimes of currencies and compress them.
    lifetimes_map = lifetimes.get_commodity_lifetimes(entries)
    lifetimes_map = lifetimes.compress_lifetimes_days(lifetimes_map,
                                                      args.compress_days)

    # Create price directives for missing prices. (Renamed from 'prices' to
    # avoid shadowing the beancount 'prices' module name.)
    missing_prices = []
    for key in lifetimes.required_weekly_prices(lifetimes_map,
                                                entries[-1].date):
        # If the price entry is already in the ledger, ignore it.
        if key in price_map:
            continue
        date, currency, cost_currency = key
        # Ignore entries too early.
        if args.min_date is not None and date < args.min_date:
            continue
        # Ignore entries with an empty cost currency.
        if cost_currency is None:
            continue
        # Create a placeholder price directive (amount ZERO, to be filled in).
        price = data.Price(data.new_metadata(__file__, 0),
                           date, currency, Amount(ZERO, cost_currency))
        missing_prices.append(price)

    # For now, just print those out.
    printer.print_entries(missing_prices)