Example No. 1
    def test_iter_weeks(self):
        lifetimes_map = {
            ('AAPL', 'USD'):
            [(datetime.date(2014, 2, 3), datetime.date(2014, 3, 10)),
             (datetime.date(2014, 5, 20), datetime.date(2014, 7, 1))],
            ('USD', None): [(datetime.date(2014, 1, 1), None)]
        }

        required_prices = list(
            lifetimes.required_weekly_prices(lifetimes_map,
                                             datetime.date(2014, 9, 1)))
        self.assertEqual([(datetime.date(2014, 1, 31), 'AAPL', 'USD'),
                          (datetime.date(2014, 2, 7), 'AAPL', 'USD'),
                          (datetime.date(2014, 2, 14), 'AAPL', 'USD'),
                          (datetime.date(2014, 2, 21), 'AAPL', 'USD'),
                          (datetime.date(2014, 2, 28), 'AAPL', 'USD'),
                          (datetime.date(2014, 3, 7), 'AAPL', 'USD'),
                          (datetime.date(2014, 5, 16), 'AAPL', 'USD'),
                          (datetime.date(2014, 5, 23), 'AAPL', 'USD'),
                          (datetime.date(2014, 5, 30), 'AAPL', 'USD'),
                          (datetime.date(2014, 6, 6), 'AAPL', 'USD'),
                          (datetime.date(2014, 6, 13), 'AAPL', 'USD'),
                          (datetime.date(2014, 6, 20), 'AAPL', 'USD'),
                          (datetime.date(2014, 6, 27), 'AAPL', 'USD')],
                         required_prices)
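
The test above drives lifetimes.required_weekly_prices, which yields one (date, base, quote) tuple per week spanned by each lifetime interval (Fridays, per the expected output), stopping at the given end date. Below is a minimal sketch of calling it directly; it assumes the lifetimes module used here is Beancount's beancount.ops.lifetimes, an import the snippet does not show.

import datetime

from beancount.ops import lifetimes  # assumed module path; not shown in the snippet above

lifetimes_map = {
    ('AAPL', 'USD'): [(datetime.date(2014, 2, 3), datetime.date(2014, 3, 10))],
}
# Yields one (date, base, quote) tuple per week covered by each interval,
# starting just before the interval and stopping at the end date.
for date, base, quote in lifetimes.required_weekly_prices(
        lifetimes_map, datetime.date(2014, 9, 1)):
    print(date, base, quote)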
Example No. 2
def get_price_jobs_up_to_date(entries,
                              date_last=None,
                              inactive=False,
                              undeclared_source=None,
                              update_rate='weekday',
                              compress_days=1):
    """Get a list of trailing prices to fetch from a stream of entries.

    For each currency pair, the list of dates to fetch runs from just after its
    latest available price (or from the start of its lifetime) up to the latest date.

    Args:
      entries: list of Beancount entries
      date_last: The date up to which to find prices, as an exclusive range end.
      inactive: Include currencies with no balance at the given date. The default
        is to only include those currencies which have a non-zero balance.
      undeclared_source: A string, the name of the default source module to use to
        pull prices for commodities without price source metadata on their
        Commodity directive declaration.
      update_rate: A string, one of 'daily', 'weekday' or 'weekly', the rate at
        which price jobs are generated over each lifetime interval.
      compress_days: An integer, the number of unused days to ignore when
        compressing commodity lifetimes (only used when inactive is False).
    Returns:
      A list of DatedPrice instances.
    """
    price_map = prices.build_price_map(entries)

    # Find the list of declared currencies, and from it build a mapping for
    # tickers for each (base, quote) pair. This is the only place tickers
    # appear.
    declared_triples = find_currencies_declared(entries, date_last)
    currency_map = {(base, quote): psources
                    for base, quote, psources in declared_triples}

    # Compute the initial list of currencies to consider.
    if undeclared_source:
        # Use the full set of possible currencies.
        cur_at_cost = find_prices.find_currencies_at_cost(entries)
        cur_converted = find_prices.find_currencies_converted(
            entries, date_last)
        cur_priced = find_prices.find_currencies_priced(entries, date_last)
        currencies = cur_at_cost | cur_converted | cur_priced
        log_currency_list("Currency held at cost", cur_at_cost)
        log_currency_list("Currency converted", cur_converted)
        log_currency_list("Currency priced", cur_priced)
        default_source = import_source(undeclared_source)
    else:
        # Use the currencies from the Commodity directives.
        currencies = set(currency_map.keys())
        default_source = None

    log_currency_list("Currencies in primary list", currencies)

    # By default, restrict to only the currencies with non-zero balances
    # up to the given date.
    # Also, find the earliest start date to fetch prices from.
    # Look at both latest prices and start dates.
    lifetimes_map = lifetimes.get_commodity_lifetimes(entries)
    commodity_map = getters.get_commodity_directives(entries)
    price_start_dates = {}
    stale_currencies = set()

    if inactive:
        for base_quote in currencies:
            if lifetimes_map[base_quote]:
                # Use first date from lifetime
                lifetimes_map[base_quote] = [(lifetimes_map[base_quote][0][0],
                                              None)]
            else:
                # Insert never active commodities into lifetimes
                # Start from date of currency directive
                base, _ = base_quote
                commodity_entry = commodity_map.get(base, None)
                lifetimes_map[base_quote] = [(commodity_entry.date, None)]
    else:
        # Compress any lifetimes based on compress_days.
        lifetimes_map = lifetimes.compress_lifetimes_days(
            lifetimes_map, compress_days)

    # Trim lifetimes based on the latest price dates.
    for base_quote in lifetimes_map:
        intervals = lifetimes_map[base_quote]
        result = prices.get_latest_price(price_map, base_quote)
        if result is None or result[0] is None:
            lifetimes_map[base_quote] = lifetimes.trim_intervals(
                intervals, None, date_last)
        else:
            latest_price_date = result[0]
            date_first = latest_price_date + datetime.timedelta(days=1)
            if date_first < date_last:
                lifetimes_map[base_quote] = lifetimes.trim_intervals(
                    intervals, date_first, date_last)
            else:
                # We don't need to update if we're already up to date.
                lifetimes_map[base_quote] = []

    # Remove currency pairs we can't fetch any prices for.
    if not default_source:
        keys = list(lifetimes_map.keys())
        for key in keys:
            if not currency_map.get(key, None):
                del lifetimes_map[key]

    # Create price jobs based on fetch rate
    if update_rate == 'daily':
        required_prices = lifetimes.required_daily_prices(lifetimes_map,
                                                          date_last,
                                                          weekdays_only=False)
    elif update_rate == 'weekday':
        required_prices = lifetimes.required_daily_prices(lifetimes_map,
                                                          date_last,
                                                          weekdays_only=True)
    elif update_rate == 'weekly':
        required_prices = lifetimes.required_weekly_prices(
            lifetimes_map, date_last)
    else:
        raise ValueError('Invalid Update Rate')

    # Build up the list of jobs to fetch prices for.
    jobs = []
    for key in required_prices:
        date, base, quote = key
        psources = currency_map.get((base, quote), None)
        if not psources:
            psources = [PriceSource(default_source, base, False)]

        jobs.append(DatedPrice(base, quote, date, psources))

    return sorted(jobs)
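
A hedged usage sketch of the function above, assuming get_price_jobs_up_to_date is in scope (or importable from its defining module, which the snippet does not name) and using a hypothetical ledger path:

import datetime

from beancount import loader

# Hypothetical ledger file; any Beancount input would do here.
entries, errors, options_map = loader.load_file('ledger.beancount')

# date_last is an exclusive range end, so pass tomorrow to include today.
date_last = datetime.date.today() + datetime.timedelta(days=1)

jobs = get_price_jobs_up_to_date(entries,
                                 date_last=date_last,
                                 inactive=False,
                                 update_rate='weekday')
for job in jobs:
    # Each job is a DatedPrice carrying the base/quote pair, the date to
    # fetch, and the price sources to try.
    print(job)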
Example No. 3
def main():
    parse_date = lambda s: parse_datetime(s).date()

    import argparse, logging
    logging.basicConfig(level=logging.INFO,
                        format='%(levelname)-8s: %(message)s')
    parser = argparse.ArgumentParser(description=__doc__.strip())
    parser.add_argument('filename', help='Ledger filename')
    parser.add_argument('-c',
                        '--compress-days',
                        action='store',
                        type=int,
                        default=60,
                        help="The number of unused days to ignore.")
    parser.add_argument('-m',
                        '--min-date',
                        action='store',
                        type=parse_date,
                        default=None,
                        help="The minimum date to consider")
    args = parser.parse_args()

    # Load the ledger.
    entries, errors, options_map = loader.load_file(args.filename)

    # Build a map of existing price entries.
    price_map = {}
    for entry in entries:
        if isinstance(entry, data.Price):
            key = (entry.date, entry.currency, entry.amount.currency)
            price_map[key] = entry

    # Compute the lifetimes of currencies and compress them.
    lifetimes_map = lifetimes.get_commodity_lifetimes(entries)
    lifetimes_map = lifetimes.compress_lifetimes_days(lifetimes_map,
                                                      args.compress_days)

    # Create price directives for missing prices.
    prices = []
    for key in lifetimes.required_weekly_prices(lifetimes_map,
                                                entries[-1].date):
        # If the price entry is already in the ledger, ignore it.
        if key in price_map:
            continue
        date, currency, cost_currency = key

        # Ignore entries too early.
        if args.min_date is not None and date < args.min_date:
            continue

        # Ignore entries with an empty cost currency.
        if cost_currency is None:
            continue

        # Create a price directive.
        price = data.Price(data.new_metadata(__file__, 0), date, currency,
                           Amount(ZERO, cost_currency))
        prices.append(price)

    # For now, just print those out.
    printer.print_entries(prices)
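
Example 3 references several names it never imports (parse_datetime, loader, data, Amount, ZERO, lifetimes, printer). The block below is a sketch of the imports it appears to assume; the module paths are assumptions based on the Beancount 2.x layout and the dateutil library, not taken from the snippet itself:

from dateutil.parser import parse as parse_datetime  # assumption: dateutil supplies parse_datetime

from beancount import loader
from beancount.core import data
from beancount.core.amount import Amount
from beancount.core.number import ZERO
from beancount.ops import lifetimes  # assumption: same module as in the other examples
from beancount.parser import printer

if __name__ == '__main__':
    main()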