def index_finance_total_by_currency(addons, **kw):
    """
    Bug 757581
    Total finance stats, currency breakdown.

    Indexes one document per (addon, currency) pair, skipping pairs whose
    data is already present in the index.
    """
    es = elasticutils.get_es()
    log.info('Indexing total financial stats by currency for %s apps.' %
             len(addons))

    for addon in addons:
        # Only verified transactions (uuid=None) count.
        contributions = Contribution.objects.filter(addon=addon, uuid=None)
        if not contributions.exists():
            continue

        # One document per distinct currency seen for this add-on.
        for currency in set(contributions.values_list('currency', flat=True)):
            try:
                doc_id = ord_word('cur' + str(addon) + currency.lower())
                data = search.get_finance_total_by_currency(
                    contributions, addon, currency)
                if not already_indexed(Contribution, data):
                    Contribution.index(data, bulk=True, id=doc_id)
                es.flush_bulk(forced=True)
            except Exception as exc:
                index_finance_total_by_currency.retry(args=[addons], exc=exc)
                raise
def index_contribution_counts(ids, **kw):
    """
    Contribution stats by addon-date unique pair.  Uses a nested dictionary
    to avoid indexing duplicate contributions with the same addon/date pair.

    For each addon-date, the addon is stored as a top-level key whose value
    is a dict, and the date is stored as a second-level key inside it.  An
    addon-date pair has already been indexed iff dict[addon][date] exists.
    """
    es = elasticutils.get_es()
    qs = (Contribution.objects.filter(id__in=ids)
          .order_by('created').values('addon', 'created'))

    try:
        seen = defaultdict(lambda: defaultdict(dict))
        for row in qs:
            addon = row['addon']
            date = row['created'].strftime('%Y%m%d')
            if date in seen[addon]:
                # Already indexed this addon/date pair.
                continue
            data = search.extract_contribution_counts(row)
            Contribution.index(data, bulk=True, id='%s-%s' % (addon, date))
            seen[addon][date] = 0
        if qs:
            log.info('Indexed %s addons/apps for contribution stats: %s' %
                     (len(seen), qs[0]['created']))
        es.flush_bulk(forced=True)
    except Exception as exc:
        index_contribution_counts.retry(args=[ids], exc=exc)
        raise
def index_finance_total_by_currency(addons, **kw):
    """
    Bug 757581
    Total finance stats, currency breakdown.

    For each add-on, builds one document per distinct contribution currency
    and indexes it into every index returned by get_indices(), skipping
    documents that are already indexed.

    addons -- iterable of add-on ids to aggregate.
    kw['index'] -- optional base index name; defaults to the Contribution
                   model's index.
    """
    index = kw.get('index', Contribution._get_index())
    es = amo.search.get_es()
    log.info('Indexing total financial stats by currency for %s apps.' %
             len(addons))
    for addon in addons:
        # Get all contributions for given add-on (uuid=None only).
        qs = Contribution.objects.filter(addon=addon, uuid=None)
        if not qs.exists():
            continue
        # Get list of distinct currencies.
        currencies = set(qs.values_list('currency', flat=True))
        for currency in currencies:
            try:
                key = ord_word('cur' + str(addon) + currency.lower())
                data = search.get_finance_total(
                    qs, addon, 'currency', currency=currency)
                # BUGFIX: use a distinct loop variable.  The original wrote
                # `for index in get_indices(index)`, clobbering the base
                # index name, so every iteration after the first resolved
                # indices from a concrete index instead of the configured one.
                for idx in get_indices(index):
                    if not already_indexed(Contribution, data, idx):
                        Contribution.index(data, bulk=True, id=key, index=idx)
                es.flush_bulk(forced=True)
            except Exception as exc:
                index_finance_total_by_currency.retry(args=[addons], exc=exc,
                                                      **kw)
                raise
def index_finance_total_by_src(addons, **kw):
    """
    Bug 758059
    Total finance stats, source breakdown.

    Indexes one document per (addon, source) pair, skipping pairs whose
    data is already present in the index.
    """
    es = elasticutils.get_es()
    log.info('Indexing total financial stats by source for %s apps.' %
             len(addons))

    for addon in addons:
        # Only verified transactions (uuid=None) count.
        contributions = Contribution.objects.filter(addon=addon, uuid=None)
        if not contributions.exists():
            continue

        # One document per distinct source seen for this add-on.
        for source in set(contributions.values_list('source', flat=True)):
            try:
                doc_id = ord_word('src' + str(addon) + str(source))
                data = search.get_finance_total_by_src(contributions, addon,
                                                       source)
                if not already_indexed(Contribution, data):
                    Contribution.index(data, bulk=True, id=doc_id)
                es.flush_bulk(forced=True)
            except Exception as exc:
                index_finance_total_by_src.retry(args=[addons], exc=exc)
                raise
def index_finance_total_by_src(addons, **kw):
    """
    Bug 758059
    Total finance stats, source breakdown.

    For each add-on, builds one document per distinct contribution source
    and indexes it into every index returned by get_indices(), skipping
    documents that are already indexed.

    addons -- iterable of add-on ids to aggregate.
    kw['index'] -- optional base index name; defaults to the Contribution
                   model's index.
    """
    index = kw.get('index', Contribution._get_index())
    es = amo.search.get_es()
    log.info('Indexing total financial stats by source for %s apps.' %
             len(addons))
    for addon in addons:
        # Get all contributions for given add-on (uuid=None only).
        qs = Contribution.objects.filter(addon=addon, uuid=None)
        if not qs.exists():
            continue
        # Get list of distinct sources.
        sources = set(qs.values_list('source', flat=True))
        for source in sources:
            try:
                key = ord_word('src' + str(addon) + str(source))
                data = search.get_finance_total(qs, addon, 'source',
                                                source=source)
                # BUGFIX: use a distinct loop variable.  The original wrote
                # `for index in get_indices(index)`, clobbering the base
                # index name, so every iteration after the first resolved
                # indices from a concrete index instead of the configured one.
                for idx in get_indices(index):
                    if not already_indexed(Contribution, data, idx):
                        Contribution.index(data, bulk=True, id=key, index=idx)
                es.flush_bulk(forced=True)
            except Exception as exc:
                index_finance_total_by_src.retry(args=[addons], exc=exc, **kw)
                raise
def index_contribution_counts(ids, **kw):
    """
    Contribution stats by addon-date unique pair.

    A nested dictionary keeps the function from indexing the same addon/date
    pair twice: the addon id is the top-level key, mapping to a dict whose
    keys are the dates already indexed for that addon.
    """
    es = elasticutils.get_es()
    rows = (Contribution.objects.filter(id__in=ids)
            .order_by('created').values('addon', 'created'))

    try:
        indexed = defaultdict(lambda: defaultdict(dict))
        for contribution in rows:
            addon = contribution['addon']
            date = contribution['created'].strftime('%Y%m%d')
            # Date for addon not processed yet: index it and record the key.
            if date not in indexed[addon]:
                key = '%s-%s' % (addon, date)
                Contribution.index(
                    search.extract_contribution_counts(contribution),
                    bulk=True, id=key)
                indexed[addon][date] = 0
        if rows:
            log.info('Indexed %s addons/apps for contribution stats: %s' %
                     (len(indexed), rows[0]['created']))
        es.flush_bulk(forced=True)
    except Exception as exc:
        index_contribution_counts.retry(args=[ids], exc=exc)
        raise
def index_finance_daily(ids, **kw):
    """
    Bug 748015
    Takes a list of Contribution ids and uses its addon and date fields to
    index stats for that day.

    Contribution stats by addon-date unique pair.  Uses a nested dictionary
    to avoid indexing duplicate contributions with the same addon/date pair:
    the addon is a top-level key mapping to a dict keyed by date, so
    dict[addon][date] existing means the pair was already handled.  This
    adds some speed-up when batch processing.

    ids -- ids of apps.stats.Contribution objects
    kw['index'] -- optional base index name; defaults to the Contribution
                   model's index.
    """
    index = kw.get('index', Contribution._get_index())
    es = amo.search.get_es()
    # Get contributions.
    qs = (Contribution.objects.filter(id__in=ids).order_by('created').values(
        'addon', 'created'))
    # BUGFIX: guard the empty case -- the original's qs[0] raised IndexError
    # before any work when ids matched no contributions.
    if not qs:
        return
    log.info('[%s] Indexing %s contributions for daily stats.' %
             (qs[0]['created'], len(ids)))

    addons_dates = defaultdict(lambda: defaultdict(dict))
    for contribution in qs:
        addon = contribution['addon']
        date = contribution['created'].strftime('%Y%m%d')
        try:
            # Date for add-on not processed, index it and give it key.
            if date not in addons_dates[addon]:
                key = ord_word('fin' + str(addon) + str(date))
                data = search.get_finance_daily(contribution)
                # BUGFIX: use a distinct loop variable.  The original wrote
                # `for index in get_indices(index)`, clobbering the base
                # index name after the first contribution.
                for idx in get_indices(index):
                    if not already_indexed(Contribution, data, idx):
                        Contribution.index(data, bulk=True, id=key, index=idx)
                addons_dates[addon][date] = 0
            es.flush_bulk(forced=True)
        except Exception as exc:
            index_finance_daily.retry(args=[ids], exc=exc, **kw)
            raise
def index_finance_daily(ids, **kw):
    """
    Bug 748015
    Takes a list of Contribution ids and uses its addon and date fields to
    index stats for that day.

    Contribution stats by addon-date unique pair.  Uses a nested dictionary
    to avoid indexing duplicate contributions with the same addon/date pair:
    the addon is a top-level key mapping to a dict keyed by date, so
    dict[addon][date] existing means the pair was already handled.  This
    adds some speed-up when batch processing.

    ids -- ids of apps.stats.Contribution objects
    kw['index'] -- optional base index name; defaults to the Contribution
                   model's index.
    """
    index = kw.get('index', Contribution._get_index())
    es = amo.search.get_es()
    # Get contributions.
    qs = (Contribution.objects.filter(id__in=ids)
          .order_by('created').values('addon', 'created'))
    # BUGFIX: guard the empty case -- the original's qs[0] raised IndexError
    # before any work when ids matched no contributions.
    if not qs:
        return
    log.info('[%s] Indexing %s contributions for daily stats.' %
             (qs[0]['created'], len(ids)))

    addons_dates = defaultdict(lambda: defaultdict(dict))
    for contribution in qs:
        addon = contribution['addon']
        date = contribution['created'].strftime('%Y%m%d')
        try:
            # Date for add-on not processed, index it and give it key.
            if date not in addons_dates[addon]:
                key = ord_word('fin' + str(addon) + str(date))
                data = search.get_finance_daily(contribution)
                # BUGFIX: use a distinct loop variable.  The original wrote
                # `for index in get_indices(index)`, clobbering the base
                # index name after the first contribution.
                for idx in get_indices(index):
                    if not already_indexed(Contribution, data, idx):
                        Contribution.index(data, bulk=True, id=key, index=idx)
                addons_dates[addon][date] = 0
            es.flush_bulk(forced=True)
        except Exception as exc:
            index_finance_daily.retry(args=[ids], exc=exc, **kw)
            raise
def index_addon_aggregate_contributions(addons, **kw):
    """
    Aggregates stats from all of the contributions for a given addon.

    Builds revenue / sales / refunds totals per addon and indexes one
    document per addon.

    addons -- iterable of addon ids to aggregate.
    """
    es = elasticutils.get_es()
    log.info('Aggregating total contribution stats for %s addons' %
             len(addons))
    try:
        # BUGFIX: the original wrapped everything below in a redundant
        # `for addon in addons:` loop even though the query already covers
        # all addons via addon__in, repeating the identical aggregation and
        # indexing len(addons) times (and shadowing `addon`).
        # Only count uuid=None; those are verified transactions.
        qs = Contribution.objects.filter(addon__in=addons, uuid=None)

        # Lists of annotated dicts, e.g. [{'addon': 1, 'revenue': 5}, ...].
        revenues = qs.values('addon').annotate(revenue=Sum('amount'))
        sales = qs.values('addon').annotate(sales=Count('id'))
        refunds = (qs.filter(refund__isnull=False)
                   .values('addon').annotate(refunds=Count('id')))

        # Merge revenue, sales, refunds per addon.
        data_dict = defaultdict(lambda: defaultdict(dict))
        for revenue in revenues:
            data_dict[str(revenue['addon'])]['revenue'] = revenue['revenue']
        for sale in sales:
            data_dict[str(sale['addon'])]['sales'] = sale['sales']
        for refund in refunds:
            data_dict[str(refund['addon'])]['refunds'] = refund['refunds']

        for addon, addon_dict in data_dict.iteritems():
            data = {
                'addon': addon,
                'count': addon_dict['sales'],
                'revenue': addon_dict['revenue'],
                'refunds': addon_dict['refunds'],
            }
            Contribution.index(data, bulk=True, id=addon)
        es.flush_bulk(forced=True)
    except Exception as exc:
        index_addon_aggregate_contributions.retry(args=[addons], exc=exc)
        raise
def index_finance_total(addons, **kw):
    """
    Aggregates financial stats from all of the contributions for a given
    app, indexing one document per app and skipping apps whose data is
    already indexed.
    """
    es = elasticutils.get_es()
    log.info('Indexing total financial stats for %s apps.' % len(addons))

    for addon in addons:
        # Only verified transactions (uuid=None) count.
        contributions = Contribution.objects.filter(addon=addon, uuid=None)
        if not contributions.exists():
            continue
        try:
            doc_id = ord_word('tot' + str(addon))
            data = search.get_finance_total(contributions, addon)
            if not already_indexed(Contribution, data):
                Contribution.index(data, bulk=True, id=doc_id)
            es.flush_bulk(forced=True)
        except Exception as exc:
            index_finance_total.retry(args=[addons], exc=exc)
            raise
def index_addon_aggregate_contributions(addons, **kw):
    """
    Aggregates stats from all of the contributions for a given addon.

    Builds revenue / sales / refunds totals per addon and indexes one
    document per addon.

    addons -- iterable of addon ids to aggregate.
    """
    es = elasticutils.get_es()
    log.info('Aggregating total contribution stats for %s addons' %
             len(addons))
    try:
        # BUGFIX: the original wrapped everything below in a redundant
        # `for addon in addons:` loop even though the query already covers
        # all addons via addon__in, repeating the identical aggregation and
        # indexing len(addons) times (and shadowing `addon`).
        # Only count uuid=None; those are verified transactions.
        qs = Contribution.objects.filter(addon__in=addons, uuid=None)

        # Lists of annotated dicts, e.g. [{'addon': 1, 'revenue': 5}, ...].
        revenues = qs.values('addon').annotate(revenue=Sum('amount'))
        sales = qs.values('addon').annotate(sales=Count('id'))
        refunds = (qs.filter(refund__isnull=False)
                   .values('addon').annotate(refunds=Count('id')))

        # Merge revenue, sales, refunds per addon.
        data_dict = defaultdict(lambda: defaultdict(dict))
        for revenue in revenues:
            data_dict[str(revenue['addon'])]['revenue'] = revenue['revenue']
        for sale in sales:
            data_dict[str(sale['addon'])]['sales'] = sale['sales']
        for refund in refunds:
            data_dict[str(refund['addon'])]['refunds'] = refund['refunds']

        for addon, addon_dict in data_dict.iteritems():
            data = {
                'addon': addon,
                'count': addon_dict['sales'],
                'revenue': addon_dict['revenue'],
                'refunds': addon_dict['refunds'],
            }
            Contribution.index(data, bulk=True, id=addon)
        es.flush_bulk(forced=True)
    except Exception as exc:
        index_addon_aggregate_contributions.retry(args=[addons], exc=exc)
        raise