def weekly_downloads():
    """Update 7-day add-on download counts.

    Sums the past week of ``download_counts`` rows per add-on, zero-fills
    add-ons with no downloads, and bulk-writes the totals into
    ``addons.weeklydownloads`` via a temporary table.
    """
    raise_if_reindex_in_progress('amo')
    cursor = connection.cursor()
    # Per-add-on download totals over the trailing 7 days.
    cursor.execute("""
        SELECT addon_id, SUM(count) AS weekly_count
        FROM download_counts
        WHERE `date` >= DATE_SUB(CURDATE(), INTERVAL 7 DAY)
        GROUP BY addon_id
        ORDER BY addon_id""")
    counts = cursor.fetchall()
    addon_ids = [r[0] for r in counts]
    if not addon_ids:
        # Nothing downloaded this week; also avoids an invalid `IN ()` below.
        return
    # Zero-fill every add-on that had no downloads this week so stale
    # weeklydownloads values get reset rather than left behind.
    cursor.execute("""
        SELECT id, 0
        FROM addons
        WHERE id NOT IN %s""", (addon_ids,))
    counts += cursor.fetchall()
    # Stage (addon_id, count) pairs in a temp table, then join-update
    # addons in a single statement instead of one UPDATE per row.
    cursor.execute("""
        CREATE TEMPORARY TABLE tmp_wd
        (addon_id INT PRIMARY KEY, count INT)""")
    cursor.execute('INSERT INTO tmp_wd VALUES %s' %
                   ','.join(['(%s,%s)'] * len(counts)),
                   list(itertools.chain(*counts)))
    cursor.execute("""
        UPDATE addons INNER JOIN tmp_wd
            ON addons.id = tmp_wd.addon_id
        SET weeklydownloads = tmp_wd.count""")
    # Drop explicitly so a reused connection can run this job again.
    cursor.execute("DROP TABLE IF EXISTS tmp_wd")
    transaction.commit_unless_managed()
def index_latest_mkt_stats(index=None, aliased=True):
    """Index marketplace stats from the last indexed date through today.

    Looks up the most recently indexed ``Contribution`` and ``Installed``
    dates (falling back to yesterday when the index is empty or the
    search fails) and reindexes from the older of the two so neither
    data set is left with a gap.

    :param index: optional ES index name passed through to the command.
    :param aliased: forwarded to the ``index_mkt_stats`` command.
    """
    raise_if_reindex_in_progress()
    yesterday = datetime.date.today() - datetime.timedelta(days=1)
    try:
        latest = Contribution.search(index).order_by('-date').values_dict()
        latest_contribution = latest and latest[0]['date'] or yesterday
    except pyes.exceptions.SearchPhaseExecutionException:
        # Index missing/unhealthy: reindex at least the last day.
        latest_contribution = yesterday
    try:
        latest = Installed.search(index).order_by('-date').values_dict()
        latest_install = latest and latest[0]['date'] or yesterday
    except pyes.exceptions.SearchPhaseExecutionException:
        latest_install = yesterday
    # Start from the older of the two dates so both stats catch up.
    latest = min(latest_contribution, latest_install)
    fmt = lambda d: d.strftime('%Y-%m-%d')
    date_range = '%s:%s' % (fmt(latest), fmt(datetime.date.today()))
    cron_log.info('index_mkt_stats --date=%s' % date_range)
    # Bug fix: forward the caller's `aliased` flag; it was hard-coded to
    # True, silently ignoring index_latest_mkt_stats(aliased=False).
    call_command('index_mkt_stats', addons=None, date=date_range,
                 index=index, aliased=aliased)
def weekly_downloads():
    """Update 7-day add-on download counts.

    Sums the past week of ``download_counts`` rows per add-on, zero-fills
    add-ons with no downloads, and bulk-writes the totals into
    ``addons.weeklydownloads`` via a temporary table.
    """
    raise_if_reindex_in_progress()
    cursor = connection.cursor()
    # Per-add-on download totals over the trailing 7 days.
    cursor.execute("""
        SELECT addon_id, SUM(count) AS weekly_count
        FROM download_counts
        WHERE `date` >= DATE_SUB(CURDATE(), INTERVAL 7 DAY)
        GROUP BY addon_id
        ORDER BY addon_id""")
    counts = cursor.fetchall()
    addon_ids = [r[0] for r in counts]
    if not addon_ids:
        # Nothing downloaded this week; also avoids an invalid `IN ()` below.
        return
    # Zero-fill add-ons with no downloads this week so stale totals reset.
    cursor.execute("""
        SELECT id, 0
        FROM addons
        WHERE id NOT IN %s""", (addon_ids,))
    counts += cursor.fetchall()
    # Stage pairs in a temp table, then join-update addons in one statement.
    cursor.execute("""
        CREATE TEMPORARY TABLE tmp
        (addon_id INT PRIMARY KEY, count INT)""")
    cursor.execute('INSERT INTO tmp VALUES %s' %
                   ','.join(['(%s,%s)'] * len(counts)),
                   list(itertools.chain(*counts)))
    cursor.execute("""
        UPDATE addons INNER JOIN tmp ON addons.id = tmp.addon_id
        SET weeklydownloads = tmp.count""")
    # Bug fix: drop the temp table explicitly (as the other variants of
    # this job do) so a pooled/persistent connection can run the job
    # again without CREATE TEMPORARY TABLE failing on a name clash.
    cursor.execute("DROP TABLE IF EXISTS tmp")
    transaction.commit_unless_managed()
def update_daily_theme_user_counts():
    """Store the day's theme popularity counts into ThemeUserCount.

    Fans the (addon, popularity) pairs out to celery subtasks in chunks
    of 250, tagging each task with today's date string.
    """
    raise_if_reindex_in_progress("amo")
    d = Persona.objects.values_list("addon", "popularity").order_by("id")
    # Bug fix: %M is the *minute*; %m is the month. The old format
    # stamped rows with minute-day-year instead of month-day-year.
    date = datetime.now().strftime("%m-%d-%y")
    ts = [_update_daily_theme_user_counts.subtask(args=[chunk],
                                                  kwargs={"date": date})
          for chunk in chunked(d, 250)]
    TaskSet(ts).apply_async()
def update_daily_theme_user_counts():
    """Store the day's theme popularity counts into ThemeUserCount.

    Fans the (addon, popularity) pairs out to celery subtasks in chunks
    of 250, tagging each task with today's date string.
    """
    raise_if_reindex_in_progress('amo')
    d = Persona.objects.values_list('addon', 'popularity').order_by('id')
    # Bug fix: %M is the *minute*; %m is the month. The old format
    # stamped rows with minute-day-year instead of month-day-year.
    date = datetime.now().strftime('%m-%d-%y')
    ts = [_update_daily_theme_user_counts.subtask(args=[chunk],
                                                  kwargs={'date': date})
          for chunk in chunked(d, 250)]
    TaskSet(ts).apply_async()
def update_addons_collections_downloads():
    """Update addons+collections download totals."""
    raise_if_reindex_in_progress()
    # Total downloads per (addon, collection) pair.
    totals = (AddonCollectionCount.objects
              .values('addon', 'collection')
              .annotate(sum=Sum('count')))
    # Fan the work out to celery in chunks of 100 rows.
    subtasks = []
    for chunk in chunked(totals, 100):
        subtasks.append(
            tasks.update_addons_collections_downloads.subtask(args=[chunk]))
    TaskSet(subtasks).apply_async()
def index_latest_stats(index=None, aliased=True):
    """Reindex add-on update-count stats from the most recently indexed
    date through today.  (`aliased` is accepted but not forwarded.)
    """
    raise_if_reindex_in_progress()
    hits = UpdateCount.search(index).order_by('-date').values_dict()
    if hits:
        start = hits[0]['date']
    else:
        # Empty index: reindex at least yesterday.
        start = datetime.date.today() - datetime.timedelta(days=1)
    fmt = lambda d: d.strftime('%Y-%m-%d')
    date_range = '%s:%s' % (fmt(start), fmt(datetime.date.today()))
    cron_log.info('index_stats --date=%s' % date_range)
    call_command('index_stats', addons=None, date=date_range)
def index_latest_stats(index=None):
    """Reindex add-on update-count stats from the most recently indexed
    date through today.
    """
    raise_if_reindex_in_progress('amo')
    # Render a date as YYYY-MM-DD for the management command's --date arg.
    fmt = lambda d: d.strftime('%Y-%m-%d')
    latest = UpdateCount.search(index).order_by('-date').values_dict()
    if latest:
        # NOTE(review): the indexed value is interpolated as-is (not
        # passed through fmt) — presumably it is already a 'YYYY-MM-DD'
        # string; confirm, since the fallback branch below is
        # pre-formatted and the two must match.
        latest = latest[0]['date']
    else:
        # Empty index: start from yesterday.
        latest = fmt(datetime.date.today() - datetime.timedelta(days=1))
    date_range = '%s:%s' % (latest, fmt(datetime.date.today()))
    cron_log.info('index_stats --date=%s' % date_range)
    call_command('index_stats', addons=None, date=date_range)
def update_addons_collections_downloads():
    """Update addons+collections download totals."""
    raise_if_reindex_in_progress('amo')
    # Sum downloads per (addon, collection) pair.
    rows = AddonCollectionCount.objects.values('addon', 'collection')
    rows = rows.annotate(sum=Sum('count'))
    ts = [tasks.update_addons_collections_downloads.subtask(args=[chunk])
          for chunk in chunked(rows, 100)]
    TaskSet(ts).apply_async()
def update_weekly_downloads():
    """Update the weekly "downloads" from the users_install table."""
    raise_if_reindex_in_progress()
    week_ago = datetime.datetime.today() - datetime.timedelta(days=7)
    # Installs per webapp over the trailing week.
    counts = (Installed.objects
              .values('addon')
              .filter(created__gte=week_ago, addon__type=amo.ADDON_WEBAPP)
              .annotate(count=Count('addon')))
    subtasks = [webapp_update_weekly_downloads.subtask(args=[chunk])
                for chunk in chunked(counts, 1000)]
    TaskSet(subtasks).apply_async()
def update_weekly_downloads():
    """Update the weekly "downloads" from the users_install table."""
    raise_if_reindex_in_progress()
    cutoff = datetime.today() - timedelta(days=7)
    # Installs per webapp created since the cutoff.
    counts = (Installed.objects
              .values('addon')
              .filter(created__gte=cutoff, addon__type=amo.ADDON_WEBAPP)
              .annotate(count=Count('addon')))
    subtasks = []
    for chunk in chunked(counts, 1000):
        subtasks.append(webapp_update_weekly_downloads.subtask(args=[chunk]))
    TaskSet(subtasks).apply_async()
def index_latest_stats(index=None):
    """Reindex add-on update-count stats from the most recently indexed
    date through today.
    """
    def fmt(d):
        # YYYY-MM-DD, as the management command's --date expects.
        return d.strftime('%Y-%m-%d')

    raise_if_reindex_in_progress('amo')
    hits = UpdateCount.search(index).order_by('-date').values_dict()
    # Use the indexed date as-is when present; otherwise fall back to
    # a pre-formatted "yesterday" string.
    start = hits[0]['date'] if hits else fmt(
        datetime.date.today() - datetime.timedelta(days=1))
    date_range = '%s:%s' % (start, fmt(datetime.date.today()))
    cron_log.info('index_stats --date=%s' % date_range)
    call_command('index_stats', addons=None, date=date_range)
def update_addon_average_daily_users():
    """Update add-ons ADU totals.

    Averages each add-on's ``update_counts`` over the past week on a
    slave DB and fans the results out to celery subtasks in chunks of
    250 rows.
    """
    raise_if_reindex_in_progress("amo")
    cursor = connections[multidb.get_slave()].cursor()
    q = """SELECT addon_id, AVG(`count`)
           FROM update_counts
           WHERE `date` > DATE_SUB(CURDATE(), INTERVAL 7 DAY)
           GROUP BY addon_id
           ORDER BY addon_id"""
    # Robustness fix: release the cursor even if the query raises;
    # previously an exception in execute()/fetchall() leaked it.
    try:
        cursor.execute(q)
        d = cursor.fetchall()
    finally:
        cursor.close()
    ts = [_update_addon_average_daily_users.subtask(args=[chunk])
          for chunk in chunked(d, 250)]
    TaskSet(ts).apply_async()
def update_addon_average_daily_users():
    """Update add-ons ADU totals."""
    raise_if_reindex_in_progress('amo')
    # Read from a slave: this is a heavy aggregate query.
    slave_cursor = connections[multidb.get_slave()].cursor()
    slave_cursor.execute(
        """SELECT addon_id, AVG(`count`)
           FROM update_counts
           WHERE `date` > DATE_SUB(CURDATE(), INTERVAL 7 DAY)
           GROUP BY addon_id
           ORDER BY addon_id""")
    rows = slave_cursor.fetchall()
    slave_cursor.close()
    # Fan out to celery in chunks of 250 (addon_id, average) pairs.
    subtasks = []
    for chunk in chunked(rows, 250):
        subtasks.append(
            _update_addon_average_daily_users.subtask(args=[chunk]))
    TaskSet(subtasks).apply_async()
def update_global_totals(date=None):
    """Update global statistics totals."""
    raise_if_reindex_in_progress()
    if date:
        date = datetime.datetime.strptime(date, '%Y-%m-%d').date()
    today = date or datetime.date.today()
    # Daily jobs run against `today` (or the explicit date).
    jobs = []
    for job in tasks._get_daily_jobs(date):
        jobs.append(dict(job=job, date=today))
    # Metrics jobs run against the latest known update-count date.
    max_update = date or UpdateCount.objects.aggregate(max=Max('date'))['max']
    for job in tasks._get_metrics_jobs(date):
        jobs.append(dict(job=job, date=max_update))
    subtasks = [tasks.update_global_totals.subtask(kwargs=kw) for kw in jobs]
    TaskSet(subtasks).apply_async()
def update_global_totals(date=None):
    """Update global statistics totals."""
    raise_if_reindex_in_progress('amo')
    if date:
        date = datetime.datetime.strptime(date, '%Y-%m-%d').date()
    today = date or datetime.date.today()
    # Daily jobs run against `today` (or the explicit date).
    today_jobs = [{'job': job, 'date': today}
                  for job in tasks._get_daily_jobs(date)]
    # Metrics jobs run against the latest known update-count date.
    max_update = date or UpdateCount.objects.aggregate(max=Max('date'))['max']
    metrics_jobs = [{'job': job, 'date': max_update}
                    for job in tasks._get_metrics_jobs(date)]
    ts = [tasks.update_global_totals.subtask(kwargs=kw)
          for kw in today_jobs + metrics_jobs]
    TaskSet(ts).apply_async()
def weekly_downloads():
    """Update 7-day add-on download counts.

    Sums the past week of ``download_counts`` rows per add-on, zero-fills
    add-ons with no downloads, and bulk-writes the totals into
    ``addons.weeklydownloads`` via a temporary table.  Returns False
    (without doing anything) when local stats processing is switched off.
    """
    # Waffle kill-switch: skip entirely unless local stats are enabled.
    if not waffle.switch_is_active('local-statistics-processing'):
        return False
    raise_if_reindex_in_progress('amo')
    cursor = connection.cursor()
    # Per-add-on download totals over the trailing 7 days.
    cursor.execute("""
        SELECT addon_id, SUM(count) AS weekly_count
        FROM download_counts
        WHERE `date` >= DATE_SUB(CURDATE(), INTERVAL 7 DAY)
        GROUP BY addon_id
        ORDER BY addon_id""")
    counts = cursor.fetchall()
    addon_ids = [r[0] for r in counts]
    if not addon_ids:
        # Nothing downloaded this week; also avoids an invalid `IN ()` below.
        return
    # Zero-fill add-ons with no downloads this week so stale totals reset.
    cursor.execute("""
        SELECT id, 0
        FROM addons
        WHERE id NOT IN %s""", (addon_ids,))
    counts += cursor.fetchall()
    # Stage (addon_id, count) pairs in a temp table, then join-update
    # addons in a single statement instead of one UPDATE per row.
    cursor.execute("""
        CREATE TEMPORARY TABLE tmp_wd
        (addon_id INT PRIMARY KEY, count INT)""")
    cursor.execute('INSERT INTO tmp_wd VALUES %s' %
                   ','.join(['(%s,%s)'] * len(counts)),
                   list(itertools.chain(*counts)))
    cursor.execute("""
        UPDATE addons INNER JOIN tmp_wd
            ON addons.id = tmp_wd.addon_id
        SET weeklydownloads = tmp_wd.count""")
    # NOTE(review): unlike the variant that ends with
    # transaction.commit_unless_managed(), this one issues no explicit
    # commit — presumably it relies on autocommit; confirm against the
    # Django version in use.
    cursor.execute("DROP TABLE IF EXISTS tmp_wd")