def get_detailed_services(start_date, end_date, only_active=False, include_from_test_key=True):
    """Return per-service notification statistics for the given date range.

    When start_date is today's (UTC) date, the live "today" stats query is
    used; otherwise the aggregated date-range query is used. Rows are grouped
    by service_id (the query is assumed to be ordered that way) and each
    service's rows are formatted into a statistics dict.
    """
    # NOTE(review): only_active is only honoured on the "today" path; the
    # date-range query is not passed it — confirm whether that is intentional.
    if start_date == datetime.utcnow().date():
        stats = dao_fetch_todays_stats_for_all_services(
            include_from_test_key=include_from_test_key,
            only_active=only_active,
        )
    else:
        stats = fetch_stats_for_all_services_by_date_range(
            start_date=start_date,
            end_date=end_date,
            include_from_test_key=include_from_test_key,
        )

    summaries = []
    for _, grouped in itertools.groupby(stats, key=lambda row: row.service_id):
        service_rows = list(grouped)
        first = service_rows[0]
        summaries.append({
            'id': str(first.service_id),
            'name': first.name,
            'notification_type': first.notification_type,
            'research_mode': first.research_mode,
            'restricted': first.restricted,
            'active': first.active,
            'created_at': first.created_at,
            'statistics': statistics.format_statistics(service_rows),
        })
    return summaries
def get_service_statistics(service_id, today_only, limit_days=7):
    """Return formatted notification statistics for a single service.

    The today_only flag is used by the send page to work out whether the
    service would exceed its daily usage by sending a job; otherwise stats
    cover today plus the previous limit_days days.
    """
    if today_only:
        raw_stats = dao_fetch_todays_stats_for_service(service_id)
    else:
        raw_stats = fetch_notification_status_for_service_for_today_and_7_previous_days(
            service_id, limit_days=limit_days,
        )
    return statistics.format_statistics(raw_stats)
def get_detailed_service(service_id, today_only=False):
    """Return the detailed-service schema dump with statistics attached.

    Chooses between today-only and all-time stats queries, formats the
    result, sets it on the service object, then serialises via the schema.
    """
    service = dao_fetch_service_by_id(service_id)
    if today_only:
        raw_stats = dao_fetch_todays_stats_for_service(service_id)
    else:
        raw_stats = dao_fetch_stats_for_service(service_id)
    service.statistics = statistics.format_statistics(raw_stats)
    return detailed_service_schema.dump(service).data
def get_detailed_services(start_date, end_date, only_active=False, include_from_test_key=True):
    """Return per-service statistics enriched with template counts and user email domains.

    When start_date is today's (UTC) date the live "today" stats query is
    used; otherwise the aggregated date-range query is used. Rows are grouped
    by service_id (the query is assumed to be ordered that way). Services with
    a NULL count get zeroed statistics. Each summary also carries the number
    of templates per notification type and the sorted, lower-cased set of
    email domains of the service's active users.
    """
    if start_date == datetime.utcnow().date():
        stats = dao_fetch_todays_stats_for_all_services(
            include_from_test_key=include_from_test_key,
            only_active=only_active,
        )
    else:
        stats = fetch_stats_by_date_range_for_all_services(
            start_date=start_date,
            end_date=end_date,
            include_from_test_key=include_from_test_key,
            only_active=only_active,
        )

    summaries = []
    for _, grouped in itertools.groupby(stats, key=lambda row: row.service_id):
        service_rows = list(grouped)
        first = service_rows[0]

        # A NULL count means the service sent nothing in the period.
        if first.count is None:
            formatted = statistics.create_zeroed_stats_dicts()
        else:
            formatted = statistics.format_statistics(service_rows)

        sid = str(first.service_id)
        for template_type in (EMAIL_TYPE, SMS_TYPE, LETTER_TYPE):
            formatted[template_type]['templates'] = len(
                dao_get_all_templates_for_service(sid, template_type))

        domains = set()
        for user in dao_fetch_active_users_for_service(sid):
            # Skip malformed addresses (anything without exactly one '@').
            pieces = user.email_address.split('@')
            if len(pieces) == 2:
                domains.add(pieces[1].lower())

        summaries.append({
            'id': sid,
            'name': first.name,
            'notification_type': first.notification_type,
            'research_mode': first.research_mode,
            'restricted': first.restricted,
            'active': first.active,
            'created_at': first.created_at,
            'statistics': formatted,
            'domains': sorted(domains),
            'organisation_type': first.organisation_type,
        })
    return summaries
def test_format_statistics(stats, email_counts, sms_counts):
    """format_statistics groups counts into per-channel status dicts."""
    result = format_statistics(stats)
    statuses = ['requested', 'delivered', 'failed']
    assert result['email'] == dict(zip(statuses, email_counts))
    assert result['sms'] == dict(zip(statuses, sms_counts))
def test_format_statistics(stats, email_counts, sms_counts, letter_counts):
    """format_statistics groups counts into per-channel status dicts, letters included."""
    result = format_statistics(stats)
    statuses = ['requested', 'delivered', 'failed']
    expected = {
        'email': dict(zip(statuses, email_counts)),
        'sms': dict(zip(statuses, sms_counts)),
        'letter': dict(zip(statuses, letter_counts)),
    }
    for channel, counts in expected.items():
        assert result[channel] == counts