def delete_notifications_older_than_retention_by_type(notification_type, qry_limit=50000):
    """Move notifications of one type that are past their retention period out of the live table.

    Services with a ServiceDataRetention row for this notification type use their
    configured ``days_of_retention``; every other service defaults to seven days.

    :param notification_type: the notification type being purged (e.g. sms/email/letter)
    :param qry_limit: batch size passed through to _move_notifications_to_notification_history
    :return: total number of notifications moved/deleted
    """
    current_app.logger.info(
        'Deleting {} notifications for services with flexible data retention'.format(notification_type))

    flexible_data_retention = ServiceDataRetention.query.filter(
        ServiceDataRetention.notification_type == notification_type
    ).all()
    deleted = 0
    for f in flexible_data_retention:
        current_app.logger.info(
            "Deleting {} notifications for service id: {}".format(notification_type, f.service_id))
        # Cutoff is London midnight N retention days ago, converted to UTC, so the
        # purge boundary lines up with BST-based reporting days rather than raw UTC days.
        day_to_delete_backwards_from = get_london_midnight_in_utc(
            convert_utc_to_bst(datetime.utcnow()).date()) - timedelta(days=f.days_of_retention)
        deleted += _move_notifications_to_notification_history(
            notification_type, f.service_id, day_to_delete_backwards_from, qry_limit)

    current_app.logger.info(
        'Deleting {} notifications for services without flexible data retention'.format(notification_type))

    # Default retention window for services without an explicit retention row.
    seven_days_ago = get_london_midnight_in_utc(convert_utc_to_bst(datetime.utcnow()).date()) - timedelta(days=7)
    services_with_data_retention = [x.service_id for x in flexible_data_retention]
    # NOTE(review): this query yields single-column result rows rather than bare ids;
    # presumably _move_notifications_to_notification_history accepts either — confirm.
    service_ids_to_purge = db.session.query(Service.id).filter(Service.id.notin_(services_with_data_retention)).all()

    for service_id in service_ids_to_purge:
        deleted += _move_notifications_to_notification_history(
            notification_type, service_id, seven_days_ago, qry_limit)

    current_app.logger.info('Finished deleting {} notifications'.format(notification_type))

    return deleted
def populate_redis_template_usage(service_id, day):
    """
    Recalculate and replace the per-template usage stats in redis for a day.

    To be used if redis data is lost for some reason. Exits the process with
    status 1 when redis is not enabled, since there is nothing to populate.
    """
    if not current_app.config['REDIS_ENABLED']:
        current_app.logger.error(
            'Cannot populate redis template usage - redis not enabled')
        sys.exit(1)

    # the day variable is set by click to be midnight of that day
    start_time = get_london_midnight_in_utc(day)
    end_time = get_london_midnight_in_utc(day + timedelta(days=1))

    # Map of template_id -> count for the day; keys are stringified because
    # redis hash fields are strings.
    usage = {
        str(row.template_id): row.count
        for row in db.session.query(
            Notification.template_id,
            func.count().label('count')
        ).filter(
            Notification.service_id == service_id,
            Notification.created_at >= start_time,
            Notification.created_at < end_time
        ).group_by(
            Notification.template_id)
    }
    current_app.logger.info(
        'Populating usage dict for service {} day {}: {}'.format(
            service_id, day, usage.items()))

    if usage:
        key = cache_key_for_service_template_usage_per_day(service_id, day)
        # raise_exception=True: fail loudly rather than silently leave stale data.
        redis_store.set_hash_and_expire(
            key,
            usage,
            current_app.config['EXPIRE_CACHE_EIGHT_DAYS'],
            raise_exception=True)
def rebuild_ft_billing_for_day(service_id, day):
    """
    Rebuild the data in ft_billing for the given service_id and date.

    When service_id is falsy, every service that needs billing populated for
    that day is rebuilt instead.
    """
    def _rebuild_for_service(process_day, service):
        # Wipe existing rows first so the upsert below produces a clean rebuild.
        removed = delete_billing_data_for_service_for_day(process_day, service)
        current_app.logger.info(
            'deleted {} existing billing rows for {} on {}'.format(removed, service, process_day))

        # transit_data = every row that should exist
        rows_in_transit = fetch_billing_data_for_day(process_day=process_day, service_id=service)
        for row in rows_in_transit:
            # upsert existing rows
            update_fact_billing(row, process_day)
        current_app.logger.info(
            'added/updated {} billing rows for {} on {}'.format(len(rows_in_transit), service, process_day))

    if not service_id:
        window_start = get_london_midnight_in_utc(day)
        window_end = get_london_midnight_in_utc(day + timedelta(days=1))
        for record in get_service_ids_that_need_billing_populated(window_start, window_end):
            _rebuild_for_service(day, record.service_id)
    else:
        # confirm the service exists
        dao_fetch_service_by_id(service_id)
        _rebuild_for_service(day, service_id)
def dao_fetch_todays_stats_for_all_services(include_from_test_key=True, only_active=True):
    """Return today's notification counts per service, broken down by type and status.

    :param include_from_test_key: when False, exclude notifications sent with a test API key
    :param only_active: when True, restrict to active services
    :return: list of rows (service columns + notification_type, status, count)
    """
    today = date.today()
    # Today's BST day expressed as a UTC half-open window [start_date, end_date).
    start_date = get_london_midnight_in_utc(today)
    end_date = get_london_midnight_in_utc(today + timedelta(days=1))

    subquery = db.session.query(
        Notification.notification_type,
        Notification.status,
        Notification.service_id,
        func.count(Notification.id).label('count')
    ).filter(
        Notification.created_at >= start_date,
        Notification.created_at < end_date
    ).group_by(
        Notification.notification_type,
        Notification.status,
        Notification.service_id)
    if not include_from_test_key:
        subquery = subquery.filter(Notification.key_type != KEY_TYPE_TEST)
    subquery = subquery.subquery()

    # Outer join so services with no notifications today still appear
    # (with NULL type/status/count columns).
    query = db.session.query(
        Service.id.label('service_id'),
        Service.name,
        Service.restricted,
        Service.research_mode,
        Service.active,
        Service.created_at,
        subquery.c.notification_type,
        subquery.c.status,
        subquery.c.count
    ).outerjoin(
        subquery,
        subquery.c.service_id == Service.id
    ).order_by(Service.id)
    if only_active:
        query = query.filter(Service.active)
    return query.all()
def fetch_notification_status_for_service_for_day(bst_day, service_id):
    """Count a service's notifications for one BST day, grouped by type and status.

    Excludes notifications sent with a test API key. The leading 'month' column
    makes the rows shape-compatible with the ft_notification_status query.
    """
    return db.session.query(
        # return current month as a datetime so the data has the same shape as the ft_notification_status query
        literal(bst_day.replace(day=1), type_=DateTime).label('month'),
        Notification.notification_type,
        Notification.status.label('notification_status'),
        func.count().label('count')
    ).filter(
        # BST day expressed as a UTC half-open window.
        Notification.created_at >= get_london_midnight_in_utc(bst_day),
        Notification.created_at < get_london_midnight_in_utc(bst_day + timedelta(days=1)),
        Notification.service_id == service_id,
        Notification.key_type != KEY_TYPE_TEST
    ).group_by(
        Notification.notification_type,
        Notification.status
    ).all()
def save_daily_notification_processing_time(bst_date=None):
    """Compute and store the processing-time stats for one BST day.

    :param bst_date: a "YYYY-MM-DD" string, or None to run against yesterday's data
    """
    if bst_date is None:
        # if a date is not provided, we run against yesterdays data
        target_day = (datetime.utcnow() - timedelta(days=1)).date()
    else:
        target_day = datetime.strptime(bst_date, "%Y-%m-%d").date()

    window_start = get_london_midnight_in_utc(target_day)
    window_end = get_london_midnight_in_utc(target_day + timedelta(days=1))

    result = dao_get_notifications_processing_time_stats(window_start, window_end)
    insert_update_processing_time(
        FactProcessingTime(
            bst_date=target_day,
            messages_total=result.messages_total,
            messages_within_10_secs=result.messages_within_10_secs
        )
    )
def validate_date_range_is_within_a_financial_year(start_date, end_date):
    """Parse "YYYY-MM-DD" strings and check the range sits inside one financial year.

    :return: the parsed (start_date, end_date) as date objects
    :raises InvalidRequest: 400 for unparseable input, an inverted range, or a
        range spanning more than one financial year
    """
    try:
        parsed_start = datetime.strptime(start_date, "%Y-%m-%d").date()
        parsed_end = datetime.strptime(end_date, "%Y-%m-%d").date()
    except ValueError:
        raise InvalidRequest(message="Input must be a date in the format: YYYY-MM-DD", status_code=400)

    if parsed_end < parsed_start:
        raise InvalidRequest(message="Start date must be before end date", status_code=400)

    same_financial_year = (
        get_financial_year_for_datetime(get_london_midnight_in_utc(parsed_start))
        == get_financial_year_for_datetime(get_london_midnight_in_utc(parsed_end))
    )
    if not same_financial_year:
        raise InvalidRequest(message="Date must be in a single financial year.", status_code=400)

    return parsed_start, parsed_end
def send_total_sent_notifications_to_performance_platform(bst_date):
    """Push the day's total sent email/sms/letter counts to the Performance Platform."""
    counts = total_sent_notifications.get_total_sent_notifications_for_day(bst_date)
    day_start = get_london_midnight_in_utc(bst_date)

    current_app.logger.info(
        "Attempting to update Performance Platform for {} with {} emails, {} text messages and {} letters"
        .format(bst_date, counts['email'], counts['sms'], counts['letter'])
    )

    # One stats push per channel, in the same order as before.
    for channel in ('sms', 'email', 'letter'):
        total_sent_notifications.send_total_notifications_sent_for_day_stats(
            day_start,
            channel,
            counts[channel]
        )
def get_rate(
    non_letter_rates, letter_rates, notification_type, date, crown=None, letter_page_count=None, post_class='second'
):
    """Look up the billing rate in effect on `date` for the given notification type.

    Letters match on crown status, sheet count and post class; SMS matches on
    type and validity date. Anything else is free (returns 0). A missing
    matching rate propagates as StopIteration, as before.
    """
    start_of_day = get_london_midnight_in_utc(date)

    if notification_type == LETTER_TYPE:
        # Zero-page letters cost nothing.
        if letter_page_count == 0:
            return 0

        def _letter_rate_matches(rate):
            return (
                start_of_day >= rate.start_date
                and crown == rate.crown
                and letter_page_count == rate.sheet_count
                and post_class == rate.post_class
            )

        return next(r.rate for r in letter_rates if _letter_rate_matches(r))

    if notification_type == SMS_TYPE:
        return next(
            r.rate
            for r in non_letter_rates
            if r.notification_type == notification_type and start_of_day >= r.valid_from
        )

    return 0
def fetch_aggregate_stats_by_date_range_for_all_services(start_date, end_date):
    """Count notifications across all services for a BST date range, grouped by type, status and key type."""
    # Dates become a UTC half-open window [start_date, end_date).
    start_date = get_london_midnight_in_utc(start_date)
    end_date = get_london_midnight_in_utc(end_date + timedelta(days=1))
    table = NotificationHistory

    # Recent rows still live in the notification table; older rows have been
    # moved to history — pick the table that covers the requested window.
    if start_date >= datetime.utcnow() - timedelta(days=7):
        table = Notification

    query = db.session.query(
        table.notification_type,
        table.status,
        table.key_type,
        func.count(table.id).label('count')
    ).filter(
        table.created_at >= start_date,
        table.created_at < end_date
    ).group_by(
        table.notification_type,
        table.key_type,
        table.status
    ).order_by(
        table.notification_type,
    )

    return query.all()
def fetch_stats_by_date_range_for_all_services(start_date, end_date, include_from_test_key=True, only_active=True):
    """Per-service notification counts (by type and status) for a BST date range.

    :param include_from_test_key: when False, exclude notifications sent with a test API key
    :param only_active: when True, restrict to active services
    """
    # Dates become a UTC half-open window [start_date, end_date).
    start_date = get_london_midnight_in_utc(start_date)
    end_date = get_london_midnight_in_utc(end_date + timedelta(days=1))
    table = NotificationHistory

    # Recent rows still live in the notification table; older rows have been
    # moved to history — pick the table that covers the requested window.
    if start_date >= datetime.utcnow() - timedelta(days=7):
        table = Notification

    subquery = db.session.query(
        table.notification_type,
        table.status,
        table.service_id,
        func.count(table.id).label('count')
    ).filter(
        table.created_at >= start_date,
        table.created_at < end_date
    ).group_by(
        table.notification_type,
        table.status,
        table.service_id
    )
    if not include_from_test_key:
        subquery = subquery.filter(table.key_type != KEY_TYPE_TEST)
    subquery = subquery.subquery()

    # Outer join so services with no notifications in the range still appear.
    query = db.session.query(
        Service.id.label('service_id'),
        Service.name,
        Service.restricted,
        Service.research_mode,
        Service.active,
        Service.created_at,
        subquery.c.notification_type,
        subquery.c.status,
        subquery.c.count
    ).outerjoin(
        subquery,
        subquery.c.service_id == Service.id
    ).order_by(Service.id)
    if only_active:
        query = query.filter(Service.active)
    return query.all()
def dao_get_template_usage(service_id, day):
    """Count how many notifications each of a service's templates sent on one BST day.

    Every template of the service is returned (count 0 when unused); test-key
    notifications are excluded.
    """
    # BST day expressed as a UTC half-open window [start, end).
    start = get_london_midnight_in_utc(day)
    end = get_london_midnight_in_utc(day + timedelta(days=1))

    notifications_aggregate_query = db.session.query(
        func.count().label('count'),
        Notification.template_id
    ).filter(
        Notification.created_at >= start,
        Notification.created_at < end,
        Notification.service_id == service_id,
        Notification.key_type != KEY_TYPE_TEST,
    ).group_by(
        Notification.template_id
    ).subquery()

    # Outer join + coalesce so unused templates appear with a count of 0.
    query = db.session.query(
        Template.id,
        Template.name,
        Template.template_type,
        Template.is_precompiled_letter,
        func.coalesce(notifications_aggregate_query.c.count, 0).label('count')
    ).outerjoin(
        notifications_aggregate_query,
        notifications_aggregate_query.c.template_id == Template.id
    ).filter(
        Template.service_id == service_id
    ).order_by(Template.name)

    return query.all()
def fetch_notification_status_for_service_for_today_and_7_previous_days(service_id, by_template=False, limit_days=7):
    """Notification counts by type/status for the last ``limit_days`` days plus today.

    Past days come from ft_notification_status; today's counts come from the
    live notification table (today's fact rows are not yet populated). The two
    result sets are unioned and re-aggregated. With ``by_template`` the
    breakdown also includes template id, name and is_precompiled_letter.
    Test-key notifications are excluded throughout.
    """
    start_date = midnight_n_days_ago(limit_days)
    now = datetime.utcnow()
    stats_for_7_days = db.session.query(
        FactNotificationStatus.notification_type.label('notification_type'),
        FactNotificationStatus.notification_status.label('status'),
        *([FactNotificationStatus.template_id.label('template_id')] if by_template else []),
        FactNotificationStatus.notification_count.label('count')
    ).filter(
        FactNotificationStatus.service_id == service_id,
        FactNotificationStatus.bst_date >= start_date,
        FactNotificationStatus.key_type != KEY_TYPE_TEST
    )

    stats_for_today = db.session.query(
        # cast to text so the union's column types line up with the fact table
        Notification.notification_type.cast(db.Text),
        Notification.status,
        *([Notification.template_id] if by_template else []),
        func.count().label('count')
    ).filter(
        Notification.created_at >= get_london_midnight_in_utc(now),
        Notification.service_id == service_id,
        Notification.key_type != KEY_TYPE_TEST
    ).group_by(
        Notification.notification_type,
        *([Notification.template_id] if by_template else []),
        Notification.status
    )

    all_stats_table = stats_for_7_days.union_all(stats_for_today).subquery()
    query = db.session.query(
        *([
            Template.name.label("template_name"),
            Template.is_precompiled_letter,
            all_stats_table.c.template_id
        ] if by_template else []),
        all_stats_table.c.notification_type,
        all_stats_table.c.status,
        func.cast(func.sum(all_stats_table.c.count), Integer).label('count'),
    )

    if by_template:
        # Implicit join to Template via the filter condition.
        query = query.filter(all_stats_table.c.template_id == Template.id)

    return query.group_by(
        *([Template.name, Template.is_precompiled_letter, all_stats_table.c.template_id] if by_template else []),
        all_stats_table.c.notification_type,
        all_stats_table.c.status,
    ).all()
def dao_get_inbound_sms_for_service(service_id, limit=None, user_number=None):
    """Fetch a service's inbound SMS from the last seven days, newest first.

    :param limit: cap on the number of rows returned (no cap when falsy)
    :param user_number: when given, restrict to messages from that number
    """
    window_start = get_london_midnight_in_utc(date.today() - timedelta(days=6))

    query = InboundSms.query.filter(
        InboundSms.service_id == service_id,
        InboundSms.created_at >= window_start,
    ).order_by(InboundSms.created_at.desc())

    if user_number:
        query = query.filter(InboundSms.user_number == user_number)
    if limit:
        query = query.limit(limit)

    return query.all()
def fetch_notification_status_totals_for_all_services(start_date, end_date):
    """Total notification counts across all services for a BST date range, by type, status and key type.

    Historic days come from ft_notification_status; if today falls inside the
    range, live counts from the notification table are unioned in and the
    combined rows are re-aggregated.
    """
    stats = db.session.query(
        FactNotificationStatus.notification_type.label('notification_type'),
        FactNotificationStatus.notification_status.label('status'),
        FactNotificationStatus.key_type.label('key_type'),
        func.sum(FactNotificationStatus.notification_count).label('count')
    ).filter(
        FactNotificationStatus.bst_date >= start_date,
        FactNotificationStatus.bst_date <= end_date
    ).group_by(
        FactNotificationStatus.notification_type,
        FactNotificationStatus.notification_status,
        FactNotificationStatus.key_type,
    )
    today = get_london_midnight_in_utc(datetime.utcnow())
    # Today's fact rows are not populated yet, so add live counts when the
    # range includes today.
    if start_date <= datetime.utcnow().date() <= end_date:
        stats_for_today = db.session.query(
            # cast to text so the union's column types line up with the fact table
            Notification.notification_type.cast(db.Text).label('notification_type'),
            Notification.status,
            Notification.key_type,
            func.count().label('count')
        ).filter(
            Notification.created_at >= today
        ).group_by(
            Notification.notification_type.cast(db.Text),
            Notification.status,
            Notification.key_type,
        )
        all_stats_table = stats.union_all(stats_for_today).subquery()
        query = db.session.query(
            all_stats_table.c.notification_type,
            all_stats_table.c.status,
            all_stats_table.c.key_type,
            func.cast(func.sum(all_stats_table.c.count), Integer).label('count'),
        ).group_by(
            all_stats_table.c.notification_type,
            all_stats_table.c.status,
            all_stats_table.c.key_type,
        ).order_by(
            all_stats_table.c.notification_type
        )
    else:
        query = stats.order_by(
            FactNotificationStatus.notification_type
        )
    return query.all()
def dao_get_paginated_most_recent_inbound_sms_by_user_number_for_service(
    service_id,
    page
):
    """
    This query starts from inbound_sms and joins on to itself to find the most
    recent row for each user_number.

    Equivalent sql:

    SELECT t1.*
    FROM inbound_sms t1
    LEFT OUTER JOIN inbound_sms AS t2 ON (
        -- identifying
        t1.user_number = t2.user_number AND
        t1.service_id = t2.service_id AND
        -- ordering
        t1.created_at < t2.created_at
    )
    WHERE t2.id IS NULL AND t1.service_id = :service_id
    ORDER BY t1.created_at DESC;
    LIMIT 50 OFFSET :page
    """
    # Only consider messages from the last seven days (London time).
    start_date = get_london_midnight_in_utc(date.today() - timedelta(days=6))
    t2 = aliased(InboundSms)
    q = db.session.query(
        InboundSms
    ).outerjoin(
        t2,
        and_(
            InboundSms.user_number == t2.user_number,
            InboundSms.service_id == t2.service_id,
            InboundSms.created_at < t2.created_at,
        )
    ).filter(
        # Anti-join: no newer row for the same number means this one is the latest.
        t2.id == None,  # noqa
        InboundSms.service_id == service_id,
        InboundSms.created_at >= start_date
    ).order_by(
        InboundSms.created_at.desc()
    )

    return q.paginate(
        page=page,
        per_page=current_app.config['PAGE_SIZE']
    )
def get_total_sent_notifications_for_day(day):
    """Return the counts of email, sms and letter notifications sent on one day.

    The day is a BST day, converted to a UTC half-open window internally.
    """
    window_start = get_london_midnight_in_utc(day)
    window_end = window_start + timedelta(days=1)

    counts = {
        channel: get_total_sent_notifications_in_date_range(window_start, window_end, channel)
        for channel in ('email', 'sms', 'letter')
    }

    return {
        "start_date": window_start,
        "email": {
            "count": counts['email']
        },
        "sms": {
            "count": counts['sms']
        },
        "letter": {
            "count": counts['letter']
        },
    }
def backfill_processing_time(start_date, end_date):
    """Send historical processing-time data to the Performance Platform, one day at a time."""
    total_days = (end_date - start_date).days + 1
    print('Sending notification processing-time data for all days between {} and {}'.format(start_date, end_date))

    for offset in range(total_days):
        # because the tz conversion funcs talk about midnight, and the midnight before last,
        # we want to pretend we're running this from the next morning, so add one.
        run_date = start_date + timedelta(days=offset + 1)

        window_start = get_midnight_for_day_before(run_date)
        window_end = get_london_midnight_in_utc(run_date)

        print('Sending notification processing-time for {} - {}'.format(
            window_start.isoformat(),
            window_end.isoformat()
        ))
        send_processing_time_for_start_and_end(window_start, window_end)
def dao_count_inbound_sms_for_service(service_id):
    """Count a service's inbound SMS received within the last seven days (London time)."""
    window_start = get_london_midnight_in_utc(date.today() - timedelta(days=6))

    query = InboundSms.query.filter(
        InboundSms.service_id == service_id,
        InboundSms.created_at >= window_start,
    )
    return query.count()
def fetch_count_of_complaints(start_date, end_date):
    """Count complaints created within the given BST date range (end date inclusive)."""
    window_start = get_london_midnight_in_utc(start_date)
    window_end = get_london_midnight_in_utc(end_date + timedelta(days=1))

    return Complaint.query.filter(
        Complaint.created_at >= window_start,
        Complaint.created_at < window_end,
    ).count()
def fetch_stats_for_all_services_by_date_range(start_date, end_date, include_from_test_key=True):
    """Per-service notification counts by type and status for a BST date range.

    Historic days come from ft_notification_status; if today falls inside the
    range, live counts from the notification table are unioned in (today's
    fact rows are not yet populated) and the combined rows re-aggregated.

    :param include_from_test_key: when False, exclude notifications sent with a test API key
    """
    stats = db.session.query(
        FactNotificationStatus.service_id.label('service_id'),
        Service.name.label('name'),
        Service.restricted.label('restricted'),
        Service.research_mode.label('research_mode'),
        Service.active.label('active'),
        Service.created_at.label('created_at'),
        FactNotificationStatus.notification_type.label('notification_type'),
        FactNotificationStatus.notification_status.label('status'),
        func.sum(FactNotificationStatus.notification_count).label('count')
    ).filter(
        FactNotificationStatus.bst_date >= start_date,
        FactNotificationStatus.bst_date <= end_date,
        # Implicit join to Service via this condition.
        FactNotificationStatus.service_id == Service.id,
    ).group_by(
        FactNotificationStatus.service_id.label('service_id'),
        Service.name,
        Service.restricted,
        Service.research_mode,
        Service.active,
        Service.created_at,
        FactNotificationStatus.notification_type,
        FactNotificationStatus.notification_status,
    ).order_by(
        FactNotificationStatus.service_id,
        FactNotificationStatus.notification_type
    )
    if not include_from_test_key:
        stats = stats.filter(FactNotificationStatus.key_type != KEY_TYPE_TEST)

    if start_date <= datetime.utcnow().date() <= end_date:
        today = get_london_midnight_in_utc(datetime.utcnow())
        subquery = db.session.query(
            # cast to text so the union's column types line up with the fact table
            Notification.notification_type.cast(db.Text).label('notification_type'),
            Notification.status.label('status'),
            Notification.service_id.label('service_id'),
            func.count(Notification.id).label('count')
        ).filter(
            Notification.created_at >= today
        ).group_by(
            Notification.notification_type,
            Notification.status,
            Notification.service_id
        )
        if not include_from_test_key:
            subquery = subquery.filter(Notification.key_type != KEY_TYPE_TEST)
        subquery = subquery.subquery()

        stats_for_today = db.session.query(
            Service.id.label('service_id'),
            Service.name.label('name'),
            Service.restricted.label('restricted'),
            Service.research_mode.label('research_mode'),
            Service.active.label('active'),
            Service.created_at.label('created_at'),
            subquery.c.notification_type.label('notification_type'),
            subquery.c.status.label('status'),
            subquery.c.count.label('count')
        ).outerjoin(
            subquery,
            subquery.c.service_id == Service.id
        )

        all_stats_table = stats.union_all(stats_for_today).subquery()
        # Re-aggregate so a (service, type, status) that appears both in the
        # fact table and in today's live rows collapses into one summed row.
        query = db.session.query(
            all_stats_table.c.service_id,
            all_stats_table.c.name,
            all_stats_table.c.restricted,
            all_stats_table.c.research_mode,
            all_stats_table.c.active,
            all_stats_table.c.created_at,
            all_stats_table.c.notification_type,
            all_stats_table.c.status,
            func.cast(func.sum(all_stats_table.c.count), Integer).label('count'),
        ).group_by(
            all_stats_table.c.service_id,
            all_stats_table.c.name,
            all_stats_table.c.restricted,
            all_stats_table.c.research_mode,
            all_stats_table.c.active,
            all_stats_table.c.created_at,
            all_stats_table.c.notification_type,
            all_stats_table.c.status,
        ).order_by(
            all_stats_table.c.name,
            all_stats_table.c.notification_type,
            all_stats_table.c.status
        )
    else:
        query = stats
    return query.all()
def send_processing_time_to_performance_platform(bst_date):
    """Send processing-time stats for one BST day to the Performance Platform."""
    window_start = get_london_midnight_in_utc(bst_date)
    window_end = get_london_midnight_in_utc(bst_date + timedelta(days=1))

    send_processing_time_for_start_and_end(window_start, window_end)
def test_get_london_midnight_in_utc_returns_expected_date(date, expected_date):
    """Parametrised check that BST/GMT midnights convert to the expected UTC datetimes."""
    result = get_london_midnight_in_utc(date)
    assert result == expected_date
def fetch_monthly_template_usage_for_service(start_date, end_date, service_id):
    """Monthly per-template notification counts for a service over a date range.

    Historic data comes from ft_notification_status; when "now" falls inside
    the range, today's live rows from the notification table are unioned in and
    re-aggregated. Test-key and cancelled notifications are excluded.
    """
    # services_dao.replaces dao_fetch_monthly_historical_usage_by_template_for_service
    stats = db.session.query(
        FactNotificationStatus.template_id.label('template_id'),
        Template.name.label('name'),
        Template.template_type.label('template_type'),
        Template.is_precompiled_letter.label('is_precompiled_letter'),
        extract('month', FactNotificationStatus.bst_date).label('month'),
        extract('year', FactNotificationStatus.bst_date).label('year'),
        func.sum(FactNotificationStatus.notification_count).label('count')
    ).join(
        Template, FactNotificationStatus.template_id == Template.id
    ).filter(
        FactNotificationStatus.service_id == service_id,
        FactNotificationStatus.bst_date >= start_date,
        FactNotificationStatus.bst_date <= end_date,
        FactNotificationStatus.key_type != KEY_TYPE_TEST,
        FactNotificationStatus.notification_status != NOTIFICATION_CANCELLED,
    ).group_by(
        FactNotificationStatus.template_id,
        Template.name,
        Template.template_type,
        Template.is_precompiled_letter,
        extract('month', FactNotificationStatus.bst_date).label('month'),
        extract('year', FactNotificationStatus.bst_date).label('year'),
    ).order_by(
        extract('year', FactNotificationStatus.bst_date),
        extract('month', FactNotificationStatus.bst_date),
        Template.name
    )

    # NOTE(review): similar checks elsewhere in this module compare against
    # datetime.utcnow().date(); here the comparison is with a raw datetime —
    # confirm callers pass datetimes, not dates, for start/end.
    if start_date <= datetime.utcnow() <= end_date:
        today = get_london_midnight_in_utc(datetime.utcnow())
        month = get_london_month_from_utc_column(Notification.created_at)

        stats_for_today = db.session.query(
            Notification.template_id.label('template_id'),
            Template.name.label('name'),
            Template.template_type.label('template_type'),
            Template.is_precompiled_letter.label('is_precompiled_letter'),
            extract('month', month).label('month'),
            extract('year', month).label('year'),
            func.count().label('count')
        ).join(
            Template,
            Notification.template_id == Template.id,
        ).filter(
            Notification.created_at >= today,
            Notification.service_id == service_id,
            Notification.key_type != KEY_TYPE_TEST,
            Notification.status != NOTIFICATION_CANCELLED
        ).group_by(
            # NOTE(review): groups by Template.hidden but not
            # Template.is_precompiled_letter, which is selected — verify this
            # is intentional / valid on the target database.
            Notification.template_id,
            Template.hidden,
            Template.name,
            Template.template_type,
            month
        )

        all_stats_table = stats.union_all(stats_for_today).subquery()
        query = db.session.query(
            all_stats_table.c.template_id,
            all_stats_table.c.name,
            all_stats_table.c.is_precompiled_letter,
            all_stats_table.c.template_type,
            func.cast(all_stats_table.c.month, Integer).label('month'),
            func.cast(all_stats_table.c.year, Integer).label('year'),
            func.cast(func.sum(all_stats_table.c.count), Integer).label('count'),
        ).group_by(
            all_stats_table.c.template_id,
            all_stats_table.c.name,
            all_stats_table.c.is_precompiled_letter,
            all_stats_table.c.template_type,
            all_stats_table.c.month,
            all_stats_table.c.year,
        ).order_by(
            all_stats_table.c.year,
            all_stats_table.c.month,
            all_stats_table.c.name
        )
    else:
        query = stats

    return query.all()
def send_processing_time_to_performance_platform():
    """Send yesterday's-midnight-to-today's-midnight processing-time stats to the Performance Platform."""
    now = datetime.utcnow()

    send_processing_time_for_start_and_end(
        get_midnight_for_day_before(now),
        get_london_midnight_in_utc(now)
    )