def delete_inbound_sms_older_than_retention():
    """Delete InboundSms rows that are older than each service's retention.

    Services with a flexible data-retention record for SMS keep inbound sms
    for their configured number of days; every other service falls back to
    the default of 7 days.

    Returns:
        int: total number of rows deleted.
    """
    current_app.logger.info(
        'Deleting inbound sms for services with flexible data retention')

    # Services that both receive inbound sms (join on inbound_number) and
    # have a custom retention period configured for the SMS type.
    flexible_data_retention = ServiceDataRetention.query.join(
        ServiceDataRetention.service, Service.inbound_number).filter(
            ServiceDataRetention.notification_type == SMS_TYPE).all()

    deleted = 0

    for f in flexible_data_retention:
        # Cutoff is midnight, days_of_retention days ago, per service.
        n_days_ago = midnight_n_days_ago(f.days_of_retention)

        current_app.logger.info(
            "Deleting inbound sms for service id: {}".format(f.service_id))
        deleted += _delete_inbound_sms(
            n_days_ago, query_filter=[InboundSms.service_id == f.service_id])

    current_app.logger.info(
        'Deleting inbound sms for services without flexible data retention')

    seven_days_ago = midnight_n_days_ago(7)

    # All remaining services get the default 7-day retention; exclude the
    # services already handled above.
    deleted += _delete_inbound_sms(seven_days_ago,
                                   query_filter=[
                                       InboundSms.service_id.notin_(
                                           x.service_id
                                           for x in flexible_data_retention),
                                   ])

    current_app.logger.info('Deleted {} inbound sms'.format(deleted))

    return deleted
def dao_get_uploads_by_service_id(service_id,
                                  limit_days=None,
                                  page=1,
                                  page_size=50):
    """Return one page of a service's uploads: CSV jobs plus letters.

    Builds two queries with matching column shapes (jobs and letter
    notifications), unions them, and paginates the combined result ordered
    by processing_started then created_at, both descending.

    :param service_id: service whose uploads to fetch
    :param limit_days: if given, only include rows created since midnight
        this many days ago
    :param page: 1-based page number
    :param page_size: rows per page
    """
    # Hardcoded filter to exclude cancelled or scheduled jobs
    # for the moment, but we may want to change this method take 'statuses' as a argument in the future
    jobs_query_filter = [
        Job.service_id == service_id,
        Job.original_file_name != current_app.config['TEST_MESSAGE_FILENAME'],
        Job.original_file_name !=
        current_app.config['ONE_OFF_MESSAGE_FILENAME'],
        Job.job_status.notin_([JOB_STATUS_CANCELLED, JOB_STATUS_SCHEDULED])
    ]
    if limit_days is not None:
        jobs_query_filter.append(
            Job.created_at >= midnight_n_days_ago(limit_days))

    # Columns are labelled so that both halves of the union line up.
    jobs_query = db.session.query(
        Job.id, Job.original_file_name, Job.notification_count,
        Job.created_at.label("created_at"),
        Job.scheduled_for.label("scheduled_for"),
        Job.processing_started.label('processing_started'),
        Job.job_status.label("status"),
        literal('job').label('upload_type')).filter(*jobs_query_filter)

    letters_query_filter = [
        Notification.service_id == service_id,
        Notification.notification_type == LETTER_TYPE,
        # api_key_id is null: presumably excludes API-sent letters so only
        # uploaded ones remain — TODO confirm against callers.
        Notification.api_key_id == None,  # noqa
        Notification.status != NOTIFICATION_CANCELLED,
        Template.hidden == True,
    ]
    if limit_days is not None:
        letters_query_filter.append(
            Notification.created_at >= midnight_n_days_ago(limit_days))

    letters_query = db.session.query(
        Notification.id,
        Notification.client_reference.label('original_file_name'),
        # Each letter notification counts as a single upload row.
        literal('1').label('notification_count'),
        Notification.created_at.label("created_at"),
        literal(None).label('scheduled_for'),
        # letters don't have a processing_started date but we want created_at to be used for sorting
        Notification.created_at.label('processing_started'),
        Notification.status,
        literal('letter').label('upload_type')).join(
            Template, Notification.template_id == Template.id).filter(
                *letters_query_filter)

    return jobs_query.union_all(letters_query).order_by(
        desc("processing_started"),
        desc("created_at")).paginate(page=page, per_page=page_size)
# Example n. 3
def returned_letter_statistics(service_id):
    """Return a JSON summary of returned letters for a service.

    The payload holds the count of letters returned within the last seven
    days, plus the date of the most recent returned-letter report (``None``
    when the service has never had one).
    """
    latest_report = fetch_most_recent_returned_letter(service_id)

    if not latest_report:
        return jsonify({
            'returned_letter_count': 0,
            'most_recent_report': None,
        })

    report_date_string = latest_report.reported_at.strftime(
        DATETIME_FORMAT_NO_TIMEZONE)

    # reported_at is a date; lift it to a midnight datetime so it can be
    # compared against the midnight_n_days_ago() cutoff.
    reported_at_midnight = datetime.combine(
        latest_report.reported_at, datetime.min.time()
    )

    if reported_at_midnight < midnight_n_days_ago(7):
        # The latest report is outside the 7-day window: report zero recent
        # returns but still surface when the last report happened.
        recent_count = 0
    else:
        recent_count = fetch_recent_returned_letter_count(
            service_id).returned_letter_count

    return jsonify({
        'returned_letter_count': recent_count,
        'most_recent_report': report_date_string,
    })
def dao_get_paginated_most_recent_inbound_sms_by_user_number_for_service(
        service_id, page, limit_days):
    """
    This query starts from inbound_sms and joins on to itself to find the most recent row for each user_number.

    Equivalent sql:

    SELECT t1.*
    FROM inbound_sms t1
    LEFT OUTER JOIN inbound_sms AS t2 ON (
        -- identifying
        t1.user_number = t2.user_number AND
        t1.service_id = t2.service_id AND
        -- ordering
        t1.created_at < t2.created_at
    )
    WHERE t2.id IS NULL AND t1.service_id = :service_id
    ORDER BY t1.created_at DESC;
    LIMIT 50 OFFSET :page
    """
    t2 = aliased(InboundSms)
    q = db.session.query(InboundSms).outerjoin(
        t2,
        and_(
            InboundSms.user_number == t2.user_number,
            InboundSms.service_id == t2.service_id,
            InboundSms.created_at < t2.created_at,
        )).filter(
            # t2.id IS NULL means no later message exists for this
            # user_number/service pair, i.e. this row is the most recent.
            t2.id == None,  # noqa
            InboundSms.service_id == service_id,
            InboundSms.created_at >= midnight_n_days_ago(limit_days)).order_by(
                InboundSms.created_at.desc())

    return q.paginate(page=page, per_page=current_app.config['PAGE_SIZE'])
def fetch_recent_returned_letter_count(service_id):
    """Count a service's letters reported as returned in the last 7 days."""
    cutoff = midnight_n_days_ago(7)

    count_column = func.count(
        ReturnedLetter.notification_id).label('returned_letter_count')

    return db.session.query(count_column).filter(
        ReturnedLetter.service_id == service_id,
        ReturnedLetter.reported_at > cutoff,
    ).one()
# Example n. 6
def dao_get_jobs_by_service_id(
    service_id,
    *,
    limit_days=None,
    page=1,
    page_size=50,
    statuses=None,
    contact_list_id=None,
):
    """Return one page of a service's jobs, newest first.

    Test-message and one-off "jobs" are always excluded. Optional keyword
    arguments narrow the results by age, job status, or contact list.
    """
    conditions = [
        Job.service_id == service_id,
        Job.original_file_name != current_app.config['TEST_MESSAGE_FILENAME'],
        Job.original_file_name !=
        current_app.config['ONE_OFF_MESSAGE_FILENAME'],
    ]

    if limit_days is not None:
        conditions.append(Job.created_at >= midnight_n_days_ago(limit_days))
    # A list of one empty string means "no status filter was supplied".
    if statuses is not None and statuses != ['']:
        conditions.append(Job.job_status.in_(statuses))
    if contact_list_id is not None:
        conditions.append(Job.contact_list_id == contact_list_id)

    ordered = Job.query.filter(*conditions).order_by(
        Job.processing_started.desc(), Job.created_at.desc())
    return ordered.paginate(page=page, per_page=page_size)
# Example n. 7
def get_paginated_jobs(service_id, limit_days, statuses, page):
    """Return one page of a service's jobs, each annotated with per-status
    notification counts.

    Jobs that started processing more than three days ago read their counts
    from the aggregated ft_notification_status table; newer jobs are counted
    live from the notifications table.
    """
    pagination = dao_get_jobs_by_service_id(
        service_id,
        limit_days=limit_days,
        page=page,
        page_size=current_app.config['PAGE_SIZE'],
        statuses=statuses
    )
    data = job_schema.dump(pagination.items, many=True).data
    for job_data in data:
        start = job_data['processing_started']
        # The schema serialises datetimes to strings; parse back and drop
        # the tzinfo so it compares cleanly with the naive cutoff below.
        start = dateutil.parser.parse(start).replace(tzinfo=None) if start else None

        if start is None:
            statistics = []
        elif start < midnight_n_days_ago(3):
            # start is already naive (tzinfo stripped above), so no second
            # replace() is needed here.
            # ft_notification_status table
            statistics = fetch_notification_statuses_for_job(job_data['id'])
        else:
            # notifications table
            statistics = dao_get_notification_outcomes_for_job(service_id, job_data['id'])
        job_data['statistics'] = [{'status': statistic.status, 'count': statistic.count} for statistic in statistics]

    return {
        'data': data,
        'page_size': pagination.per_page,
        'total': pagination.total,
        'links': pagination_links(
            pagination,
            '.get_jobs_by_service',
            service_id=service_id
        )
    }
# Example n. 8
def get_paginated_uploads(service_id, limit_days, page):
    """Return one page of a service's uploads (jobs and uploaded letters).

    Job rows are annotated with per-status notification counts; letter rows
    always carry an empty statistics list.
    """
    pagination = dao_get_uploads_by_service_id(
        service_id,
        limit_days=limit_days,
        page=page,
        page_size=current_app.config['PAGE_SIZE'])

    data = []
    for upload in pagination.items:
        # Scheduled jobs display their scheduled time instead of creation time.
        display_time = (upload.scheduled_for
                        if upload.scheduled_for else upload.created_at)
        upload_dict = {
            'id': upload.id,
            'original_file_name': upload.original_file_name,
            'notification_count': upload.notification_count,
            'created_at': display_time.strftime("%Y-%m-%d %H:%M:%S"),
            'upload_type': upload.upload_type,
            'template_type': upload.template_type,
            'recipient': upload.recipient,
        }

        if upload.upload_type != 'job':
            upload_dict['statistics'] = []
        else:
            started = upload.processing_started
            if started is None:
                statistics = []
            elif started.replace(tzinfo=None) < midnight_n_days_ago(3):
                # Older jobs: aggregated ft_notification_status table.
                statistics = fetch_notification_statuses_for_job(upload.id)
            else:
                # Recent jobs: count directly from the notifications table.
                statistics = dao_get_notification_outcomes_for_job(
                    service_id, upload.id)
            upload_dict['statistics'] = [
                {'status': stat.status, 'count': stat.count}
                for stat in statistics
            ]

        data.append(upload_dict)

    return {
        'data': data,
        'page_size': pagination.per_page,
        'total': pagination.total,
        'links': pagination_links(pagination,
                                  '.get_uploads_by_service',
                                  service_id=service_id),
    }
# Example n. 9
def get_notifications_for_service(
        service_id,
        filter_dict=None,
        page=1,
        page_size=None,
        count_pages=True,
        limit_days=None,
        key_type=None,
        personalisation=False,
        include_jobs=False,
        include_from_test_key=False,
        older_than=None,
        client_reference=None,
        include_one_off=True
):
    """Return one page of a service's notifications, newest first.

    Keyword arguments progressively narrow the query: by age, key type,
    client reference, whether job/one-off notifications are included, and a
    cursor (``older_than``) for keyset-style paging.
    """
    if page_size is None:
        page_size = current_app.config['PAGE_SIZE']

    conditions = [Notification.service_id == service_id]

    if limit_days is not None:
        conditions.append(
            Notification.created_at >= midnight_n_days_ago(limit_days))

    if older_than is not None:
        # Anchor the cursor on the created_at of the referenced notification.
        anchor = db.session.query(Notification.created_at).filter(
            Notification.id == older_than).as_scalar()
        conditions.append(Notification.created_at < anchor)

    if not include_jobs:
        conditions.append(Notification.job_id == None)  # noqa

    if not include_one_off:
        conditions.append(Notification.created_by_id == None)  # noqa

    if key_type is not None:
        conditions.append(Notification.key_type == key_type)
    elif not include_from_test_key:
        conditions.append(Notification.key_type != KEY_TYPE_TEST)

    if client_reference is not None:
        conditions.append(Notification.client_reference == client_reference)

    query = _filter_query(Notification.query.filter(*conditions), filter_dict)

    if personalisation:
        # Eager-load the template so personalisation can be rendered
        # without an extra query per notification.
        query = query.options(joinedload('template'))

    return query.order_by(desc(Notification.created_at)).paginate(
        page=page,
        per_page=page_size,
        count=count_pages
    )
def fetch_notification_status_for_service_for_today_and_7_previous_days(service_id, by_template=False, limit_days=7):
    """Return notification counts by type and status for the last
    ``limit_days`` days plus today.

    Historic days come from the FactNotificationStatus rollup table;
    today's figures are computed live from the notifications table. The two
    result sets are unioned and summed. When ``by_template`` is True the
    counts are additionally broken down per template.
    """
    start_date = midnight_n_days_ago(limit_days)
    now = datetime.utcnow()
    # Pre-aggregated historic stats (test-key traffic excluded).
    stats_for_7_days = db.session.query(
        FactNotificationStatus.notification_type.label('notification_type'),
        FactNotificationStatus.notification_status.label('status'),
        *([FactNotificationStatus.template_id.label('template_id')] if by_template else []),
        FactNotificationStatus.notification_count.label('count')
    ).filter(
        FactNotificationStatus.service_id == service_id,
        FactNotificationStatus.bst_date >= start_date,
        FactNotificationStatus.key_type != KEY_TYPE_TEST
    )

    # Live counts for today, shaped to match the historic query's columns.
    stats_for_today = db.session.query(
        Notification.notification_type.cast(db.Text),
        Notification.status,
        *([Notification.template_id] if by_template else []),
        func.count().label('count')
    ).filter(
        Notification.created_at >= get_local_timezone_midnight_in_utc(now),
        Notification.service_id == service_id,
        Notification.key_type != KEY_TYPE_TEST
    ).group_by(
        Notification.notification_type,
        *([Notification.template_id] if by_template else []),
        Notification.status
    )

    all_stats_table = stats_for_7_days.union_all(stats_for_today).subquery()

    # Sum the historic and live rows into a single figure per grouping.
    query = db.session.query(
        *([
            Template.name.label("template_name"),
            Template.is_precompiled_letter,
            all_stats_table.c.template_id
        ] if by_template else []),
        all_stats_table.c.notification_type,
        all_stats_table.c.status,
        func.cast(func.sum(all_stats_table.c.count), Integer).label('count'),
    )

    if by_template:
        query = query.filter(all_stats_table.c.template_id == Template.id)

    return query.group_by(
        *([Template.name, Template.is_precompiled_letter, all_stats_table.c.template_id] if by_template else []),
        all_stats_table.c.notification_type,
        all_stats_table.c.status,
    ).all()
# Example n. 11
def dao_get_inbound_sms_for_service(service_id, user_number=None, *, limit_days=None, limit=None):
    """Fetch a service's inbound sms, newest first.

    Optionally restrict to a single sender number, to messages received
    within the last ``limit_days`` days, or cap the number of rows.
    """
    query = InboundSms.query.filter(
        InboundSms.service_id == service_id
    ).order_by(
        InboundSms.created_at.desc()
    )

    if limit_days is not None:
        cutoff = midnight_n_days_ago(limit_days)
        query = query.filter(InboundSms.created_at >= cutoff)
    if user_number:
        query = query.filter(InboundSms.user_number == user_number)
    if limit:
        query = query.limit(limit)

    return query.all()
# Example n. 12
def get_paginated_jobs(service_id, limit_days, statuses, page):
    """Return one page of a service's jobs, each annotated with per-status
    notification counts.

    Jobs that started processing more than three days ago read their counts
    from the aggregated ft_notification_status table; newer jobs are counted
    live from the notifications table.
    """
    pagination = dao_get_jobs_by_service_id(
        service_id,
        limit_days=limit_days,
        page=page,
        page_size=current_app.config["PAGE_SIZE"],
        statuses=statuses,
    )
    data = job_schema.dump(pagination.items, many=True).data
    for job_data in data:
        start = job_data["processing_started"]
        # The schema serialises datetimes to strings; parse back and drop
        # the tzinfo so it compares cleanly with the naive cutoff below.
        start = dateutil.parser.parse(start).replace(
            tzinfo=None) if start else None

        if start is None:
            statistics = []
        elif start < midnight_n_days_ago(3):
            # start is already naive (tzinfo stripped above), so no second
            # replace() is needed here.
            # ft_notification_status table
            statistics = fetch_notification_statuses_for_job(job_data["id"])
        else:
            # notifications table
            statistics = dao_get_notification_outcomes_for_job(
                service_id, job_data["id"])
        job_data["statistics"] = [{
            "status": statistic.status,
            "count": statistic.count
        } for statistic in statistics]

    return {
        "data":
        data,
        "page_size":
        pagination.per_page,
        "total":
        pagination.total,
        "links":
        pagination_links(pagination,
                         ".get_jobs_by_service",
                         service_id=service_id),
    }
# Example n. 13
def dao_fetch_stats_for_service(service_id, limit_days):
    """Return notification stats for a service since midnight
    ``limit_days`` days ago."""
    # We always want between seven and eight days
    cutoff = midnight_n_days_ago(limit_days)
    base_query = _stats_for_service_query(service_id)
    return base_query.filter(Notification.created_at >= cutoff).all()
# Example n. 14
def test_midnight_n_days_ago(current_time, arg, expected_datetime):
    """midnight_n_days_ago gives the expected midnight under a frozen clock."""
    with freeze_time(current_time):
        result = midnight_n_days_ago(arg)
    assert result == expected_datetime
# Example n. 15
def dao_count_inbound_sms_for_service(service_id, limit_days):
    """Count a service's inbound sms received since midnight
    ``limit_days`` days ago."""
    cutoff = midnight_n_days_ago(limit_days)
    return InboundSms.query.filter(
        InboundSms.service_id == service_id,
        InboundSms.created_at >= cutoff,
    ).count()
# Example n. 16
def dao_get_uploads_by_service_id(service_id,
                                  limit_days=None,
                                  page=1,
                                  page_size=50):
    """Return one page of a service's uploads: CSV jobs and uploaded letters.

    Both halves of the result are restricted to the service's data-retention
    window (defaulting to 7 days when no ServiceDataRetention row exists for
    the template's notification type). Uploaded letters are grouped into one
    row per printing datetime. The two queries are unioned and paginated,
    ordered by processing_started then created_at, both descending.

    :param service_id: service whose uploads to fetch
    :param limit_days: if given, additionally restrict to rows created since
        midnight this many days ago
    :param page: 1-based page number
    :param page_size: rows per page
    """
    # Hardcoded filter to exclude cancelled or scheduled jobs
    # for the moment, but we may want to change this method take 'statuses' as a argument in the future
    today = datetime.utcnow().date()
    jobs_query_filter = [
        Job.service_id == service_id,
        Job.original_file_name != current_app.config['TEST_MESSAGE_FILENAME'],
        Job.original_file_name !=
        current_app.config['ONE_OFF_MESSAGE_FILENAME'],
        Job.job_status.notin_([JOB_STATUS_CANCELLED, JOB_STATUS_SCHEDULED]),
        # Only keep jobs still inside the retention window; the date/int
        # subtraction is evaluated by the database.
        func.coalesce(Job.processing_started, Job.created_at) >=
        today - func.coalesce(ServiceDataRetention.days_of_retention, 7),
        Job.contact_list_id.is_(None),
    ]
    if limit_days is not None:
        jobs_query_filter.append(
            Job.created_at >= midnight_n_days_ago(limit_days))

    # Outer join on ServiceDataRetention so services without a retention
    # row fall back to the 7-day default via coalesce.
    jobs_query = db.session.query(
        Job.id,
        Job.original_file_name,
        Job.notification_count,
        Template.template_type,
        func.coalesce(ServiceDataRetention.days_of_retention,
                      7).label('days_of_retention'),
        Job.created_at.label("created_at"),
        Job.scheduled_for.label("scheduled_for"),
        Job.processing_started.label('processing_started'),
        Job.job_status.label("status"),
        literal('job').label('upload_type'),
        literal(None).label('recipient'),
    ).join(Template, Job.template_id == Template.id).outerjoin(
        ServiceDataRetention,
        and_(
            Template.service_id == ServiceDataRetention.service_id,
            func.cast(Template.template_type, String) == func.cast(
                ServiceDataRetention.notification_type,
                String))).filter(*jobs_query_filter)

    letters_query_filter = [
        Notification.service_id == service_id,
        Notification.notification_type == LETTER_TYPE,
        # api_key_id is null: presumably excludes API-sent letters so only
        # uploaded ones remain — TODO confirm against callers.
        Notification.api_key_id == None,  # noqa
        Notification.status != NOTIFICATION_CANCELLED,
        Template.hidden == True,
        Notification.created_at >=
        today - func.coalesce(ServiceDataRetention.days_of_retention, 7)
    ]
    if limit_days is not None:
        letters_query_filter.append(
            Notification.created_at >= midnight_n_days_ago(limit_days))

    # Group uploaded letters by the datetime they were (or will be) printed,
    # producing a count per printing run.
    letters_subquery = db.session.query(
        func.count().label('notification_count'),
        _naive_gmt_to_utc(_get_printing_datetime(
            Notification.created_at)).label('printing_at'),
    ).join(Template, Notification.template_id == Template.id).outerjoin(
        ServiceDataRetention,
        and_(
            Template.service_id == ServiceDataRetention.service_id,
            func.cast(Template.template_type, String) == func.cast(
                ServiceDataRetention.notification_type, String))).filter(
                    *letters_query_filter).group_by('printing_at').subquery()

    # Shape the grouped letter rows so they line up with jobs_query's
    # columns for the union below.
    letters_query = db.session.query(
        literal(None).label('id'),
        literal('Uploaded letters').label('original_file_name'),
        letters_subquery.c.notification_count.label('notification_count'),
        literal('letter').label('template_type'),
        literal(None).label('days_of_retention'),
        letters_subquery.c.printing_at.label('created_at'),
        literal(None).label('scheduled_for'),
        letters_subquery.c.printing_at.label('processing_started'),
        literal(None).label('status'),
        literal('letter_day').label('upload_type'),
        literal(None).label('recipient'),
    ).group_by(
        letters_subquery.c.notification_count,
        letters_subquery.c.printing_at,
    )

    return jobs_query.union_all(letters_query).order_by(
        desc("processing_started"),
        desc("created_at")).paginate(page=page, per_page=page_size)