def test_should_update_scheduled_jobs_and_put_on_queue(mocker,
                                                       sample_template):
    mocked = mocker.patch('app.celery.tasks.process_job.apply_async')

    one_minute_in_the_past = datetime.utcnow() - timedelta(minutes=1)
    job = create_job(sample_template,
                     job_status='scheduled',
                     scheduled_for=one_minute_in_the_past)
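    # scheduled one minute in the past, so run_scheduled_jobs should pick it up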

    run_scheduled_jobs()

    updated_job = dao_get_job_by_id(job.id)
    assert updated_job.job_status == 'pending'
    mocked.assert_called_with([str(job.id)], queue="job-tasks")
def test_check_job_status_task_calls_process_incomplete_jobs_when_scheduled_job_is_not_complete(
        mocker, sample_template):
    mock_celery = mocker.patch(
        'app.celery.tasks.process_incomplete_jobs.apply_async')
    job = create_job(template=sample_template,
                     notification_count=3,
                     created_at=datetime.utcnow() - timedelta(hours=2),
                     scheduled_for=datetime.utcnow() - timedelta(minutes=31),
                     processing_started=datetime.utcnow() -
                     timedelta(minutes=31),
                     job_status=JOB_STATUS_IN_PROGRESS)
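    # processing started over 30 minutes ago and the job is still in progress, so it should be flagged as incomplete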
    check_job_status()

    mock_celery.assert_called_once_with([[str(job.id)]], queue=QueueNames.JOBS)
def test_should_return_notifications_only_for_this_service(
        sample_notification_with_job):
    other_service = create_service(service_name='one')
    other_template = create_template(service=other_service)
    other_job = create_job(other_template)

    create_notification(other_template, job=other_job)
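    # this notification belongs to a different service, so neither cross-service lookup below should return anything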

    assert len(
        dao_get_notification_outcomes_for_job(
            sample_notification_with_job.service_id, other_job.id)) == 0
    assert len(
        dao_get_notification_outcomes_for_job(
            other_service.id, sample_notification_with_job.job_id)) == 0
def test_find_jobs_with_missing_rows_returns_nothing_for_a_job_completed_more_than_a_day_ago(
        sample_email_template
):
    job = create_job(template=sample_email_template,
                     notification_count=5,
                     job_status=JOB_STATUS_FINISHED,
                     processing_finished=datetime.utcnow() - timedelta(days=1)
                     )
    for i in range(0, 4):
        create_notification(job=job, job_row_number=i)
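    # the job finished a full day ago, so it falls outside the window checked for missing rows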

    results = find_jobs_with_missing_rows()

    assert len(results) == 0
def test_will_remove_csv_files_for_jobs_older_than_retention_period(
        notify_db, notify_db_session, mocker):
    """
    Jobs older than their retention period are deleted, but only two days' worth (two-day window)
    """
    mocker.patch('app.celery.nightly_tasks.s3.remove_job_from_s3')
    service_1 = create_service(service_name='service 1')
    service_2 = create_service(service_name='service 2')
    create_service_data_retention(service=service_1,
                                  notification_type=SMS_TYPE,
                                  days_of_retention=3)
    create_service_data_retention(service=service_2,
                                  notification_type=EMAIL_TYPE,
                                  days_of_retention=30)
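    # jobs for other notification types fall back to the default retention period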
    sms_template_service_1 = create_template(service=service_1)
    email_template_service_1 = create_template(service=service_1,
                                               template_type='email')

    sms_template_service_2 = create_template(service=service_2)
    email_template_service_2 = create_template(service=service_2,
                                               template_type='email')

    four_days_ago = datetime.utcnow() - timedelta(days=4)
    eight_days_ago = datetime.utcnow() - timedelta(days=8)
    thirty_one_days_ago = datetime.utcnow() - timedelta(days=31)

    job1_to_delete = create_job(sms_template_service_1,
                                created_at=four_days_ago)
    job2_to_delete = create_job(email_template_service_1,
                                created_at=eight_days_ago)
    create_job(email_template_service_1, created_at=four_days_ago)

    create_job(email_template_service_2, created_at=eight_days_ago)
    job3_to_delete = create_job(email_template_service_2,
                                created_at=thirty_one_days_ago)
    job4_to_delete = create_job(sms_template_service_2,
                                created_at=eight_days_ago)

    remove_sms_email_csv_files()

    s3.remove_job_from_s3.assert_has_calls([
        call(job1_to_delete.service_id, job1_to_delete.id),
        call(job2_to_delete.service_id, job2_to_delete.id),
        call(job3_to_delete.service_id, job3_to_delete.id),
        call(job4_to_delete.service_id, job4_to_delete.id),
    ], any_order=True)
def test_check_job_status_task_only_sends_old_tasks(mocker, sample_template):
    mock_celery = mocker.patch(
        'app.celery.tasks.process_incomplete_jobs.apply_async')
    job = create_job(template=sample_template,
                     notification_count=3,
                     created_at=datetime.utcnow() - timedelta(hours=2),
                     scheduled_for=datetime.utcnow() - timedelta(minutes=31),
                     processing_started=datetime.utcnow() -
                     timedelta(minutes=31),
                     job_status=JOB_STATUS_IN_PROGRESS)
    create_job(template=sample_template,
               notification_count=3,
               created_at=datetime.utcnow() - timedelta(minutes=31),
               processing_started=datetime.utcnow() - timedelta(minutes=29),
               job_status=JOB_STATUS_IN_PROGRESS)
    create_job(template=sample_template,
               notification_count=3,
               created_at=datetime.utcnow() - timedelta(minutes=50),
               scheduled_for=datetime.utcnow() - timedelta(minutes=29),
               job_status=JOB_STATUS_PENDING)
    check_job_status()

    # jobs 2 and 3 started (or were scheduled) less than 30 minutes ago, so they are not sent to the Celery task
    mock_celery.assert_called_once_with([[str(job.id)]], queue=QueueNames.JOBS)
def test_get_contact_list_counts_jobs(
    sample_template,
    admin_request,
    days_of_email_retention,
    expected_job_count,
):
    if days_of_email_retention:
        create_service_data_retention(sample_template.service, 'email', days_of_email_retention)

    # This should be ignored because it’s another template type
    create_service_data_retention(sample_template.service, 'sms', 1)

    contact_list_1 = create_service_contact_list(service=sample_template.service)
    contact_list_2 = create_service_contact_list(service=sample_template.service)

    for i in range(10):
        create_job(
            template=sample_template,
            contact_list_id=contact_list_2.id,
            created_at=datetime.utcnow() - timedelta(days=i)
        )
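    # one job per day for the last ten days; how many count as recent depends on the service's retention period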

    response = admin_request.get(
        'service.get_contact_list',
        service_id=contact_list_1.service_id
    )

    assert len(response) == 2

    assert response[0]['id'] == str(contact_list_2.id)
    assert response[0]['recent_job_count'] == expected_job_count
    assert response[0]['has_jobs'] is True

    assert response[1]['id'] == str(contact_list_1.id)
    assert response[1]['recent_job_count'] == 0
    assert response[1]['has_jobs'] is False
def test_check_job_status_task_only_sends_old_tasks(mocker, sample_template):
    mock_celery = mocker.patch('app.celery.tasks.notify_celery.send_task')
    job = create_job(template=sample_template,
                     notification_count=3,
                     created_at=datetime.utcnow() - timedelta(hours=2),
                     scheduled_for=datetime.utcnow() - timedelta(minutes=31),
                     processing_started=datetime.utcnow() -
                     timedelta(minutes=31),
                     job_status=JOB_STATUS_IN_PROGRESS)
    job_2 = create_job(template=sample_template,
                       notification_count=3,
                       created_at=datetime.utcnow() - timedelta(minutes=31),
                       processing_started=datetime.utcnow() -
                       timedelta(minutes=29),
                       job_status=JOB_STATUS_IN_PROGRESS)
    with pytest.raises(expected_exception=JobIncompleteError) as e:
        check_job_status()
    assert str(job.id) in e.value.message
    assert str(job_2.id) not in e.value.message

    # job_2 is not included in the Celery task
    mock_celery.assert_called_once_with(name=TaskNames.PROCESS_INCOMPLETE_JOBS,
                                        args=([str(job.id)], ),
                                        queue=QueueNames.JOBS)
def test_find_missing_row_for_job_more_than_one_missing_row(
        sample_email_template):
    job = create_job(template=sample_email_template,
                     notification_count=5,
                     job_status=JOB_STATUS_FINISHED,
                     processing_finished=datetime.utcnow() -
                     timedelta(minutes=11))
    create_notification(job=job, job_row_number=0)
    create_notification(job=job, job_row_number=1)
    create_notification(job=job, job_row_number=4)
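    # rows 2 and 3 were never turned into notifications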

    results = find_missing_row_for_job(job.id, 5)
    assert len(results) == 2
    assert results[0].missing_row == 2
    assert results[1].missing_row == 3
def test_check_job_status_task_raises_job_incomplete_error_when_scheduled_job_is_not_complete(mocker, sample_template):
    mock_celery = mocker.patch('app.celery.tasks.notify_celery.send_task')
    job = create_job(template=sample_template, notification_count=3,
                     created_at=datetime.utcnow() - timedelta(hours=2),
                     scheduled_for=datetime.utcnow() - timedelta(minutes=31),
                     processing_started=datetime.utcnow() - timedelta(minutes=31),
                     job_status=JOB_STATUS_IN_PROGRESS)
    with pytest.raises(expected_exception=JobIncompleteError) as e:
        check_job_status()
    assert e.value.message == "Job(s) ['{}'] have not completed.".format(str(job.id))

    mock_celery.assert_called_once_with(
        name=TaskNames.PROCESS_INCOMPLETE_JOBS,
        args=([str(job.id)],),
        queue=QueueNames.JOBS
    )
def test_can_letter_job_be_cancelled_returns_false_and_error_message_if_letters_already_sent_to_dvla(
        sample_letter_template):
    with freeze_time('2019-06-13 13:00'):
        job = create_job(template=sample_letter_template,
                         notification_count=1,
                         job_status='finished')
        letter = create_notification(template=job.template,
                                     job=job,
                                     status='created')

    with freeze_time('2019-06-13 17:32'):
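        # by now the letters are treated as already sent to DVLA, so cancellation should be refused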
        result, errors = can_letter_job_be_cancelled(job)
    assert not result
    assert errors == "It’s too late to cancel sending, these letters have already been sent."
    assert letter.status == 'created'
    assert job.job_status == 'finished'
@pytest.fixture
def sample_notification_with_job(notify_db_session):
    service = create_service(check_if_service_exists=True)
    template = create_template(service=service)
    job = create_job(template=template)
    return create_notification(template=template,
                               job=job,
                               job_row_number=None,
                               to_field=None,
                               status='created',
                               reference=None,
                               created_at=None,
                               sent_at=None,
                               billable_units=1,
                               personalisation=None,
                               api_key=None,
                               key_type=KEY_TYPE_NORMAL)
def test_check_for_missing_rows_in_completed_jobs_uses_sender_id(mocker, sample_email_template, fake_uuid):
    mocker.patch('app.celery.tasks.s3.get_job_and_metadata_from_s3',
                 return_value=(load_example_csv('multiple_email'), {'sender_id': fake_uuid}))
    mock_process_row = mocker.patch('app.celery.scheduled_tasks.process_row')

    job = create_job(template=sample_email_template,
                     notification_count=5,
                     job_status=JOB_STATUS_FINISHED,
                     processing_finished=datetime.utcnow() - timedelta(minutes=11))
    for i in range(0, 4):
        create_notification(job=job, job_row_number=i)
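    # only 4 of the job's 5 rows have notifications, so the missing row should be reprocessed with the stored sender_id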

    check_for_missing_rows_in_completed_jobs()
    mock_process_row.assert_called_once_with(
        mock.ANY, mock.ANY, job, job.service, sender_id=fake_uuid
    )
def test_should_get_jobs_seven_days_old(sample_template):
    """
    Jobs older than seven days are deleted, but only two days' worth (two-day window)
    """
    seven_days_ago = datetime.utcnow() - timedelta(days=7)
    within_seven_days = seven_days_ago + timedelta(seconds=1)

    eight_days_ago = seven_days_ago - timedelta(days=1)

    nine_days_ago = eight_days_ago - timedelta(days=1)
    nine_days_one_second_ago = nine_days_ago - timedelta(seconds=1)

    create_job(sample_template, created_at=seven_days_ago)
    create_job(sample_template, created_at=within_seven_days)
    job_to_delete = create_job(sample_template, created_at=eight_days_ago)
    create_job(sample_template, created_at=nine_days_ago, archived=True)
    create_job(sample_template, created_at=nine_days_one_second_ago, archived=True)
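    # only the unarchived job older than seven days should be returned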

    jobs = dao_get_jobs_older_than_data_retention(notification_types=[sample_template.template_type])

    assert len(jobs) == 1
    assert jobs[0].id == job_to_delete.id
def test_cancel_letter_job_updates_notifications_and_job_to_cancelled(sample_letter_template, admin_request, mocker):
    job = create_job(template=sample_letter_template, notification_count=1, job_status='finished')
    create_notification(template=job.template, job=job, status='created')

    mock_get_job = mocker.patch('app.job.rest.dao_get_job_by_service_id_and_job_id', return_value=job)
    mock_can_letter_job_be_cancelled = mocker.patch(
        'app.job.rest.can_letter_job_be_cancelled', return_value=(True, None)
    )
    mock_dao_cancel_letter_job = mocker.patch('app.job.rest.dao_cancel_letter_job', return_value=1)

    response = admin_request.post(
        'job.cancel_letter_job',
        service_id=job.service_id,
        job_id=job.id,
    )

    mock_get_job.assert_called_once_with(job.service_id, str(job.id))
    mock_can_letter_job_be_cancelled.assert_called_once_with(job)
    mock_dao_cancel_letter_job.assert_called_once_with(job)

    assert response == 1
def test_get_job_by_id_with_stats_for_old_job_where_notifications_have_been_purged(
        admin_request, sample_template):
    old_job = create_job(sample_template,
                         notification_count=10,
                         created_at=datetime.utcnow() - timedelta(days=9),
                         job_status='finished')

    def __create_ft_status(job, status, count):
        create_ft_notification_status(bst_date=job.created_at.date(),
                                      notification_type='sms',
                                      service=job.service,
                                      job=job,
                                      template=job.template,
                                      key_type='normal',
                                      notification_status=status,
                                      count=count)

    __create_ft_status(old_job, 'created', 3)
    __create_ft_status(old_job, 'sending', 1)
    __create_ft_status(old_job, 'failed', 3)
    __create_ft_status(old_job, 'technical-failure', 1)
    __create_ft_status(old_job, 'temporary-failure', 2)

    resp_json = admin_request.get('job.get_job_by_service_and_job_id',
                                  service_id=old_job.service_id,
                                  job_id=old_job.id)

    assert resp_json['data']['id'] == str(old_job.id)
    assert {'status': 'created', 'count': 3} in resp_json['data']['statistics']
    assert {'status': 'sending', 'count': 1} in resp_json['data']['statistics']
    assert {'status': 'failed', 'count': 3} in resp_json['data']['statistics']
    assert {
        'status': 'technical-failure',
        'count': 1
    } in resp_json['data']['statistics']
    assert {
        'status': 'temporary-failure',
        'count': 2
    } in resp_json['data']['statistics']
    assert resp_json['data']['created_by']['name'] == 'Test User'
def test_check_for_missing_rows_in_completed_jobs_ignores_old_and_new_jobs(
    mocker,
    sample_email_template,
    offset,
):
    mocker.patch('app.celery.tasks.s3.get_job_and_metadata_from_s3',
                 return_value=(load_example_csv('multiple_email'), {"sender_id": None}))
    mocker.patch('app.encryption.encrypt', return_value="something_encrypted")
    process_row = mocker.patch('app.celery.scheduled_tasks.process_row')

    job = create_job(
        template=sample_email_template,
        notification_count=5,
        job_status=JOB_STATUS_FINISHED,
        processing_finished=datetime.utcnow() - offset,
    )
    for i in range(0, 4):
        create_notification(job=job, job_row_number=i)
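    # the parametrised offset puts processing_finished either too recently or too long ago for the check to pick the job up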

    check_for_missing_rows_in_completed_jobs()

    assert process_row.called is False
def sample_notification_with_job(
    notify_db,
    notify_db_session,
    service=None,
    template=None,
    job=None,
    job_row_number=None,
    to_field=None,
    status="created",
    reference=None,
    created_at=None,
    sent_at=None,
    billable_units=1,
    personalisation=None,
    api_key=None,
    key_type=KEY_TYPE_NORMAL,
):
    if not service:
        service = create_service()
    if not template:
        template = create_template(service=service)
    if job is None:
        job = create_job(template=template)
    return create_notification(
        template=template,
        job=job,
        job_row_number=job_row_number,
        to_field=to_field,
        status=status,
        reference=reference,
        created_at=created_at,
        sent_at=sent_at,
        billable_units=billable_units,
        personalisation=personalisation,
        api_key=api_key,
        key_type=key_type,
    )
def test_check_for_missing_rows_in_completed_jobs_calls_save_email(mocker, sample_email_template):
    mocker.patch('app.celery.tasks.s3.get_job_and_metadata_from_s3',
                 return_value=(load_example_csv('multiple_email'), {'sender_id': None}))
    save_email_task = mocker.patch('app.celery.tasks.save_email.apply_async')
    mocker.patch('app.encryption.encrypt', return_value="something_encrypted")
    mocker.patch('app.celery.tasks.create_uuid', return_value='uuid')

    job = create_job(template=sample_email_template,
                     notification_count=5,
                     job_status=JOB_STATUS_FINISHED,
                     processing_finished=datetime.utcnow() - timedelta(minutes=11))
    for i in range(0, 4):
        create_notification(job=job, job_row_number=i)
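    # the fifth row (job_row_number 4) has no notification, so it should be re-queued as a save_email task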

    check_for_missing_rows_in_completed_jobs()
    save_email_task.assert_called_once_with(
        (
            str(job.service_id),
            "uuid",
            "something_encrypted",
        ),
        {},
        queue="database-tasks"
    )
def test_get_uploads_for_service(sample_template):
    create_service_data_retention(sample_template.service,
                                  'sms',
                                  days_of_retention=9)
    job = create_job(sample_template, processing_started=datetime.utcnow())
    letter_template = create_uploaded_template(sample_template.service)
    letter = create_uploaded_letter(letter_template, sample_template.service)

    other_service = create_service(service_name="other service")
    other_template = create_template(service=other_service)
    other_job = create_job(other_template,
                           processing_started=datetime.utcnow())
    other_letter_template = create_uploaded_template(other_service)
    create_uploaded_letter(other_letter_template, other_service)

    uploads_from_db = dao_get_uploads_by_service_id(job.service_id).items
    other_uploads_from_db = dao_get_uploads_by_service_id(
        other_job.service_id).items

    assert len(uploads_from_db) == 2
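    # each row appears to be (id, original file name, notification count, template type,
    # days of retention, created_at, scheduled_for, processing_started, status, upload type)
    # plus a final column that is None in every row here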

    assert uploads_from_db[0] == (
        None,
        'Uploaded letters',
        1,
        'letter',
        None,
        letter.created_at.replace(hour=17, minute=30, second=0, microsecond=0),
        None,
        letter.created_at.replace(hour=17, minute=30, second=0, microsecond=0),
        None,
        'letter_day',
        None,
    )
    assert uploads_from_db[1] == (
        job.id,
        job.original_file_name,
        job.notification_count,
        'sms',
        9,
        job.created_at,
        job.scheduled_for,
        job.processing_started,
        job.job_status,
        "job",
        None,
    )

    assert len(other_uploads_from_db) == 2
    assert other_uploads_from_db[0] == (
        None,
        'Uploaded letters',
        1,
        'letter',
        None,
        letter.created_at.replace(hour=17, minute=30, second=0, microsecond=0),
        None,
        letter.created_at.replace(hour=17, minute=30, second=0, microsecond=0),
        None,
        "letter_day",
        None,
    )
    assert other_uploads_from_db[1] == (
        other_job.id,
        other_job.original_file_name,
        other_job.notification_count,
        other_job.template.template_type,
        7,
        other_job.created_at,
        other_job.scheduled_for,
        other_job.processing_started,
        other_job.job_status,
        "job",
        None,
    )

    assert uploads_from_db[1] != other_uploads_from_db[1]
@pytest.fixture
def sample_scheduled_job(sample_template_with_placeholders):
    return create_job(sample_template_with_placeholders,
                      job_status='scheduled',
                      scheduled_for=(datetime.utcnow() +
                                     timedelta(minutes=60)).isoformat())
@freeze_time('2017-06-10 12:00')  # frozen date implied by the comment below; the exact time is assumed
def test_get_jobs_should_retrieve_from_ft_notification_status_for_old_jobs(
        admin_request, sample_template):
    # it's the 10th today, so 3 days should include all of 7th, 8th, 9th, and some of 10th.
    just_three_days_ago = datetime(2017, 6, 6, 22, 59, 59)
    not_quite_three_days_ago = just_three_days_ago + timedelta(seconds=1)

    job_1 = create_job(sample_template,
                       created_at=just_three_days_ago,
                       processing_started=just_three_days_ago)
    job_2 = create_job(sample_template,
                       created_at=just_three_days_ago,
                       processing_started=not_quite_three_days_ago)
    # is old but hasn't started yet (probably a scheduled job). We don't have any stats for this job yet.
    job_3 = create_job(sample_template,
                       created_at=just_three_days_ago,
                       processing_started=None)

    # some notifications created more than three days ago, some created after the midnight cutoff
    create_ft_notification_status(date(2017, 6, 6),
                                  job=job_1,
                                  notification_status='delivered',
                                  count=2)
    create_ft_notification_status(date(2017, 6, 7),
                                  job=job_1,
                                  notification_status='delivered',
                                  count=4)
    # job2's new enough
    create_notification(job=job_2,
                        status='created',
                        created_at=not_quite_three_days_ago)

    # this isn't picked up because the job is too new
    create_ft_notification_status(date(2017, 6, 7),
                                  job=job_2,
                                  notification_status='delivered',
                                  count=8)
    # this isn't picked up - while the job is old, it started in last 3 days so we look at notification table instead
    create_ft_notification_status(date(2017, 6, 7),
                                  job=job_3,
                                  notification_status='delivered',
                                  count=16)

    # this isn't picked up because we're using the ft status table for job_1 as it's old
    create_notification(job=job_1,
                        status='created',
                        created_at=not_quite_three_days_ago)

    resp_json = admin_request.get('job.get_jobs_by_service',
                                  service_id=sample_template.service_id)

    assert resp_json['data'][0]['id'] == str(job_3.id)
    assert resp_json['data'][0]['statistics'] == []
    assert resp_json['data'][1]['id'] == str(job_2.id)
    assert resp_json['data'][1]['statistics'] == [{
        'status': 'created',
        'count': 1
    }]
    assert resp_json['data'][2]['id'] == str(job_1.id)
    assert resp_json['data'][2]['statistics'] == [{
        'status': 'delivered',
        'count': 6
    }]
def create_10_jobs(template):
    with freeze_time('2015-01-01T00:00:00') as the_time:
        for _ in range(10):
            the_time.tick(timedelta(hours=1))
            create_job(template)
def test_get_jobs_can_filter_on_statuses(admin_request, sample_template,
                                         statuses_filter, expected_statuses):
    create_job(sample_template, job_status='pending')
    create_job(sample_template, job_status='in progress')
    create_job(sample_template, job_status='finished')
    create_job(sample_template, job_status='sending limits exceeded')
    create_job(sample_template, job_status='scheduled')
    create_job(sample_template, job_status='cancelled')
    create_job(sample_template, job_status='ready to send')
    create_job(sample_template, job_status='sent to dvla')
    create_job(sample_template, job_status='error')

    resp_json = admin_request.get('job.get_jobs_by_service',
                                  service_id=sample_template.service_id,
                                  statuses=statuses_filter)

    assert {x['job_status']
            for x in resp_json['data']} == set(expected_statuses)
def _setup_jobs(template, number_of_jobs=5):
    for _ in range(number_of_jobs):
        create_job(template=template)
def test_get_uploads(admin_request, sample_template):
    letter_template = create_precompiled_template(sample_template.service)

    upload_1 = create_uploaded_letter(letter_template,
                                      sample_template.service,
                                      status='delivered',
                                      created_at=datetime.utcnow() -
                                      timedelta(minutes=4))
    upload_2 = create_job(template=sample_template,
                          processing_started=datetime.utcnow() -
                          timedelta(minutes=3),
                          job_status=JOB_STATUS_FINISHED)
    upload_3 = create_uploaded_letter(letter_template,
                                      sample_template.service,
                                      status='delivered',
                                      created_at=datetime.utcnow() -
                                      timedelta(minutes=2))
    upload_4 = create_job(template=sample_template,
                          processing_started=datetime.utcnow() -
                          timedelta(minutes=1),
                          job_status=JOB_STATUS_FINISHED)
    upload_5 = create_job(template=sample_template,
                          processing_started=None,
                          job_status=JOB_STATUS_PENDING,
                          notification_count=10)
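    # uploads are returned newest first, so the pending job that has not started processing comes first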

    service_id = sample_template.service.id

    resp_json = admin_request.get('upload.get_uploads_by_service',
                                  service_id=service_id)
    data = resp_json['data']
    assert len(data) == 5
    assert data[0] == {
        'id': str(upload_5.id),
        'original_file_name': 'some.csv',
        'notification_count': 10,
        'created_at': upload_5.created_at.strftime("%Y-%m-%d %H:%M:%S"),
        'statistics': [],
        'upload_type': 'job'
    }
    assert data[1] == {
        'id': str(upload_4.id),
        'original_file_name': 'some.csv',
        'notification_count': 1,
        'created_at': upload_4.created_at.strftime("%Y-%m-%d %H:%M:%S"),
        'statistics': [],
        'upload_type': 'job'
    }
    assert data[2] == {
        'id': str(upload_3.id),
        'original_file_name': "file-name",
        'notification_count': 1,
        'created_at': upload_3.created_at.strftime("%Y-%m-%d %H:%M:%S"),
        'statistics': [{
            'count': 1,
            'status': 'delivered'
        }],
        'upload_type': 'letter'
    }
    assert data[3] == {
        'id': str(upload_2.id),
        'original_file_name': "some.csv",
        'notification_count': 1,
        'created_at': upload_2.created_at.strftime("%Y-%m-%d %H:%M:%S"),
        'statistics': [],
        'upload_type': 'job'
    }
    assert data[4] == {
        'id': str(upload_1.id),
        'original_file_name': "file-name",
        'notification_count': 1,
        'created_at': upload_1.created_at.strftime("%Y-%m-%d %H:%M:%S"),
        'statistics': [{
            'count': 1,
            'status': 'delivered'
        }],
        'upload_type': 'letter'
    }
def test_unique_key_on_job_id_and_job_row_number_no_error_if_row_number_for_different_job(
        sample_email_template):
    job_1 = create_job(template=sample_email_template)
    job_2 = create_job(template=sample_email_template)
    create_notification(job=job_1, job_row_number=0)
    create_notification(job=job_2, job_row_number=0)
def test_unique_key_on_job_id_and_job_row_number(sample_email_template):
    job = create_job(template=sample_email_template)
    create_notification(job=job, job_row_number=0)
    with pytest.raises(expected_exception=IntegrityError):
        create_notification(job=job, job_row_number=0)
def test_get_uploads_only_gets_uploads_within_service_retention_period(
        sample_template):
    letter_template = create_uploaded_template(sample_template.service)
    create_service_data_retention(sample_template.service,
                                  'sms',
                                  days_of_retention=3)

    days_ago = datetime.utcnow() - timedelta(days=4)
    upload_1 = create_uploaded_letter(letter_template,
                                      service=letter_template.service)
    upload_2 = create_job(sample_template,
                          processing_started=datetime.utcnow() -
                          timedelta(days=1),
                          created_at=days_ago,
                          job_status=JOB_STATUS_IN_PROGRESS)
    # older than custom retention for sms:
    create_job(sample_template,
               processing_started=datetime.utcnow() - timedelta(days=5),
               created_at=days_ago,
               job_status=JOB_STATUS_IN_PROGRESS)
    upload_3 = create_uploaded_letter(letter_template,
                                      service=letter_template.service,
                                      created_at=datetime.utcnow() -
                                      timedelta(days=3))

    # older than retention for sms but within letter retention:
    upload_4 = create_uploaded_letter(letter_template,
                                      service=letter_template.service,
                                      created_at=datetime.utcnow() -
                                      timedelta(days=6))

    # older than default retention for letters:
    create_uploaded_letter(letter_template,
                           service=letter_template.service,
                           created_at=datetime.utcnow() - timedelta(days=8))

    results = dao_get_uploads_by_service_id(
        service_id=sample_template.service_id).items

    assert len(results) == 4

    # Uploaded letters get their `created_at` shifted to the time of printing
    # 17:30 BST == 16:30 UTC
    assert results[0].created_at == upload_1.created_at.replace(hour=16,
                                                                minute=30,
                                                                second=0,
                                                                microsecond=0)

    # Jobs keep their original `created_at`
    assert results[1].created_at == upload_2.created_at.replace(hour=14,
                                                                minute=00,
                                                                second=0,
                                                                microsecond=0)

    # Still in BST here…
    assert results[2].created_at == upload_3.created_at.replace(hour=16,
                                                                minute=30,
                                                                second=0,
                                                                microsecond=0)

    # Now we’ve gone far enough back to be in GMT
    # 17:30 GMT == 17:30 UTC
    assert results[3].created_at == upload_4.created_at.replace(hour=17,
                                                                minute=30,
                                                                second=0,
                                                                microsecond=0)
def test_get_scheduled_job_stats(admin_request):

    service_1 = create_service(service_name='service 1')
    service_1_template = create_template(service=service_1)
    service_2 = create_service(service_name='service 2')
    service_2_template = create_template(service=service_2)

    # Shouldn’t be counted – wrong status
    create_job(service_1_template, job_status='finished', scheduled_for='2017-07-17 07:00')
    create_job(service_1_template, job_status='in progress', scheduled_for='2017-07-17 08:00')

    # Should be counted – service 1
    create_job(service_1_template, job_status='scheduled', scheduled_for='2017-07-17 09:00')
    create_job(service_1_template, job_status='scheduled', scheduled_for='2017-07-17 10:00')
    create_job(service_1_template, job_status='scheduled', scheduled_for='2017-07-17 11:00')

    # Should be counted – service 2
    create_job(service_2_template, job_status='scheduled', scheduled_for='2017-07-17 11:00')

    assert admin_request.get(
        'job.get_scheduled_job_stats',
        service_id=service_1.id,
    ) == {
        'count': 3,
        'soonest_scheduled_for': '2017-07-17T09:00:00+00:00',
    }

    assert admin_request.get(
        'job.get_scheduled_job_stats',
        service_id=service_2.id,
    ) == {
        'count': 1,
        'soonest_scheduled_for': '2017-07-17T11:00:00+00:00',
    }