def test_should_update_all_scheduled_jobs_and_put_on_queue(sample_template, mocker):
    mocked = mocker.patch('app.celery.tasks.process_job.apply_async')

    one_minute_in_the_past = datetime.utcnow() - timedelta(minutes=1)
    ten_minutes_in_the_past = datetime.utcnow() - timedelta(minutes=10)
    twenty_minutes_in_the_past = datetime.utcnow() - timedelta(minutes=20)

    job_1 = create_job(sample_template, job_status='scheduled', scheduled_for=one_minute_in_the_past)
    job_2 = create_job(sample_template, job_status='scheduled', scheduled_for=ten_minutes_in_the_past)
    job_3 = create_job(sample_template, job_status='scheduled', scheduled_for=twenty_minutes_in_the_past)

    run_scheduled_jobs()

    assert dao_get_job_by_id(job_1.id).job_status == 'pending'
    assert dao_get_job_by_id(job_2.id).job_status == 'pending'
    assert dao_get_job_by_id(job_3.id).job_status == 'pending'

    mocked.assert_has_calls([
        call([str(job_3.id)], queue="job-tasks"),
        call([str(job_2.id)], queue="job-tasks"),
        call([str(job_1.id)], queue="job-tasks")
    ])
def test_should_process_sms_job(sample_job, mocker):
    mocker.patch('app.celery.tasks.s3.get_job_from_s3', return_value=load_example_csv('sms'))
    mocker.patch('app.celery.tasks.send_sms.apply_async')
    mocker.patch('app.encryption.encrypt', return_value="something_encrypted")
    mocker.patch('app.celery.tasks.create_uuid', return_value="uuid")

    process_job(sample_job.id)

    s3.get_job_from_s3.assert_called_once_with(
        str(sample_job.service.id),
        str(sample_job.id)
    )
    assert encryption.encrypt.call_args[0][0]['to'] == '+441234123123'
    assert encryption.encrypt.call_args[0][0]['template'] == str(sample_job.template.id)
    assert encryption.encrypt.call_args[0][0]['template_version'] == sample_job.template.version
    assert encryption.encrypt.call_args[0][0]['personalisation'] == {'phonenumber': '+441234123123'}
    assert encryption.encrypt.call_args[0][0]['row_number'] == 0
    tasks.send_sms.apply_async.assert_called_once_with(
        (str(sample_job.service_id),
         "uuid",
         "something_encrypted",
         "2016-01-01T11:09:00.061258Z"),
        queue="db-sms"
    )
    job = jobs_dao.dao_get_job_by_id(sample_job.id)
    assert job.job_status == 'finished'
def process_job(job_id, sender_id=None):
    start = datetime.utcnow()
    job = dao_get_job_by_id(job_id)

    current_app.logger.info("Starting process-job task for job id {} with status: {}".format(job_id, job.job_status))

    if job.job_status != JOB_STATUS_PENDING:
        return

    service = job.service

    if not service.active:
        job.job_status = JOB_STATUS_CANCELLED
        dao_update_job(job)
        current_app.logger.warning(
            "Job {} has been cancelled, service {} is inactive".format(job_id, service.id))
        return

    if __sending_limits_for_job_exceeded(service, job, job_id):
        return

    job.job_status = JOB_STATUS_IN_PROGRESS
    job.processing_started = start
    dao_update_job(job)

    recipient_csv, template, sender_id = get_recipient_csv_and_template_and_sender_id(job)

    current_app.logger.info("Starting job {} processing {} notifications".format(job_id, job.notification_count))

    for row in recipient_csv.get_rows():
        process_row(row, template, job, service, sender_id=sender_id)

    job_complete(job, start=start)
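# `process_row` is called throughout these task variants but is not shown in
# this excerpt. A minimal sketch of its likely shape, inferred from the older
# inline loop in the `process_job` variant further down (encrypt the row, then
# queue the send task for the template's channel); the exact body here is an
# assumption for illustration, not the real implementation.
def process_row(row, template, job, service, sender_id=None):
    encrypted = encryption.encrypt({
        'template': str(template.id),
        'template_version': job.template_version,
        'job': str(job.id),
        'to': row.recipient,
        'row_number': row.index,
        'personalisation': dict(row.personalisation),
    })
    notification_id = create_uuid()
    send_task = send_sms if template.template_type == SMS_TYPE else send_email
    send_task.apply_async(
        (str(service.id), notification_id, encrypted, datetime.utcnow().strftime(DATETIME_FORMAT)),
        queue='db-sms' if template.template_type == SMS_TYPE else 'db-email'
    )
    # `process_row_from_job` below logs this return value as a notification id
    return notification_id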
def process_incomplete_job(job_id):
    job = dao_get_job_by_id(job_id)

    last_notification_added = dao_get_last_notification_added_for_job_id(job_id)

    if last_notification_added:
        resume_from_row = last_notification_added.job_row_number
    else:
        resume_from_row = -1  # The first row in the csv with a number is row 0

    current_app.logger.info("Resuming job {} from row {}".format(job_id, resume_from_row))

    db_template = dao_get_template_by_id(job.template_id, job.template_version)

    TemplateClass = get_template_class(db_template.template_type)
    template = TemplateClass(db_template.__dict__)

    for row in RecipientCSV(
            s3.get_job_from_s3(str(job.service_id), str(job.id)),
            template_type=template.template_type,
            placeholders=template.placeholders
    ).rows:
        if row.index > resume_from_row:
            process_row(row, template, job, job.service)

    job_complete(job, resumed=True)
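# `job_complete` is also not shown; the tail of the older `process_job`
# variant below suggests what it does, and the tests assert the job ends up
# with status 'finished'. A hedged sketch under those assumptions:
def job_complete(job, resumed=False, start=None):
    finished = datetime.utcnow()
    job.job_status = 'finished'
    job.processing_finished = finished
    dao_update_job(job)
    if resumed:
        current_app.logger.info("Resumed job {} completed at {}".format(job.id, finished))
    else:
        current_app.logger.info(
            "Job {} created at {} started at {} finished at {}".format(
                job.id, job.created_at, start, finished))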
def test_should_process_email_job_if_exactly_on_send_limits(notify_db, notify_db_session, mocker):
    service = sample_service(notify_db, notify_db_session, limit=10)
    template = sample_email_template(notify_db, notify_db_session, service=service)
    job = sample_job(notify_db, notify_db_session, service=service, template=template, notification_count=10)

    mocker.patch('app.celery.tasks.s3.get_job_from_s3', return_value=load_example_csv('multiple_email'))
    mocker.patch('app.celery.tasks.send_email.apply_async')
    mocker.patch('app.encryption.encrypt', return_value="something_encrypted")
    mocker.patch('app.celery.tasks.create_uuid', return_value="uuid")

    process_job(job.id)

    s3.get_job_from_s3.assert_called_once_with(
        str(job.service.id),
        str(job.id)
    )
    job = jobs_dao.dao_get_job_by_id(job.id)
    assert job.job_status == 'finished'
    tasks.send_email.apply_async.assert_called_with(
        (
            str(job.service_id),
            "uuid",
            "something_encrypted",
            "2016-01-01T11:09:00.061258Z"
        ),
        queue="db-email"
    )
def test_should_update_scheduled_jobs_and_put_on_queue(mocker, sample_template):
    mocked = mocker.patch('app.celery.tasks.process_job.apply_async')

    one_minute_in_the_past = datetime.utcnow() - timedelta(minutes=1)
    job = create_job(sample_template, job_status='scheduled', scheduled_for=one_minute_in_the_past)

    run_scheduled_jobs()

    updated_job = dao_get_job_by_id(job.id)
    assert updated_job.job_status == 'pending'
    mocked.assert_called_with([str(job.id)], queue="job-tasks")
def process_job(job_id): start = datetime.utcnow() job = dao_get_job_by_id(job_id) if job.job_status != "pending": return service = job.service if __sending_limits_for_job_exceeded(service, job, job_id): return job.job_status = "in progress" dao_update_job(job) template = Template(dao_get_template_by_id(job.template_id, job.template_version).__dict__) for row_number, recipient, personalisation in RecipientCSV( s3.get_job_from_s3(str(service.id), str(job_id)), template_type=template.template_type, placeholders=template.placeholders, ).enumerated_recipients_and_personalisation: encrypted = encryption.encrypt( { "template": str(template.id), "template_version": job.template_version, "job": str(job.id), "to": recipient, "row_number": row_number, "personalisation": dict(personalisation), } ) if template.template_type == SMS_TYPE: send_sms.apply_async( (str(job.service_id), create_uuid(), encrypted, datetime.utcnow().strftime(DATETIME_FORMAT)), queue="db-sms" if not service.research_mode else "research-mode", ) if template.template_type == EMAIL_TYPE: send_email.apply_async( (str(job.service_id), create_uuid(), encrypted, datetime.utcnow().strftime(DATETIME_FORMAT)), queue="db-email" if not service.research_mode else "research-mode", ) finished = datetime.utcnow() job.job_status = "finished" job.processing_started = start job.processing_finished = finished dao_update_job(job) current_app.logger.info( "Job {} created at {} started at {} finished at {}".format(job_id, job.created_at, start, finished) )
def test_should_update_scheduled_jobs_and_put_on_queue(notify_db, notify_db_session, mocker):
    mocked = mocker.patch('app.celery.tasks.process_job.apply_async')

    one_minute_in_the_past = datetime.utcnow() - timedelta(minutes=1)
    job = sample_job(notify_db, notify_db_session, scheduled_for=one_minute_in_the_past, job_status='scheduled')

    run_scheduled_jobs()

    updated_job = dao_get_job_by_id(job.id)
    assert updated_job.job_status == 'pending'
    mocked.assert_called_with([str(job.id)], queue='process-job')
def test_should_not_create_send_task_for_empty_file(sample_job, mocker):
    mocker.patch('app.celery.tasks.s3.get_job_from_s3', return_value=load_example_csv('empty'))
    mocker.patch('app.celery.tasks.send_sms.apply_async')

    process_job(sample_job.id)

    s3.get_job_from_s3.assert_called_once_with(
        str(sample_job.service.id),
        str(sample_job.id)
    )
    job = jobs_dao.dao_get_job_by_id(sample_job.id)
    assert job.job_status == 'finished'
def process_incomplete_jobs(job_ids):
    jobs = [dao_get_job_by_id(job_id) for job_id in job_ids]

    # reset the processing start time so that the check_job_status scheduled task doesn't pick this job up again
    for job in jobs:
        job.job_status = JOB_STATUS_IN_PROGRESS
        job.processing_started = datetime.utcnow()
        dao_update_job(job)

    current_app.logger.info("Resuming Job(s) {}".format(job_ids))
    for job_id in job_ids:
        process_incomplete_job(job_id)
def test_should_update_all_scheduled_jobs_and_put_on_queue(notify_db, notify_db_session, mocker):
    mocked = mocker.patch('app.celery.tasks.process_job.apply_async')

    one_minute_in_the_past = datetime.utcnow() - timedelta(minutes=1)
    ten_minutes_in_the_past = datetime.utcnow() - timedelta(minutes=10)
    twenty_minutes_in_the_past = datetime.utcnow() - timedelta(minutes=20)

    job_1 = sample_job(notify_db, notify_db_session, scheduled_for=one_minute_in_the_past, job_status='scheduled')
    job_2 = sample_job(notify_db, notify_db_session, scheduled_for=ten_minutes_in_the_past, job_status='scheduled')
    job_3 = sample_job(notify_db, notify_db_session, scheduled_for=twenty_minutes_in_the_past, job_status='scheduled')

    run_scheduled_jobs()

    assert dao_get_job_by_id(job_1.id).job_status == 'pending'
    assert dao_get_job_by_id(job_2.id).job_status == 'pending'
    assert dao_get_job_by_id(job_3.id).job_status == 'pending'

    mocked.assert_has_calls([
        call([str(job_3.id)], queue='process-job'),
        call([str(job_2.id)], queue='process-job'),
        call([str(job_1.id)], queue='process-job')
    ])
def test_should_not_process_email_job_if_would_exceed_send_limits(notify_db, notify_db_session, mocker):
    service = sample_service(notify_db, notify_db_session, limit=0)
    template = sample_email_template(notify_db, notify_db_session, service=service)
    job = sample_job(notify_db, notify_db_session, service=service, template=template)

    mocker.patch('app.celery.tasks.s3.get_job_from_s3')
    mocker.patch('app.celery.tasks.send_email.apply_async')

    process_job(job.id)

    job = jobs_dao.dao_get_job_by_id(job.id)
    assert job.job_status == 'sending limits exceeded'
    assert s3.get_job_from_s3.called is False
    assert tasks.send_email.apply_async.called is False
def process_row_from_job(job_id, job_row_number):
    job = dao_get_job_by_id(job_id)
    db_template = dao_get_template_by_id(job.template_id, job.template_version)

    template = db_template._as_utils_template()

    for row in RecipientCSV(
            s3.get_job_from_s3(str(job.service_id), str(job.id)),
            template_type=template.template_type,
            placeholders=template.placeholders
    ).get_rows():
        if row.index == job_row_number:
            notification_id = process_row(row, template, job, job.service)
            current_app.logger.info(
                "Process row {} for job {} created notification_id: {}".format(
                    job_row_number, job_id, notification_id))
def test_should_not_process_email_job_if_would_exceed_send_limits_inc_today(notify_db, notify_db_session, mocker):
    service = sample_service(notify_db, notify_db_session, limit=1)
    template = sample_email_template(notify_db, notify_db_session, service=service)
    job = sample_job(notify_db, notify_db_session, service=service, template=template)

    sample_notification(notify_db, notify_db_session, service=service, job=job)

    mocker.patch('app.celery.tasks.s3.get_job_from_s3', return_value=load_example_csv('email'))
    mocker.patch('app.celery.tasks.send_email.apply_async')

    process_job(job.id)

    job = jobs_dao.dao_get_job_by_id(job.id)
    assert job.job_status == 'sending limits exceeded'
    assert s3.get_job_from_s3.called is False
    assert tasks.send_email.apply_async.called is False
def test_should_not_process_sms_job_if_would_exceed_send_limits(notify_db, notify_db_session, mocker):
    service = sample_service(notify_db, notify_db_session, limit=9)
    job = sample_job(notify_db, notify_db_session, service=service, notification_count=10)

    mocker.patch('app.celery.tasks.s3.get_job_from_s3', return_value=load_example_csv('multiple_sms'))
    mocker.patch('app.celery.tasks.send_sms.apply_async')
    mocker.patch('app.encryption.encrypt', return_value="something_encrypted")
    mocker.patch('app.celery.tasks.create_uuid', return_value="uuid")

    process_job(job.id)

    job = jobs_dao.dao_get_job_by_id(job.id)
    assert job.job_status == 'sending limits exceeded'
    assert s3.get_job_from_s3.called is False
    assert tasks.send_sms.apply_async.called is False
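# The limit tests above, together with the exactly-on-limit test earlier, pin
# down `__sending_limits_for_job_exceeded`, which never appears in this
# excerpt: a job may use the remaining daily allowance exactly, but going even
# one notification over marks it 'sending limits exceeded' and returns early,
# before the csv is fetched from s3. A sketch consistent with that behaviour;
# the daily-count helper and the `message_limit` attribute name are
# assumptions for illustration:
def __sending_limits_for_job_exceeded(service, job, job_id):
    sent_today = get_notification_count_for_service_today(service.id)  # hypothetical helper
    if sent_today + job.notification_count > service.message_limit:
        job.job_status = 'sending limits exceeded'
        job.processing_finished = datetime.utcnow()
        dao_update_job(job)
        current_app.logger.info(
            "Job {} size {} error. Sending limits {} exceeded".format(
                job_id, job.notification_count, service.message_limit))
        return True
    return False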
def process_incomplete_job(job_id):
    job = dao_get_job_by_id(job_id)

    last_notification_added = dao_get_last_notification_added_for_job_id(job_id)

    if last_notification_added:
        resume_from_row = last_notification_added.job_row_number
    else:
        resume_from_row = -1  # The first row in the csv with a number is row 0

    current_app.logger.info("Resuming job {} from row {}".format(job_id, resume_from_row))

    recipient_csv, template, sender_id = get_recipient_csv_and_template_and_sender_id(job)
    for row in recipient_csv.get_rows():
        if row.index > resume_from_row:
            process_row(row, template, job, job.service, sender_id=sender_id)

    job_complete(job, resumed=True)
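# `get_recipient_csv_and_template_and_sender_id` bundles the lookups that the
# older task versions do inline. A sketch assuming that refactoring: the
# template construction mirrors `_as_utils_template` used in
# `process_row_from_job` above, while the metadata call that yields
# `sender_id` is an assumption for illustration.
def get_recipient_csv_and_template_and_sender_id(job):
    db_template = dao_get_template_by_id(job.template_id, job.template_version)
    template = db_template._as_utils_template()
    contents, metadata = s3.get_job_and_metadata_from_s3(  # hypothetical s3 helper
        str(job.service_id), str(job.id))
    recipient_csv = RecipientCSV(
        contents,
        template_type=template.template_type,
        placeholders=template.placeholders
    )
    return recipient_csv, template, metadata.get('sender_id')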
def process_job(job_id):
    start = datetime.utcnow()
    job = dao_get_job_by_id(job_id)

    if job.job_status != JOB_STATUS_PENDING:
        return

    service = job.service

    if not service.active:
        job.job_status = JOB_STATUS_CANCELLED
        dao_update_job(job)
        current_app.logger.warning(
            "Job {} has been cancelled, service {} is inactive".format(
                job_id, service.id))
        return

    if __sending_limits_for_job_exceeded(service, job, job_id):
        return

    job.job_status = JOB_STATUS_IN_PROGRESS
    job.processing_started = start
    dao_update_job(job)

    # Record StatsD stats to compute SLOs
    job_start = job.scheduled_for or job.created_at
    statsd_client.timing_with_dates("job.processing-start-delay", job.processing_started, job_start)

    db_template = dao_get_template_by_id(job.template_id, job.template_version)

    TemplateClass = get_template_class(db_template.template_type)
    template = TemplateClass(db_template.__dict__)

    current_app.logger.debug(
        "Starting job {} processing {} notifications".format(
            job_id, job.notification_count))

    csv = get_recipient_csv(job, template)
    for row in csv.get_rows():
        process_row(row, template, job, service)

    job_complete(job, start=start)
def process_job(job_id, sender_id=None):
    start = datetime.utcnow()
    job = dao_get_job_by_id(job_id)

    if job.job_status != JOB_STATUS_PENDING:
        return

    service = job.service

    if not service.active:
        job.job_status = JOB_STATUS_CANCELLED
        dao_update_job(job)
        current_app.logger.warning(
            "Job {} has been cancelled, service {} is inactive".format(
                job_id, service.id))
        return

    if __sending_limits_for_job_exceeded(service, job, job_id):
        return

    job.job_status = JOB_STATUS_IN_PROGRESS
    job.processing_started = start
    dao_update_job(job)

    db_template = dao_get_template_by_id(job.template_id, job.template_version)

    TemplateClass = get_template_class(db_template.template_type)
    template = TemplateClass(db_template.__dict__)

    current_app.logger.debug(
        "Starting job {} processing {} notifications".format(
            job_id, job.notification_count))

    for row in RecipientCSV(
            s3.get_job_from_s3(str(service.id), str(job_id)),
            template_type=template.template_type,
            placeholders=template.placeholders,
            max_rows=get_csv_max_rows(service.id),
    ).get_rows():
        process_row(row, template, job, service, sender_id=sender_id)

    job_complete(job, start=start)
def test_should_process_all_sms_job(sample_job, sample_job_with_placeholdered_template, mocker):
    mocker.patch('app.celery.tasks.s3.get_job_from_s3', return_value=load_example_csv('multiple_sms'))
    mocker.patch('app.celery.tasks.send_sms.apply_async')
    mocker.patch('app.encryption.encrypt', return_value="something_encrypted")
    mocker.patch('app.celery.tasks.create_uuid', return_value="uuid")

    process_job(sample_job_with_placeholdered_template.id)

    s3.get_job_from_s3.assert_called_once_with(
        str(sample_job_with_placeholdered_template.service.id),
        str(sample_job_with_placeholdered_template.id)
    )
    assert encryption.encrypt.call_args[0][0]['to'] == '+441234123120'
    assert encryption.encrypt.call_args[0][0]['template'] == str(sample_job_with_placeholdered_template.template.id)
    assert encryption.encrypt.call_args[0][0][
        'template_version'] == sample_job_with_placeholdered_template.template.version
    assert encryption.encrypt.call_args[0][0]['personalisation'] == {'phonenumber': '+441234123120', 'name': 'chris'}
    assert tasks.send_sms.apply_async.call_count == 10
    job = jobs_dao.dao_get_job_by_id(sample_job_with_placeholdered_template.id)
    assert job.job_status == 'finished'