# Beispiel #1
# 0
def create_job(service_id):
    """Create a job for the given service from the posted JSON payload.

    Validates that the service exists and that the referenced template is
    not archived, persists the job, and queues it for processing unless it
    is scheduled for later.  Returns a 201 response with the serialized job.
    """
    # Raises if the service does not exist; the return value is not needed.
    dao_fetch_service_by_id(service_id)

    payload = request.get_json()
    payload["service"] = service_id

    template = dao_get_template_by_id(payload['template'])

    # Reject jobs that reference an archived template.
    validation_errors = unarchived_template_schema.validate({'archived': template.archived})
    if validation_errors:
        raise InvalidRequest(validation_errors, status_code=400)

    payload["template_version"] = template.version

    job = job_schema.load(payload).data

    # Jobs with a future start time are parked as scheduled instead of pending.
    if job.scheduled_for:
        job.job_status = JOB_STATUS_SCHEDULED

    dao_create_job(job)

    if job.job_status == JOB_STATUS_PENDING:
        # NOTE(review): other create-job variants in this file enqueue on
        # QueueNames.JOBS — confirm whether "process-job" is still current.
        process_job.apply_async([str(job.id)], queue="process-job")

    job_json = job_schema.dump(job).data
    job_json['statistics'] = []

    return jsonify(data=job_json), 201
# Beispiel #2
# 0
def create_bulk_job(service, api_key, template, form, recipient_csv):
    """Build and persist a job for a bulk send submitted via the API.

    Uploads the recipient CSV to S3, assembles the job payload, stores the
    job, and enqueues it for processing unless it is scheduled for later.
    Returns the created job.
    """
    csv_upload_id = upload_job_to_s3(service.id, recipient_csv.file_data)
    validated_sender = form["validated_sender_id"]

    job_fields = {
        "id": csv_upload_id,
        "service": service.id,
        "template": template.id,
        "notification_count": len(recipient_csv),
        "template_version": template.version,
        "job_status": JOB_STATUS_PENDING,
        "original_file_name": form.get("name"),
        "created_by": current_app.config["NOTIFY_USER_ID"],
        "api_key": api_key.id,
        # Normalize the sender id into a UUID; falsy values become None.
        "sender_id": uuid.UUID(str(validated_sender)) if validated_sender else None,
    }

    # A scheduled_for value defers the job instead of running it immediately.
    scheduled_for = form.get("scheduled_for")
    if scheduled_for:
        job_fields["job_status"] = JOB_STATUS_SCHEDULED
        job_fields["scheduled_for"] = scheduled_for

    job = job_schema.load(job_fields).data
    dao_create_job(job)

    if job.job_status == JOB_STATUS_PENDING:
        process_job.apply_async([str(job.id)], queue=QueueNames.JOBS)

    return job
def run_scheduled_jobs():
    """Move scheduled jobs to pending and enqueue each for processing.

    Raises:
        SQLAlchemyError: re-raised after logging if the database call fails.
    """
    try:
        for job in dao_set_scheduled_jobs_to_pending():
            process_job.apply_async([str(job.id)], queue="process-job")
            current_app.logger.info("Job ID {} added to process job queue".format(job.id))
    except SQLAlchemyError:
        # Dropped the unused `as e` binding: logger.exception already
        # records the active exception and its traceback.
        current_app.logger.exception("Failed to run scheduled jobs")
        raise
def run_scheduled_jobs():
    """Promote scheduled jobs to pending and put each one on the jobs queue.

    Raises:
        SQLAlchemyError: re-raised after logging if the database call fails.
    """
    try:
        newly_pending = dao_set_scheduled_jobs_to_pending()
        for scheduled_job in newly_pending:
            task_args = [str(scheduled_job.id)]
            process_job.apply_async(task_args, queue=QueueNames.JOBS)
            current_app.logger.info("Job ID {} added to process job queue".format(scheduled_job.id))
    except SQLAlchemyError:
        # Log with traceback, then propagate so the scheduler sees the failure.
        current_app.logger.exception("Failed to run scheduled jobs")
        raise
# Beispiel #5
# 0
def create_job(service_id):
    """Create a job from CSV metadata stored in S3 for an active service.

    Returns a 201 response with the serialized job, or raises
    InvalidRequest for inactive services, trial-mode letter jobs, invalid
    files, missing upload metadata, or archived templates.
    """
    service = dao_fetch_service_by_id(service_id)

    # Guard: inactive services may not create jobs.
    if not service.active:
        raise InvalidRequest("Create job is not allowed: service is inactive ",
                             403)

    data = request.get_json()
    data["service"] = service_id

    # Merge the upload metadata stored alongside the CSV in S3; a missing
    # 'id' key surfaces as a validation error rather than a server error.
    try:
        data.update(**get_job_metadata_from_s3(service_id, data['id']))
    except KeyError:
        raise InvalidRequest({'id': ['Missing data for required field.']},
                             status_code=400)

    data['template'] = data.pop('template_id')
    template = dao_get_template_by_id(data['template'])

    if template.template_type == LETTER_TYPE and service.restricted:
        raise InvalidRequest(
            "Create letter job is not allowed for service in trial mode ", 403)

    # The upload pipeline marks usable files with the string 'True'.
    if data.get('valid') != 'True':
        raise InvalidRequest("File is not valid, can't create job", 400)

    archive_errors = unarchived_template_schema.validate(
        {'archived': template.archived})
    if archive_errors:
        raise InvalidRequest(archive_errors, status_code=400)

    data["template_version"] = template.version

    job = job_schema.load(data).data
    if job.scheduled_for:
        job.job_status = JOB_STATUS_SCHEDULED

    dao_create_job(job)

    sender_id = data.get('sender_id')
    if job.job_status == JOB_STATUS_PENDING:
        # Second positional argument to apply_async is the task kwargs dict.
        process_job.apply_async([str(job.id)], {'sender_id': sender_id},
                                queue=QueueNames.JOBS)

    response_job = job_schema.dump(job).data
    response_job['statistics'] = []

    return jsonify(data=response_job), 201
# Beispiel #6
# 0
def create_job(service_id):
    """Create a job for an active service from the posted JSON body.

    Rejects inactive services, letter jobs for trial-mode services, and
    archived templates; otherwise persists the job and enqueues it unless
    it is scheduled for later.  Returns a 201 response with the job.
    """
    service = dao_fetch_service_by_id(service_id)
    if not service.active:
        raise InvalidRequest("Create job is not allowed: service is inactive ",
                             403)

    body = request.get_json()
    body["service"] = service_id

    template = dao_get_template_by_id(body['template'])

    # Trial-mode (restricted) services cannot create letter jobs.
    if template.template_type == LETTER_TYPE and service.restricted:
        raise InvalidRequest(
            "Create letter job is not allowed for service in trial mode ", 403)

    template_errors = unarchived_template_schema.validate(
        {'archived': template.archived})
    if template_errors:
        raise InvalidRequest(template_errors, status_code=400)

    body["template_version"] = template.version

    job = job_schema.load(body).data
    if job.scheduled_for:
        job.job_status = JOB_STATUS_SCHEDULED

    dao_create_job(job)

    if job.job_status == JOB_STATUS_PENDING:
        process_job.apply_async([str(job.id)], queue=QueueNames.JOBS)

    serialized = job_schema.dump(job).data
    serialized['statistics'] = []

    return jsonify(data=serialized), 201