def create_job(service_id):
    # Raises if the service does not exist; the return value is not needed.
    dao_fetch_service_by_id(service_id)

    data = request.get_json()
    data.update({"service": service_id})

    template = dao_get_template_by_id(data['template'])

    errors = unarchived_template_schema.validate({'archived': template.archived})
    if errors:
        raise InvalidRequest(errors, status_code=400)

    data.update({"template_version": template.version})

    job = job_schema.load(data).data

    if job.scheduled_for:
        job.job_status = JOB_STATUS_SCHEDULED

    dao_create_job(job)

    if job.job_status == JOB_STATUS_PENDING:
        process_job.apply_async([str(job.id)], queue="process-job")

    job_json = job_schema.dump(job).data
    job_json['statistics'] = []

    return jsonify(data=job_json), 201
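# Illustrative only: a request body that would satisfy the handler above.
# The exact set of required fields is defined by job_schema, which is not
# shown here, so everything beyond 'template' (read directly above) and
# 'scheduled_for' is an assumption, and the ids are hypothetical.
example_create_job_payload = {
    "template": "d2a304ff-0000-0000-0000-000000000000",  # hypothetical template id
    "original_file_name": "contacts.csv",                # assumed job_schema field
    "notification_count": 100,                           # assumed job_schema field
    "scheduled_for": "2016-01-01T10:00:00",              # optional; flips the job to JOB_STATUS_SCHEDULED
}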
def get_paginated_jobs(service_id, limit_days, statuses, page):
    pagination = dao_get_jobs_by_service_id(
        service_id,
        limit_days=limit_days,
        page=page,
        page_size=current_app.config['PAGE_SIZE'],
        statuses=statuses
    )
    data = job_schema.dump(pagination.items, many=True).data
    for job_data in data:
        statistics = dao_get_notification_outcomes_for_job(service_id, job_data['id'])
        # Each outcome row is a (count, status) tuple.
        job_data['statistics'] = [
            {'status': statistic[1], 'count': statistic[0]}
            for statistic in statistics
        ]
    return {
        'data': data,
        'page_size': pagination.per_page,
        'total': pagination.total,
        'links': pagination_links(
            pagination,
            '.get_jobs_by_service',
            service_id=service_id
        )
    }
def get_paginated_jobs(service_id, limit_days, statuses, page):
    pagination = dao_get_jobs_by_service_id(
        service_id,
        limit_days=limit_days,
        page=page,
        page_size=current_app.config['PAGE_SIZE'],
        statuses=statuses
    )
    data = job_schema.dump(pagination.items, many=True).data
    for job_data in data:
        start = job_data['processing_started']
        # Parse to a naive datetime; tzinfo is stripped here, so no further
        # stripping is needed when comparing below.
        start = dateutil.parser.parse(start).replace(tzinfo=None) if start else None

        if start is None:
            statistics = []
        elif start < midnight_n_days_ago(3):
            # Older jobs: outcomes have been aggregated into the
            # ft_notification_status table.
            statistics = fetch_notification_statuses_for_job(job_data['id'])
        else:
            # Recent jobs: read outcomes from the notifications table.
            statistics = dao_get_notification_outcomes_for_job(service_id, job_data['id'])
        job_data['statistics'] = [
            {'status': statistic.status, 'count': statistic.count}
            for statistic in statistics
        ]
    return {
        'data': data,
        'page_size': pagination.per_page,
        'total': pagination.total,
        'links': pagination_links(
            pagination,
            '.get_jobs_by_service',
            service_id=service_id
        )
    }
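# A minimal sketch of the cutoff helper used above, assuming it returns a
# naive datetime for midnight n days in the past; the real
# midnight_n_days_ago lives elsewhere in the codebase and may differ (for
# example by working in local rather than UTC time).
from datetime import datetime, time, timedelta

def midnight_n_days_ago_sketch(number_of_days):
    # Midnight at the start of the day n days ago, naive so it can be
    # compared against the tzinfo-stripped processing_started above.
    return datetime.combine(datetime.utcnow().date() - timedelta(days=number_of_days), time.min)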
def get_job_by_service_and_job_id(service_id, job_id):
    job = dao_get_job_by_service_id_and_job_id(service_id, job_id)
    statistics = dao_get_notification_outcomes_for_job(service_id, job_id)
    data = job_schema.dump(job).data

    # Each outcome row is a (count, status) tuple.
    data['statistics'] = [
        {'status': statistic[1], 'count': statistic[0]}
        for statistic in statistics
    ]
    return jsonify(data=data)
def create_job(service_id):
    job, errors = job_schema.load(request.get_json())
    if errors:
        return jsonify(result="error", message=errors), 400
    try:
        save_job(job)
    except Exception as e:
        return jsonify(result="error", message=str(e)), 500
    return jsonify(data=job_schema.dump(job).data), 201
def test_job_schema_doesnt_return_notifications(sample_notification_with_job):
    from app.schemas import job_schema

    job = sample_notification_with_job.job
    assert job.notifications.count() == 1

    data, errors = job_schema.dump(job)

    assert not errors
    assert 'notifications' not in data
def create_job(service_id):
    service = dao_fetch_service_by_id(service_id)

    if not service.active:
        raise InvalidRequest("Create job is not allowed: service is inactive", 403)

    data = request.get_json()
    data.update({"service": service_id})

    try:
        data.update(**get_job_metadata_from_s3(service_id, data['id']))
    except KeyError:
        raise InvalidRequest({'id': ['Missing data for required field.']}, status_code=400)

    data['template'] = data.pop('template_id')

    template = dao_get_template_by_id(data['template'])

    if template.template_type == LETTER_TYPE and service.restricted:
        raise InvalidRequest("Create letter job is not allowed for service in trial mode", 403)

    if data.get('valid') != 'True':
        raise InvalidRequest("File is not valid, can't create job", 400)

    errors = unarchived_template_schema.validate({'archived': template.archived})
    if errors:
        raise InvalidRequest(errors, status_code=400)

    data.update({"template_version": template.version})

    job = job_schema.load(data).data

    if job.scheduled_for:
        job.job_status = JOB_STATUS_SCHEDULED

    dao_create_job(job)

    sender_id = data.get('sender_id')

    if job.job_status == JOB_STATUS_PENDING:
        # Second positional argument to apply_async is the task's kwargs.
        process_job.apply_async([str(job.id)], {'sender_id': sender_id}, queue=QueueNames.JOBS)

    job_json = job_schema.dump(job).data
    job_json['statistics'] = []

    return jsonify(data=job_json), 201
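# Illustrative only: the kind of metadata dict get_job_metadata_from_s3
# might return for a previously uploaded file. Only 'template_id', 'valid'
# and 'sender_id' are used by name in the handler above; the other keys and
# all values are assumptions.
# {
#     'template_id': 'd2a304ff-0000-0000-0000-000000000000',
#     'original_file_name': 'contacts.csv',
#     'notification_count': '100',
#     'valid': 'True',   # note: compared as the string 'True' above
#     'sender_id': '8e3d4f2a-0000-0000-0000-000000000000',
# }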
def get_job_for_service(service_id, job_id=None):
    if job_id:
        try:
            job = get_job(service_id, job_id)
            data, errors = job_schema.dump(job)
            return jsonify(data=data)
        except DataError:
            return jsonify(result="error", message="Invalid job id"), 400
        except NoResultFound:
            return jsonify(result="error", message="Job not found"), 404
    else:
        jobs = get_jobs_by_service(service_id)
        data, errors = jobs_schema.dump(jobs)
        return jsonify(data=data)
def post_bulk():
    try:
        request_json = request.get_json()
    except werkzeug.exceptions.BadRequest as e:
        raise BadRequestError(message=f"Error decoding arguments: {e.description}", status_code=400)

    max_rows = current_app.config["CSV_MAX_ROWS"]
    form = validate(request_json, post_bulk_request(max_rows))

    # Exactly one of "rows" or "csv" must be provided.
    if len([source for source in [form.get("rows"), form.get("csv")] if source]) != 1:
        raise BadRequestError(message="You should specify either rows or csv", status_code=400)

    template = validate_template_exists(form["template_id"], authenticated_service)
    check_service_has_permission(template.template_type, authenticated_service.permissions)

    remaining_messages = authenticated_service.message_limit - fetch_todays_total_message_count(authenticated_service.id)

    form["validated_sender_id"] = validate_sender_id(template, form.get("reply_to_id"))

    try:
        if form.get("rows"):
            # "rows" is a list of lists; serialise it so both inputs reach
            # RecipientCSV as a single CSV string.
            output = StringIO()
            writer = csv.writer(output)
            writer.writerows(form["rows"])
            file_data = output.getvalue()
        else:
            file_data = form["csv"]

        recipient_csv = RecipientCSV(
            file_data,
            template_type=template.template_type,
            placeholders=template._as_utils_template().placeholders,
            max_rows=max_rows,
            safelist=safelisted_members(authenticated_service, api_user.key_type),
            remaining_messages=remaining_messages,
        )
    except csv.Error as e:
        raise BadRequestError(message=f"Error converting to CSV: {str(e)}", status_code=400)

    check_for_csv_errors(recipient_csv, max_rows, remaining_messages)

    job = create_bulk_job(authenticated_service, api_user, template, form, recipient_csv)

    return jsonify(data=job_schema.dump(job).data), 201
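# Illustrative only: two equivalent request bodies for post_bulk. Exactly
# one of "rows" or "csv" may be supplied; any further required fields are
# defined by post_bulk_request, which is not shown, so this shape is an
# assumption and the ids are hypothetical.
bulk_payload_with_rows = {
    "template_id": "f1e5a2c4-0000-0000-0000-000000000000",
    "rows": [
        ["phone number", "name"],     # header row naming the template placeholders
        ["+16135550123", "Alice"],
        ["+16135550124", "Bob"],
    ],
}
bulk_payload_with_csv = {
    "template_id": "f1e5a2c4-0000-0000-0000-000000000000",
    "csv": "phone number,name\n+16135550123,Alice\n+16135550124,Bob\n",
}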
def get_paginated_jobs(service_id, limit_days, statuses, page):
    pagination = dao_get_jobs_by_service_id(
        service_id,
        limit_days=limit_days,
        page=page,
        page_size=current_app.config["PAGE_SIZE"],
        statuses=statuses,
    )
    data = job_schema.dump(pagination.items, many=True).data
    for job_data in data:
        start = job_data["processing_started"]
        # Parse to a naive datetime; tzinfo is stripped here, so no further
        # stripping is needed when comparing below.
        start = dateutil.parser.parse(start).replace(tzinfo=None) if start else None

        if start is None:
            statistics = []
        elif start < midnight_n_days_ago(3):
            # Older jobs: outcomes have been aggregated into the
            # ft_notification_status table.
            statistics = fetch_notification_statuses_for_job(job_data["id"])
        else:
            # Recent jobs: read outcomes from the notifications table.
            statistics = dao_get_notification_outcomes_for_job(service_id, job_data["id"])
        job_data["statistics"] = [
            {"status": statistic.status, "count": statistic.count}
            for statistic in statistics
        ]
    return {
        "data": data,
        "page_size": pagination.per_page,
        "total": pagination.total,
        "links": pagination_links(pagination, ".get_jobs_by_service", service_id=service_id),
    }
def get_paginated_jobs(service_id, limit_days, statuses, page):
    pagination = dao_get_jobs_by_service_id(
        service_id,
        limit_days=limit_days,
        page=page,
        page_size=current_app.config['PAGE_SIZE'],
        statuses=statuses
    )
    data = job_schema.dump(pagination.items, many=True).data
    for job_data in data:
        statistics = dao_get_notification_outcomes_for_job(service_id, job_data['id'])
        job_data['statistics'] = [
            {'status': statistic[1], 'count': statistic[0]}
            for statistic in statistics
        ]
    return {
        'data': data,
        'page_size': pagination.per_page,
        'total': pagination.total,
        'links': pagination_links(
            pagination,
            '.get_jobs_by_service',
            service_id=service_id
        )
    }
def create_job(service_id):
    service = dao_fetch_service_by_id(service_id)

    if not service.active:
        raise InvalidRequest("Create job is not allowed: service is inactive", 403)

    data = request.get_json()
    data.update({"service": service_id})

    template = dao_get_template_by_id(data['template'])

    if template.template_type == LETTER_TYPE and service.restricted:
        raise InvalidRequest("Create letter job is not allowed for service in trial mode", 403)

    errors = unarchived_template_schema.validate({'archived': template.archived})
    if errors:
        raise InvalidRequest(errors, status_code=400)

    data.update({"template_version": template.version})

    job = job_schema.load(data).data

    if job.scheduled_for:
        job.job_status = JOB_STATUS_SCHEDULED

    dao_create_job(job)

    if job.job_status == JOB_STATUS_PENDING:
        process_job.apply_async([str(job.id)], queue=QueueNames.JOBS)

    job_json = job_schema.dump(job).data
    job_json['statistics'] = []

    return jsonify(data=job_json), 201
def get_letter_jobs():
    letter_jobs = dao_get_all_letter_jobs()
    data = job_schema.dump(letter_jobs, many=True).data
    return jsonify(data=data), 200