def create_job():
    job_from_request = get_json_from_request('job')
    validation_result, validation_errors = valid_job_submission(job_from_request)
    if not validation_result:
        return jsonify(
            error="Invalid JSON",
            error_details=validation_errors
        ), 400

    job = Job(
        name=job_from_request['name'],
        service_id=job_from_request['serviceId'],
        created_at=datetime.utcnow()
    )
    if "filename" in job_from_request:
        job.filename = job_from_request['filename']

    try:
        db.session.add(job)
        db.session.commit()
        return jsonify(job=job.serialize()), 201
    except IntegrityError as e:
        db.session.rollback()
        abort(400, e.orig)
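# A minimal sketch (not from the source) of the JSON body create_job() above
# expects; the field names are taken from the handler, the values are illustrative.
example_create_job_payload = {
    "job": {
        "name": "send welcome emails",
        "serviceId": 1234,           # must reference an existing service
        "filename": "welcome.csv"    # optional; copied onto the job when present
    }
}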
def create_job(
    template,
    notification_count=1,
    created_at=None,
    job_status='pending',
    scheduled_for=None,
    processing_started=None,
    original_file_name='some.csv',
    archived=False
):
    data = {
        'id': uuid.uuid4(),
        'service_id': template.service_id,
        'service': template.service,
        'template_id': template.id,
        'template_version': template.version,
        'original_file_name': original_file_name,
        'notification_count': notification_count,
        'created_at': created_at or datetime.utcnow(),
        'created_by': template.created_by,
        'job_status': job_status,
        'scheduled_for': scheduled_for,
        'processing_started': processing_started,
        'archived': archived
    }
    job = Job(**data)
    dao_create_job(job)
    return job
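# A hedged usage sketch for the create_job() factory above; 'sample_template'
# is an assumed fixture, not part of the source.
def test_create_job_defaults(sample_template):
    job = create_job(sample_template)
    assert job.job_status == 'pending'
    assert job.notification_count == 1
    assert job.original_file_name == 'some.csv'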
def new_job():
    form = JobForm()
    form.customer_name.choices = [("", "Select Name")] + [
        (c.id, c.name) for c in Customer.query.order_by('name')
    ]
    form.project_number.choices = [("", "Select Number")] + [
        (p.id, p.number) for p in Project.query.order_by('number')
    ]
    form.project_name.choices = [("", "Select Name")] + [
        (p.id, p.name) for p in Project.query.order_by('name')
    ]
    if form.validate_on_submit():
        job = Job(
            project_id=form.project_name.data,
            phase=form.phase.data,
            job_type=form.job_type.data
        )
        db.session.add(job)
        db.session.commit()
        flash(
            f'Job {job.project_name()} {job.phase} {job.job_type} '
            'has been added to the database.'
        )
        return redirect(url_for('main.index'))
    return render_template('new_job.html', title='New Job', form=form)
def sample_email_job(notify_db, notify_db_session, service=None, template=None):
    if service is None:
        service = sample_service(notify_db, notify_db_session)
    if template is None:
        template = sample_email_template(notify_db, notify_db_session, service=service)
    job_id = uuid.uuid4()
    data = {
        'id': job_id,
        'service_id': service.id,
        'service': service,
        'template_id': template.id,
        'template_version': template.version,
        'original_file_name': 'some.csv',
        'notification_count': 1,
        'created_by': service.created_by
    }
    job = Job(**data)
    dao_create_job(job)
    return job
def sample_job(notify_db, notify_db_session, service=None, template=None,
               notification_count=1, created_at=None, job_status='pending',
               scheduled_for=None, processing_started=None,
               original_file_name='some.csv', archived=False):
    if service is None:
        service = create_service(check_if_service_exists=True)
    if template is None:
        template = create_template(service=service)
    data = {
        'id': uuid.uuid4(),
        'service_id': service.id,
        'service': service,
        'template_id': template.id,
        'template_version': template.version,
        'original_file_name': original_file_name,
        'notification_count': notification_count,
        'created_at': created_at or datetime.utcnow(),
        'created_by': service.created_by,
        'job_status': job_status,
        'scheduled_for': scheduled_for,
        'processing_started': processing_started,
        'archived': archived
    }
    job = Job(**data)
    dao_create_job(job)
    return job
def save_city_given_list_of_posts(posts, month, year):
    # Load all cities into a dict for fast lookup.
    all_cities = {
        city: id for (city, id) in db.session.query(City.name, City.id).all()
    }
    unmatched_jobs = []
    # posts is a list of tuples (html_post, plain_post, hackernews_id)
    for (html_post, plain_post, hn_id) in posts:
        at_least_one_match = False
        for city in all_cities:
            maybe_city = plain_post.find(city)
            if maybe_city != -1:
                # Require a non-letter (or end of string) after the city name,
                # so e.g. 'York' does not match inside 'Yorkshire'.
                if re.match(re.escape(city) + '([^a-z]|$)', plain_post[maybe_city:]):
                    new_job = Job(str(html_post), month, year, hn_id, all_cities[city])
                    db.session.add(new_job)
                    at_least_one_match = True
        if not at_least_one_match:
            unmatched_jobs.append((plain_post, hn_id))
    db.session.commit()
    db.session.close()
    if not os.path.exists('debug'):
        os.makedirs('debug')
    with open('debug/no_matches-{0}-{1}.txt'.format(month, year), 'w',
              encoding='utf-8') as f:
        for (job, hn_id) in unmatched_jobs:
            f.write(hn_id)
            f.write(job)
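# A small illustration (an assumption, not from the source) of why the regex
# boundary check above is needed on top of str.find(): 'York' should match
# 'York,' but not the prefix of 'Yorkshire'.
def city_matches(city, text):
    pos = text.find(city)
    return pos != -1 and bool(re.match(re.escape(city) + '([^a-z]|$)', text[pos:]))

assert city_matches('York', 'Hiring in York, onsite')
assert not city_matches('York', 'Hiring in Yorkshire only')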
def started_job(job_id):
    command = request.json['command']
    job = Job.objects(id=job_id).first()
    job.status = 'started'
    job.command = command
    job.save()
    return jsonify({'message': 'ok'})
def list_jobs():
    if request.method == 'POST':
        data_dict = json.loads(request.data)
        job_object = Job(
            events=data_dict.get('events'),
            name=data_dict.get('name'),
            out_dir=data_dict.get('out_dir'),
            log_dir=data_dict.get('log_dir'),
            new_file=data_dict.get('new_file')
        )
        job_object.save()
        if data_dict.get('start') is True:
            job_object.start()
            started = True
        else:
            started = False
        return jsonify({'job_id': str(job_object.job_id), 'started': started})
    else:
        # Needs testing, potentially bad practice.
        return jsonify({"jobs": [job.get() for job in Job.objects()]})
def batch_import_route():
    # only for users who have custom domains
    if not current_user.verified_custom_domains():
        flash("Alias batch import is only available for custom domains", "warning")

    if current_user.disable_import:
        flash(
            "you cannot use the import feature, please contact SimpleLogin team",
            "error",
        )
        return redirect(url_for("dashboard.index"))

    batch_imports = BatchImport.filter_by(user_id=current_user.id).all()

    if request.method == "POST":
        alias_file = request.files["alias-file"]
        file_path = random_string(20) + ".csv"
        file = File.create(user_id=current_user.id, path=file_path)
        s3.upload_from_bytesio(file_path, alias_file)
        Session.flush()
        LOG.d("upload file %s to s3 at %s", file, file_path)

        bi = BatchImport.create(user_id=current_user.id, file_id=file.id)
        Session.flush()
        LOG.d("Add a batch import job %s for %s", bi, current_user)

        # Schedule batch import job
        Job.create(
            name=JOB_BATCH_IMPORT,
            payload={"batch_import_id": bi.id},
            run_at=arrow.now(),
        )
        Session.commit()

        flash(
            "The file has been uploaded successfully and the import will start shortly",
            "success",
        )
        return redirect(url_for("dashboard.batch_import_route"))

    return render_template("dashboard/batch_import.html", batch_imports=batch_imports)
def insert_one(self, job):
    """Insert a New Job"""
    # Jobs that run a bounded number of times (once, after a delay, or at a
    # fixed time) start out pending; everything else runs as a daemon.
    runs_once = job["interval"]["type"] in (self.ONCE, self.AFTER, self.ONCE_AT)
    initial_status = self.PENDING if runs_once else self.DAEMON

    record = Job(
        name=job["name"],
        status=initial_status,
        last_status=initial_status,
        executor=job["executor"],
        parameters=json.dumps(job["parameters"]),
        interval=json.dumps(job["interval"]),
        retry_count=job.get("retry_count", 0),
        trials=job.get("trials", 5),
        priority=job.get("priority", 1),
        last_run=job.get("last_run"),
        run_at=job["run_at"] if "run_at" in job else self.get_run_at(job["interval"]),
        locked=job.get("locked", False),
    )
    record.save()
    return False if record.pk is None else record
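# A sketch of the job dict insert_one() above expects; only the keys the
# method reads are shown, and the interval schema details are an assumption.
job_spec = {
    "name": "nightly-report",
    "executor": "shell",
    "parameters": {"cmd": "python report.py"},
    "interval": {"type": "once_at"},  # ONCE/AFTER/ONCE_AT jobs start as PENDING
    "retry_count": 0,                 # optional; defaults shown in the method
    "trials": 5,
    "priority": 1,
}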
def finished_job(job_id):
    job = Job.objects(id=job_id).first()
    job.status = 'finished'
    job.save()
    user = User.objects(id=job.user_id).first()
    mailer.notify(job, user, job_id)
    return jsonify({'message': 'ok'})
def search():
    if not g.search_form.validate():
        return redirect(url_for('jobs'))
    jobs_results, total = Job.search(g.search_form.q.data)
    return render_template('search.html', title='Search',
                           jobs_results=jobs_results, total=total)
def create_and_start_job(sim_id, form, extra_file=None):
    job_attrs = dict()
    data = {
        'simulator': sim_id,
        'status': 'created',
        'user_id': current_user.id,
        'attrs': job_attrs
    }
    for name, value in form.data.items():
        if name == 'name':
            data['name'] = value
        elif name == 'privacy':
            data['privacy'] = value
        elif name == 'sequence_identifier':
            # NOTE: sequence identifier is for artificialfastqgenerator where
            # this field must be the header to a sequence and it must start with >
            job_attrs[name] = str(value)[1:]
        elif isinstance(value, bool):
            # NOTE: let 1 equal true and 0 false
            job_attrs[name] = '1' if value else '0'
        elif name != 'csrf_token' and name != 'file' and name != 'submit':
            job_attrs[name] = str(value)
    if extra_file is not None:
        job_attrs['extra_file'] = 'extra_file'

    # Apply the job's tags to the dictionary to be stored in the database.
    for key in ['ref_db', 'genomics', 'tech', 'variants']:
        data[key] = SIMULATORS[sim_id][key]

    job = Job(**data)
    job.save()

    # If there is no job name, then set the job id as the job name.
    if job.name == '':
        job.name = str(job.id)
        job.save()

    client = arc.Client('newriver1.arc.vt.edu', arc.ARC_USER)
    remote_path = arc.get_remote_path(job.id)
    client.run('mkdir {}'.format(remote_path))

    # The path where all output files are written to is the project's directory.
    # NOTE: not all simulators have an input file (ex. xs)
    if hasattr(form, 'file'):
        project_dir = os.path.join(arc.ARC_DIR, str(job.id))
        filename = form.file.data.filename
        tmp_path = os.path.join('/tmp', filename)
        form.file.data.save(tmp_path)
        client.put_file(tmp_path, os.path.join(remote_path, 'input.fasta'))
    if extra_file is not None:
        tmp_path = os.path.join('/tmp', 'extra_file')
        extra_file.data.save(tmp_path)
        client.put_file(tmp_path, os.path.join(remote_path, 'extra_file'))
    client.close()

    start_job.apply_async(args=[str(job.id)])
def notify_db_session(request):
    meta = MetaData(bind=db.engine, reflect=True)

    # Set up dummy org, with a service and a job
    org = Organisation(id=1234, name="org test")
    token = Token(id=1234, token="1234", type='admin')
    service = Service(
        id=1234,
        name="service test",
        created_at=datetime.utcnow(),
        token=token,
        active=True,
        restricted=False,
        limit=100
    )
    job = Job(id=1234, name="job test", created_at=datetime.utcnow(), service=service)
    notification = Notification(
        id=1234,
        to="phone-number",
        message="this is a message",
        job=job,
        status="created",
        method="sms",
        created_at=datetime.utcnow()
    )

    # Set up a dummy user for tests
    user = User(
        id=1234,
        email_address="*****@*****.**",
        mobile_number="+449999234234",
        password=generate_password_hash('valid-password'),
        active=True,
        created_at=datetime.utcnow(),
        updated_at=datetime.utcnow(),
        password_changed_at=datetime.utcnow(),
        role='admin'
    )
    service.users.append(user)

    db.session.add(token)
    db.session.add(org)
    db.session.add(service)
    db.session.add(notification)
    db.session.add(job)
    db.session.add(user)
    db.session.commit()

    def teardown():
        db.session.remove()
        for tbl in reversed(meta.sorted_tables):
            db.engine.execute(tbl.delete())

    request.addfinalizer(teardown)
def company(company_name):
    company = Startup.query.filter_by(company_name=company_name).first_or_404()
    # for now, but I have to see how competitors treat this
    form = PostNewJobForm()
    is_admin = (current_user.startup.first() == company)
    if is_admin:
        jobs = company.jobs
        no_jobs = (len([job for job in jobs]) == 0)
    else:
        jobs, no_jobs = None, True

    logo_pic = company.logo_data is not None
    logo_url = None
    if logo_pic:
        url = company.logo_data.split('/')
        logo_url = "/{0}/{1}".format(url[-2], url[-1])

    if form.validate_on_submit():
        kwargs = {
            'name': form.name.data,
            'job_description': form.job_description.data,
            'offer_price': form.offer_price.data,
            'job_type': form.job_type.data,
            'estimated_developement_time': form.estimated_developement_time.data,
            'equity_job': form.equity_job.data
        }
        job = Job(**kwargs)
        job.post_job_time()
        db.session.add(job)
        company.create_job(job)
        db.session.commit()
        flash('Successfully created new job')
        return redirect(url_for('company', company_name=company_name))

    kwargs = {
        'title': company_name,
        'company_name': company_name,
        'company': company,
        'is_admin': is_admin,
        'jobs': jobs,
        'no_jobs': no_jobs,
        'logo_url': logo_url,
        'form': form
    }
    return render_template('company.html', **kwargs)
def make_dummy_job():
    # The script is a Nix expression; a random hex string is spliced in so
    # each dummy job gets a distinct build script.
    job = Job(complete=False, error=False, visited=datetime.now(),
              buildid='buildid', timing=0.0,
              script="""
with import <nixpkgs> {};
stdenv.mkDerivation {
  name = "example";
  phases = "installPhase";
  installPhase = ''
    echo "Random String: """ + uuid.uuid4().hex + """\";
    mkdir $out
    touch $out/hello
  '';
}
""")
    job.save()
def importComments(_fileList):
    # load all cities in a dict for faster lookup
    dd = {
        city: id for (city, id) in db.session.query(City.name, City.id).all()
    }
    s = bs(open(os.path.join('hn-pages', _fileList)), 'html.parser')
    t = s.title.get_text()
    title = t[t.index('(') + 1:t.index(')')].split(' ')
    month = monthify(title[0])
    year = title[1]

    # let's dig through this thing
    hnmain = s.find('table')
    outerTable = hnmain.findAll('tr')[3]
    innerTable = outerTable.findAll('table')[1]
    # inside innerTable, every <tr> is a post,
    # but the actual content is nested in yet another <tr>
    posts = innerTable.findAll('tr', recursive=False)

    with open('hn-pages/debug-{0}-{1}.txt'.format(year, month), 'w',
              encoding='utf-8') as f:
        for p in posts:
            c = p.find('tr')
            # if the post is not a reply, process it
            if c.find(lambda tag: tag.name == 'img' and int(tag['width']) == 0):
                content = c.find('span', class_='comment')
                plain = content.get_text()
                found_flag = False
                for city in dd:
                    position = plain.find(city)
                    if position != -1:
                        found_flag = True
                        # Require a non-letter (or end of string) after the
                        # city name, as in save_city_given_list_of_posts.
                        if re.match(re.escape(city) + '([^a-z]|$)', plain[position:]):
                            # get the original HN id and push the new job
                            hn_id = c.findAll('a')[2]['href'].split('=')[1]
                            newJob = Job(str(content), month, year, hn_id, dd[city])
                            db.session.add(newJob)
                        else:
                            f.write('--DITCHED BY REGEX--\n')
                            f.write(plain[position:position + 50])
                            f.write('\n\n\n')
                if not found_flag:
                    f.write('--NO CITY FOUND FOR--\n')
                    f.write(plain)
                    f.write('\n\n\n')
        # end of for p in posts
        f.write('Imported successfully!')
    db.session.commit()
    db.session.close()
def past_jobs_pretty():
    past_jobs = []
    for job in Job.get_past_jobs():
        past_jobs.append((
            str(job.id),
            '{} on {}, {}'.format(
                job.address,
                timestamp_pretty(job.date, show_time=False),
                job.time
            )
        ))
    return past_jobs
def save():
    try:
        # read the job fields from the query-string parameters
        # (an earlier revision read the same fields from request headers)
        jobtitle = request.args.get('jobtitle')
        company = request.args.get('company')
        city = request.args.get('city')
        state = request.args.get('state')
        zip = request.args.get('zip')
        descr = request.args.get('descr')
        jstatus = request.args.get('jstatus')
        link = request.args.get('link')
        duties = request.args.get('duties')
        requi = request.args.get('requi')
        post_date = request.args.get('post_date')

        # reject the request only when every field is missing
        if not any([jobtitle, company, city, state, zip, descr, jstatus,
                    link, duties, requi, post_date]):
            return jsonify({'error #301': 'Invalid params'})

        # create a job record
        job = Job(jobtitle=jobtitle, company=company, city=city, state=state,
                  zip=zip, descr=descr, jstatus=jstatus, link=link,
                  duties=duties, requi=requi, post_date=post_date)

        # add to stage and commit to db
        db.session.add(job)
        db.session.commit()
        return jsonify({'success': 'job saved'})
    except Exception:
        return jsonify({'error #303': 'job could not be saved'})
def initiate_video():
    if request.method == 'POST':
        data = request.form
        if 'video_name' not in data or 'video_url' not in data:
            raise InvalidUsage('Data must contain video_name and video_url')
        video = Video(name=data['video_name'], url=data['video_url'])
        db.session.add(video)
        db.session.commit()
        task = split_video.delay(video.id)
        job = Job(desc=SPLIT_JOB_DESC, celery_id=task.id, video=video)
        db.session.add(job)
        db.session.commit()
        return redirect(url_for('jobs', video_id=video.id))
    return render_template('submit_job.html')
def addjob():
    if request.method == 'POST':
        data = {}
        req = request.form
        if current_user.is_authenticated:
            user = User.query.get(current_user.get_id())
            if not user.user_roll() == "Employer":
                return "You Don't Have Permission To Access This Page"
        data['title'] = req['title']
        data['description'] = req['description']
        # data['email'] = req['email']
        data['type'] = req['type']
        data['category'] = req['category']
        data['experience'] = req['experience']
        data['salary'] = req['salary']
        data['clevel'] = req['clevel']
        data['industry'] = req['industry']
        data['qualification'] = req['qualification']
        data['deadline'] = req['deadline']
        data['country'] = req['country']
        data['city'] = req['city']
        data['address'] = req['address']
        data['openings'] = req['openings']

        fulltime = (data['type'] == 'true')
        deadline = datetime.strptime(data['deadline'], '%Y-%m-%d').date()
        eid = Employer.query.filter_by(user_id=current_user.get_id()).first_or_404()

        newjob = Job(
            title=data['title'],
            jobdesc=data['description'],
            exp=data['experience'],
            qualification=data['qualification'],
            career_level=data['clevel'],
            location=data['address'],
            fulltime=fulltime,
            city=data['city'],
            salary=data['salary'],
            user_id=eid.id,
            category=data['category'],
            openings=data['openings'],
            expiry_date=deadline
        )
        db.session.add(newjob)
        db.session.commit()
        print("-----new job added-----")
        return redirect(url_for('manageJob'))
def add_job(request):
    try:
        request_data = json.loads(json.dumps(request.data))
        tags = request_data.get('tags').split(",")
        Job(
            company_image=request_data.get('companyImage'),
            company_name=request_data.get('companyName'),
            job_title=request_data.get('jobTitle'),
            job_location=request_data.get('jobLocation'),
            job_link=request_data.get('jobLink'),
            job_content=request_data.get('jobContent'),
            tags=tags,
            automated=False
        ).save()
        return Response({"saved": 1})
    except Exception:
        return Response({"saved": 0})
def sample_letter_job(sample_letter_template):
    service = sample_letter_template.service
    data = {
        "id": uuid.uuid4(),
        "service_id": service.id,
        "service": service,
        "template_id": sample_letter_template.id,
        "template_version": sample_letter_template.version,
        "original_file_name": "some.csv",
        "notification_count": 1,
        "created_at": datetime.utcnow(),
        "created_by": service.created_by,
    }
    job = Job(**data)
    dao_create_job(job)
    return job
def create_job_post():
    form = JobForm()
    if form.validate_on_submit():
        post_image = ""
        if form.postImage.data:
            post_image = save_post_image(form.postImage.data)
        post = Job(Title=form.title.data, Content=form.content.data,
                   ImageFile=post_image)
        db.session.add(post)
        db.session.commit()
        flash("Successfully Posted!", category='success')
        return redirect(url_for('main.job'))
    return render_template('job/create-job-post.html', form=form,
                           title="Make a story")
def newJob():
    form = JobForm()
    if request.method == "POST" and form.validate_on_submit():
        # create a job object and add it to the database
        job = Job(
            request.form['jTitle'],
            request.form['jDesc'],
            request.form['category'],
            request.form['company'],
            request.form['jLoc'],
            datetime.now()
        )
        db.session.add(job)
        db.session.commit()
        flash('Job Added', 'success')
        return redirect(url_for("home"))
    return render_template('', form=form)
def test_basic_functionality(self):
    jobStatusId = JobStatus.query.filter_by(label='running').first().statusId
    self.assertIsNotNone(jobStatusId)
    testLabel = 'test job 1'
    db.session.add(Job(label=testLabel, statusId=jobStatusId))
    db.session.commit()
    testJob = Job.query.filter_by(label=testLabel).first()
    self.assertIsNotNone(testJob)
    self.assertEqual(testJob.status.label, 'running')
    self.assertEqual(testJob.type, 'job')
def test_subcat_cat(self):
    cat = Cat(name='Web Development')
    sub = Sub(name='Frontend')
    job = Job(name='A')
    db.session.add_all([cat, sub, job])
    db.session.commit()
    job.add_cat(cat)
    cat.assign_sub_cat(sub)
    db.session.commit()
    job.add_sub_cat(sub)
    db.session.commit()
    self.assertTrue(sub in job.subs)
def sample_email_job(notify_db, notify_db_session, service=None, template=None):
    if service is None:
        service = create_service(check_if_service_exists=True)
    if template is None:
        template = sample_email_template(notify_db, notify_db_session, service=service)
    job_id = uuid.uuid4()
    data = {
        "id": job_id,
        "service_id": service.id,
        "service": service,
        "template_id": template.id,
        "template_version": template.version,
        "original_file_name": "some.csv",
        "notification_count": 1,
        "created_by": service.created_by,
    }
    job = Job(**data)
    dao_create_job(job)
    return job
def create(matrixA, matrixB):
    job = Job(matrixA, matrixB)
    db.session.add(job)
    db.session.commit()
    job.loadMatrices(matrixA, matrixB)
    return job
def submit(self, run, analysis, cfg):
    ana_name = cfg['name']
    ana_template = cfg['template']
    ana_context = cfg.get('context', {})
    ana_post_exec = cfg.get('post_exec', None)

    logger.info('Create job for run %d, analysis %s', run.RunNumber, ana_name)
    job = Job(Run=run, Analysis=analysis)
    job.save()

    # Create directory
    job_dir = os.path.join(
        settings.CAF_OUTPUT,
        "%08d_%s_%d" % (run.RunNumber, ana_name, job.id)
    )
    try:
        os.makedirs(job_dir)
    except OSError:
        logger.exception("Could not create directory %s", job_dir)
        job.delete()
        return

    # Copy files for job
    tmpl = get_template('joboptions/%s.py.tmpl' % ana_template)
    ana_context.update({'files': run.Files.all()})
    jobo = tmpl.render(ana_context)
    jobo_path = os.path.join(job_dir, 'jobo.py')
    with open(jobo_path, 'w') as f:
        f.write(jobo)

    post_exec = ''
    if ana_post_exec:
        post_exec = get_template('%s.sh.tmpl' % ana_post_exec).render(
            {'BASE_DIR': settings.BASE_DIR}
        )

    launcher = get_template('launcher.sh.tmpl').render({
        'athena_version': settings.CAF_ATHENA,
        'asetup': settings.CAF_ASETUP,
        'post_exec': post_exec,
        'BASE_DIR': settings.BASE_DIR
    })
    launcher_path = os.path.join(job_dir, 'launcher.sh')
    with open(launcher_path, 'w') as f:
        f.write(launcher)
    # Make the launcher executable.
    st = os.stat(launcher_path)
    os.chmod(launcher_path, st.st_mode | stat.S_IEXEC)

    output_path = os.path.join(job_dir, 'stdout.log')
    err_path = os.path.join(job_dir, 'stderr.log')
    with open(output_path, 'w') as out, open(err_path, 'w') as err:
        process = subprocess.Popen(launcher_path, cwd=job_dir,
                                   stdout=out, stderr=err)
    pid_path = os.path.join(job_dir, 'pid')
    with open(pid_path, 'w') as pid:
        pid.write(str(process.pid))

    job.IsLocal = True
    job.ExternalId = process.pid
    job.Status = Job.RUNNING
    job.save()
def create_sms_notification():
    if not current_app.config['SMS_ENABLED']:
        return jsonify(error="SMS is unavailable"), 503

    notification_request = get_json_from_request('notification')
    validation_result, validation_errors = valid_sms_notification(notification_request)
    if not validation_result:
        return jsonify(error="Invalid JSON", error_details=validation_errors), 400

    incoming_token = get_token_from_headers(request.headers)
    if not incoming_token:
        return jsonify(error="No credentials supplied"), 400

    service = Service.query.join(Token).filter(Token.token == incoming_token).first()
    if not service:
        return jsonify(error="No service associated with these credentials"), 400
    if not service.active:
        return jsonify(error="Service is inactive"), 400

    if "jobId" in notification_request:
        job = Job.query.filter(Job.id == notification_request["jobId"]).first()
        if not job:
            return jsonify(error="No job associated with this job id"), 400
        elif job.service_id != service.id:
            abort(400, "Invalid job id for these credentials")
    else:
        job = Job(name="Autogenerated", created_at=datetime.utcnow(), service=service)

    if service.restricted:
        if notification_request['to'] not in [user.mobile_number for user in service.users]:
            abort(400, "Restricted service: cannot send notification to this number")

    usage = Usage.query.filter(
        Usage.day == datetime.utcnow().date(),
        Usage.service_id == service.id
    ).first()
    if usage:
        usage.count += 1
    else:
        usage = Usage(day=datetime.utcnow().date(), count=1, service_id=service.id)
    if usage.count > service.limit:
        abort(429, "Exceeded sending limits for today")

    notification = Notification(
        to=notification_request['to'],
        message=notification_request['message'],
        status='created',
        method='sms',
        created_at=datetime.utcnow(),
        job=job
    )
    if 'description' in notification_request:
        job.name = notification_request['description']

    try:
        db.session.add(usage)
        db.session.add(notification)
        db.session.commit()
        send_messages_to_queue('sms', [notification])
    except IntegrityError:
        db.session.rollback()
        abort(400, "Failed to create notification: DB error")

    return jsonify(
        notification=Notification.query.filter(
            Notification.id == notification.id
        ).first().serialize()
    ), 201
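# A hedged example (not from the source) of the request body
# create_sms_notification() above accepts; the field names come from the
# handler, the values are illustrative.
example_sms_notification = {
    "notification": {
        "to": "+447700900123",
        "message": "Your appointment is tomorrow at 10am",
        "jobId": 42,                      # optional: attach to an existing job
        "description": "reminder batch",  # optional: renames the job
    }
}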