def test_audit(app):
    """Verify audit rows are persisted both directly and via the helper.

    First creates a user and a workflow object, then (a) saves a
    ``WorkflowsAudit`` row directly and (b) logs a second one through
    ``log_workflows_action``, asserting both are queryable afterwards.
    """
    user_id = None
    workflow_id = None

    with app.app_context():
        # Fixture data — credentials here are masked placeholders.
        user = User(email="*****@*****.**", active=True)
        user.password = "******"
        db.session.add(user)
        workflows_object = WorkflowObject.create_object()
        workflows_object.save()
        db.session.commit()
        user_id = user.id
        workflow_id = workflows_object.id

    with app.app_context():
        audit_kwargs = {
            'object_id': workflow_id,
            'user_id': user_id,
            'score': 0.222113,
            'user_action': "Non-CORE",
            'decision': "Rejected",
            'source': "test",
            'action': "accept"
        }
        WorkflowsAudit(**audit_kwargs).save()
        db.session.commit()

        assert WorkflowsAudit.query.count() == 1
        entry = WorkflowsAudit.query.filter(
            WorkflowsAudit.object_id == workflow_id
        ).one()
        assert entry
        assert entry.action == "accept"
        assert entry.score == 0.222113

    prediction = dict(max_score=0.222113, decision="Rejected")

    with app.app_context():
        # Same score/decision, this time recorded through the helper.
        log_workflows_action(
            action="accept_core",
            prediction_results=prediction,
            object_id=workflow_id,
            user_id=None,
            source="test",
            user_action="accept"
        )
        db.session.commit()

        assert WorkflowsAudit.query.count() == 2
        entry = WorkflowsAudit.query.filter(
            WorkflowsAudit.action == "accept_core"
        ).one()
        assert entry
        assert entry.action == "accept_core"
        assert entry.score == 0.222113
def submit():
    """Get form data and start workflow."""
    form = LiteratureForm(formdata=request.form)
    visitor = DataExporter()
    visitor.visit(form)

    workflow_object = WorkflowObject.create_object(
        id_user=current_user.get_id()
    )
    workflow_object.data = convert_data_to_model(workflow_object, visitor.data)
    workflow_object.save()
    db.session.commit()

    # Run the workflow in the background (e.g. via Celery) rather than
    # blocking the request.
    start.delay("literature", object_id=workflow_object.id)

    return redirect(url_for('.success'))
def submit():
    """Get form data and start workflow."""
    form = LiteratureForm(formdata=request.form)
    visitor = DataExporter()
    visitor.visit(form)

    workflow_object = WorkflowObject.create_object(
        id_user=current_user.get_id()
    )
    workflow_object.data = visitor.data
    workflow_object.save()
    db.session.commit()

    # Execute the workflow asynchronously (e.g. via Celery) so the
    # request returns immediately.
    start.delay("literature", object_id=workflow_object.id)

    return redirect(url_for('.success'))
def submitupdate():
    """Form action handler for INSPIRE author update form."""
    form = AuthorUpdateForm(formdata=request.form)
    visitor = DataExporter()
    visitor.visit(form)

    workflow_object = WorkflowObject.create_object(
        id_user=current_user.get_id()
    )
    workflow_object.data = visitor.data
    workflow_object.save()
    db.session.commit()

    # delay() executes the workflow in the background.
    start.delay("authorupdate", object_id=workflow_object.id)

    return render_template(
        'authors/forms/update_success.html',
        inspire_url=get_inspire_url(visitor.data)
    )
def submitupdate():
    """Form action handler for INSPIRE author update form."""
    form = AuthorUpdateForm(formdata=request.form)
    exporter = DataExporter()
    exporter.visit(form)

    obj = WorkflowObject.create_object(id_user=current_user.get_id())
    obj.data = exporter.data
    obj.save()
    db.session.commit()

    # Hand the workflow off to the background worker.
    start.delay("authorupdate", object_id=obj.id)

    context = {"inspire_url": get_inspire_url(exporter.data)}
    return render_template('authors/forms/update_success.html', **context)
def submit_results(job_id, results_uri, **kwargs):
    """Check results for current job.

    Reads one JSON record per line from the file referenced by
    ``results_uri`` and starts a workflow object for each record.

    :param job_id: identifier of the crawler job the results belong to.
    :param results_uri: URI whose path component points at the results file.
    :raises CrawlerInvalidResultsPath: if the results file does not exist.
    :raises CrawlerJobVerificationError: if no job matches ``job_id``.
    """
    results_path = urlparse(results_uri).path
    if not os.path.exists(results_path):
        raise CrawlerInvalidResultsPath(
            "Path specified in result does not exist: {0}".format(results_path)
        )

    job = CrawlerJob.query.get(job_id)
    if not job:
        raise CrawlerJobVerificationError(
            "Cannot find job id: {0}".format(job_id)
        )

    with open(results_path) as records:
        # Iterate the file lazily instead of records.readlines(), which
        # materialized the entire results file in memory at once.
        for line in records:
            # Tolerate blank lines (e.g. a trailing newline) instead of
            # crashing json.loads on an empty string.
            if not line.strip():
                continue
            record = json.loads(line)
            obj = WorkflowObject.create_object()
            obj.extra_data['crawler_job_id'] = job_id
            obj.extra_data['crawler_results_path'] = results_path
            # 'extra_data' embedded in the record is stashed separately so
            # it does not pollute the record payload itself.
            obj.extra_data['record_extra'] = record.pop('extra_data', {})
            obj.data_type = current_app.config['CRAWLER_DATA_TYPE']
            obj.data = record
            obj.start_workflow(job.workflow, delayed=True)