def add_study(body):
    """Add a study (or any study-like object). The body must include a title
    and a primary_investigator_id."""
    if 'primary_investigator_id' not in body:
        raise ApiError("missing_pi",
                       "Can't create a new study without a Primary Investigator.")
    if 'title' not in body:
        raise ApiError("missing_title",
                       "Can't create a new study without a title.")

    study_model = StudyModel(user_uid=UserService.current_user().uid,
                             title=body['title'],
                             primary_investigator_id=body['primary_investigator_id'],
                             last_updated=datetime.utcnow(),
                             status=StudyStatus.in_progress)
    session.add(study_model)
    StudyService.add_study_update_event(study_model,
                                        status=StudyStatus.in_progress,
                                        event_type=StudyEventType.user,
                                        user_uid=g.user.uid)

    errors = StudyService._add_all_workflow_specs_to_study(study_model)
    session.commit()

    study = StudyService().get_study(study_model.id, do_status=True)
    study_data = StudySchema().dump(study)
    study_data["errors"] = ApiErrorSchema(many=True).dump(errors)
    return study_data

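# A minimal usage sketch for add_study, assuming a Flask request/app context
# with a logged-in user. The payload keys come from the validation above; the
# title and uid values are hypothetical.
study_data = add_study({
    'title': 'Impact of Sleep on Memory',
    'primary_investigator_id': 'dhf8r',
})
# The result is the serialized study plus any errors raised while attaching
# the workflow specs to it.
print(study_data['errors'])
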
def process_workflow_spec(json_file, directory):
    file_path = os.path.join(directory, json_file)

    with open(file_path, 'r') as f_open:
        data = f_open.read()
        data_obj = json.loads(data)
        workflow_spec_model = session.query(WorkflowSpecModel).\
            filter(WorkflowSpecModel.id == data_obj['id']).\
            first()
        if not workflow_spec_model:
            category_id = None
            if data_obj['category'] is not None:
                category_id = session.query(WorkflowSpecCategoryModel.id).filter(
                    WorkflowSpecCategoryModel.display_name ==
                    data_obj['category']['display_name']).scalar()
            workflow_spec_model = WorkflowSpecModel(id=data_obj['id'],
                                                    display_name=data_obj['display_name'],
                                                    description=data_obj['description'],
                                                    is_master_spec=data_obj['is_master_spec'],
                                                    category_id=category_id,
                                                    display_order=data_obj['display_order'],
                                                    standalone=data_obj['standalone'],
                                                    library=data_obj['library'])
            session.add(workflow_spec_model)
            session.commit()

    return workflow_spec_model

def reset(workflow_model, clear_data=False, delete_files=False):
    print('WorkflowProcessor: reset: ')

    # Try to execute a cancel notify
    try:
        wp = WorkflowProcessor(workflow_model)
        wp.cancel_notify()  # This sends a cancel notification to all endpoints.
    except Exception as e:
        app.logger.error(
            "Unable to send a cancel notify for workflow %s during a reset."
            " Continuing with the reset anyway so we don't get in an unresolvable"
            " state. An %s error occurred with the following information: %s" %
            (workflow_model.id, e.__class__.__name__, str(e)))
    workflow_model.bpmn_workflow_json = None

    if clear_data:
        # Clear form_data from task_events
        task_events = session.query(TaskEventModel). \
            filter(TaskEventModel.workflow_id == workflow_model.id).all()
        for task_event in task_events:
            task_event.form_data = {}
            session.add(task_event)
    if delete_files:
        files = FileModel.query.filter(
            FileModel.workflow_id == workflow_model.id).all()
        for file in files:
            FileService.delete_file(file.id)
    session.commit()
    return WorkflowProcessor(workflow_model)

def delete_spec_file(file_id):
    """This should remove the record in the file table, and both files on the filesystem."""
    sync_file_root = SpecFileService.get_sync_file_root()
    file_model = session.query(FileModel).filter(FileModel.id == file_id).first()
    workflow_spec_id = file_model.workflow_spec_id
    workflow_spec_model = session.query(WorkflowSpecModel).filter(
        WorkflowSpecModel.id == workflow_spec_id).first()
    category_name = SpecFileService.get_spec_file_category_name(workflow_spec_model)
    file_model_name = file_model.name
    spec_directory_path = os.path.join(sync_file_root,
                                       category_name,
                                       workflow_spec_model.display_name)
    file_path = os.path.join(spec_directory_path, file_model_name)
    json_file_path = os.path.join(spec_directory_path, f'{file_model_name}.json')

    try:
        os.remove(file_path)
        os.remove(json_file_path)
        session.delete(file_model)
        session.commit()
    except IntegrityError as ie:
        session.rollback()
        file_model = session.query(FileModel).filter_by(id=file_id).first()
        file_model.archived = True
        session.commit()
        app.logger.info("Failed to delete file, so archiving it instead. %i, due to %s" %
                        (file_id, str(ie)))

def update_workflow_spec_file_model(workflow_spec: WorkflowSpecModel,
                                    file_model: FileModel, binary_data, content_type):
    # Verify the extension
    file_extension = FileService.get_extension(file_model.name)
    if file_extension not in FileType._member_names_:
        raise ApiError('unknown_extension',
                       'The file you provided does not have an accepted extension:' +
                       file_extension, status_code=404)
    else:
        file_model.type = FileType[file_extension]
        file_model.content_type = content_type
        file_model.archived = False  # Unarchive the file if it is archived.

    # If this is a BPMN, extract the process id.
    if file_model.type == FileType.bpmn:
        try:
            bpmn: etree.Element = etree.fromstring(binary_data)
            file_model.primary_process_id = SpecFileService.get_process_id(bpmn)
            file_model.is_review = FileService.has_swimlane(bpmn)
        except etree.XMLSyntaxError as xse:
            raise ApiError("invalid_xml", "Failed to parse xml: " + str(xse),
                           file_name=file_model.name)

    session.add(file_model)
    session.commit()
    return file_model

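# Hedged usage sketch, assuming an existing spec and file row (the objects and
# the XML below are hypothetical). For a .bpmn upload the primary process id is
# extracted from the document; malformed XML raises ApiError('invalid_xml').
bpmn_bytes = b"""<?xml version="1.0" encoding="UTF-8"?>
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL">
  <bpmn:process id="my_process" isExecutable="true"/>
</bpmn:definitions>"""
updated = update_workflow_spec_file_model(workflow_spec, file_model,
                                          bpmn_bytes, 'text/xml')
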
def test_update_workflow_specification(self):
    self.load_example_data()
    category_id = 99
    category = WorkflowSpecCategoryModel(id=category_id, name='trap',
                                         display_name="It's a trap!",
                                         display_order=0)
    session.add(category)
    session.commit()

    db_spec_before: WorkflowSpecModel = session.query(WorkflowSpecModel).first()
    spec_id = db_spec_before.id
    self.assertNotEqual(db_spec_before.category_id, category_id)
    db_spec_before.category_id = category_id

    rv = self.app.put('/v1.0/workflow-specification/%s' % spec_id,
                      content_type="application/json",
                      headers=self.logged_in_headers(),
                      data=json.dumps(WorkflowSpecModelSchema().dump(db_spec_before)))
    self.assert_success(rv)
    json_data = json.loads(rv.get_data(as_text=True))
    api_spec = WorkflowSpecModelSchema().load(json_data, session=session)
    self.assertEqual(db_spec_before, api_spec)

    db_spec_after: WorkflowSpecModel = session.query(WorkflowSpecModel).filter_by(id=spec_id).first()
    self.assertIsNotNone(db_spec_after.category_id)
    self.assertIsNotNone(db_spec_after.category)
    self.assertEqual(db_spec_after.category.display_name, category.display_name)
    self.assertEqual(db_spec_after.category.display_order, category.display_order)

def delete_workflow_specification(spec_id):
    if spec_id is None:
        raise ApiError('unknown_spec', 'Please provide a valid Workflow Specification ID.')

    spec: WorkflowSpecModel = session.query(WorkflowSpecModel).filter_by(id=spec_id).first()
    if spec is None:
        raise ApiError('unknown_spec',
                       'The Workflow Specification "' + spec_id + '" is not recognized.')

    # Delete all items in the database related to the deleted workflow spec.
    files = session.query(FileModel).filter_by(workflow_spec_id=spec_id).all()
    for file in files:
        FileService.delete_file(file.id)

    session.query(TaskEventModel).filter(TaskEventModel.workflow_spec_id == spec_id).delete()

    # Delete all events and workflow models related to this specification
    for workflow in session.query(WorkflowModel).filter_by(workflow_spec_id=spec_id):
        StudyService.delete_workflow(workflow)
    session.query(WorkflowSpecModel).filter_by(id=spec_id).delete()
    session.commit()

def test_delete_study_with_workflow_and_status_etc(self):
    self.load_example_data()
    workflow = session.query(WorkflowModel).first()
    stats1 = StudyEvent(
        study_id=workflow.study_id,
        status=StudyStatus.in_progress,
        comment='Some study status change event',
        event_type=StudyEventType.user,
        user_uid=self.users[0]['uid'],
    )
    LdapService.user_info('dhf8r')  # Assure that there is a dhf8r in ldap for StudyAssociated.
    email = EmailModel(subject="x", study_id=workflow.study_id)
    associate = StudyAssociated(study_id=workflow.study_id, uid=self.users[0]['uid'])
    event = StudyEvent(study_id=workflow.study_id)
    session.add_all([email, associate, event])
    stats2 = TaskEventModel(study_id=workflow.study_id,
                            workflow_id=workflow.id,
                            user_uid=self.users[0]['uid'])
    session.add_all([stats1, stats2])
    session.commit()

    rv = self.app.delete('/v1.0/study/%i' % workflow.study_id,
                         headers=self.logged_in_headers())
    self.assert_success(rv)
    del_study = session.query(StudyModel).filter(StudyModel.id == workflow.study_id).first()
    self.assertIsNone(del_study)

def clean_db():
    session.flush()  # Clear out any transactions before deleting it all to avoid spurious errors.
    for table in reversed(db.metadata.sorted_tables):
        session.execute(table.delete())
    session.commit()
    session.flush()

def do_task(self, task, study_id, workflow_id, *args, **kwargs):
    # Get new status
    if 'new_status' in kwargs.keys() or len(args) > 0:
        if 'new_status' in kwargs.keys():
            new_status = kwargs['new_status']
        else:
            new_status = args[0]

        # Get ProgressStatus object for new_status
        try:
            progress_status = getattr(ProgressStatus, new_status)
        # Invalid argument
        except AttributeError as ae:
            raise ApiError.from_task(code='invalid_argument',
                                     message=f"We could not find a status matching `{new_status}`. Original message: {ae}.",
                                     task=task)

        # Set new status
        study_model = session.query(StudyModel).filter(StudyModel.id == study_id).first()
        study_model.progress_status = progress_status
        session.commit()

        return study_model.progress_status.value

    # Missing argument
    else:
        raise ApiError.from_task(code='missing_argument',
                                 message='You must include the new progress status when calling the `set_study_progress_status` script.',
                                 task=task)

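# Hedged usage sketch, assuming `script` is an instance of the class that
# defines do_task above and `task` is the current SpiffWorkflow task; the ids
# and the status name are hypothetical, and the name must match a
# ProgressStatus member.
value = script.do_task(task, study_id=42, workflow_id=7,
                       new_status='in_progress')
# `value` is the .value of the matching ProgressStatus member. Omitting
# new_status raises ApiError('missing_argument'); an unrecognized name raises
# ApiError('invalid_argument').
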
def delete_workflow_specification(spec_id):
    if spec_id is None:
        raise ApiError('unknown_spec', 'Please provide a valid Workflow Specification ID.')

    spec: WorkflowSpecModel = session.query(WorkflowSpecModel).filter_by(id=spec_id).first()
    if spec is None:
        raise ApiError('unknown_spec',
                       'The Workflow Specification "' + spec_id + '" is not recognized.')
    # Read the category_id only after the None check above, so a bad id raises
    # ApiError rather than AttributeError.
    category_id = spec.category_id

    # Delete all workflow models related to this specification
    WorkflowService.delete_workflow_spec_workflow_models(spec_id)
    # Delete all files related to this specification
    WorkflowService.delete_workflow_spec_files(spec_id)
    # Delete all events related to this specification
    WorkflowService.delete_workflow_spec_task_events(spec_id)

    # .delete() doesn't work when we need a cascade. Must grab the record, and explicitly delete.
    workflow_spec = session.query(WorkflowSpecModel).filter_by(id=spec_id).first()
    session.delete(workflow_spec)
    session.commit()

    # Reorder the remaining specs
    WorkflowService.cleanup_workflow_spec_display_order(category_id)

def drop_workflow_spec_library(spec_id, library_id):
    validate_spec_and_library(spec_id, library_id)
    session.query(WorkflowLibraryModel).filter_by(workflow_spec_id=spec_id,
                                                  library_spec_id=library_id).delete()
    session.commit()
    libraries: WorkflowLibraryModel = session.query(WorkflowLibraryModel).filter_by(
        workflow_spec_id=spec_id).all()
    return WorkflowLibraryModelSchema(many=True).dump(libraries)

def _create_workflow_model(study: StudyModel, spec):
    workflow_model = WorkflowModel(status=WorkflowStatus.not_started,
                                   study=study,
                                   workflow_spec_id=spec.id,
                                   last_updated=datetime.now())
    session.add(workflow_model)
    session.commit()
    return workflow_model

def add_workflow_spec_category(body):
    WorkflowService.cleanup_workflow_spec_category_display_order()
    count = session.query(WorkflowSpecCategoryModel).count()
    body['display_order'] = count
    schema = WorkflowSpecCategoryModelSchema()
    new_cat: WorkflowSpecCategoryModel = schema.load(body, session=session)
    session.add(new_cat)
    session.commit()
    return schema.dump(new_cat)

def clean_db():
    session.flush()  # Clear out any transactions before deleting it all to avoid spurious errors.
    engine = session.bind.engine
    connection = engine.connect()
    for table in reversed(db.metadata.sorted_tables):
        # Only clear tables that actually exist in the database.
        if engine.dialect.has_table(connection, table):
            session.execute(table.delete())
    session.commit()
    session.flush()

def update_file(file_model, binary_data, content_type):
    session.flush()  # Assure the database is up-to-date before running this.
    latest_data_model = session.query(FileDataModel). \
        filter(FileDataModel.file_model_id == file_model.id).\
        order_by(desc(FileDataModel.date_created)).first()
    md5_checksum = UUID(hashlib.md5(binary_data).hexdigest())
    size = len(binary_data)

    if (latest_data_model is not None) and (md5_checksum == latest_data_model.md5_hash):
        # This file does not need to be updated, it's the same file. If it is
        # archived, then de-archive it.
        file_model.archived = False
        session.add(file_model)
        session.commit()
        return file_model

    # Verify the extension
    file_extension = FileService.get_extension(file_model.name)
    if file_extension not in FileType._member_names_:
        raise ApiError('unknown_extension',
                       'The file you provided does not have an accepted extension:' +
                       file_extension, status_code=404)
    else:
        file_model.type = FileType[file_extension]
        file_model.content_type = content_type
        file_model.archived = False  # Unarchive the file if it is archived.

    if latest_data_model is None:
        version = 1
    else:
        version = latest_data_model.version + 1

    try:
        user_uid = UserService.current_user().uid
    except ApiError:
        user_uid = None
    new_file_data_model = FileDataModel(data=binary_data,
                                        file_model_id=file_model.id,
                                        file_model=file_model,
                                        version=version,
                                        md5_hash=md5_checksum,
                                        size=size,
                                        user_uid=user_uid)
    session.add_all([file_model, new_file_data_model])
    session.commit()
    session.flush()  # Assure the id is set on the model before returning it.
    return file_model

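# Versioning sketch for update_file, assuming an existing FileModel row (the
# variable names and bytes here are hypothetical). The md5 check above makes
# re-uploading identical content a no-op apart from un-archiving, while new
# content appends a FileDataModel with an incremented version.
fm = update_file(file_model, b'first draft', 'text/plain')   # creates version 1
fm = update_file(file_model, b'first draft', 'text/plain')   # same md5: no new version
fm = update_file(file_model, b'second draft', 'text/plain')  # creates version 2
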
def test_workflow_spec_reorder_bad_order(self):
    self.load_example_data()
    self._load_sample_workflow_specs()
    ordered = session.query(WorkflowSpecModel).\
        filter(WorkflowSpecModel.category_id == 0).\
        order_by(WorkflowSpecModel.display_order).\
        all()

    # Set bad display_orders
    spec_model = ordered[0]
    spec_model.display_order = 1
    session.add(spec_model)
    spec_model = ordered[1]
    spec_model.display_order = 1
    session.add(spec_model)
    spec_model = ordered[2]
    spec_model.display_order = 1
    session.add(spec_model)
    session.commit()

    bad_orders = session.query(WorkflowSpecModel).\
        filter(WorkflowSpecModel.category_id == 0).\
        order_by(WorkflowSpecModel.display_order).\
        all()
    # Not sure how Postgres chooses an order when we have multiple specs with
    # display_order == 1, but it is
    # test_spec_1, random_fact, test_spec_2, test_spec_3
    self.assertEqual(1, bad_orders[0].display_order)
    self.assertEqual('test_spec_1', bad_orders[0].id)
    self.assertEqual(1, bad_orders[1].display_order)
    self.assertEqual('random_fact', bad_orders[1].id)
    self.assertEqual(1, bad_orders[2].display_order)
    self.assertEqual('test_spec_2', bad_orders[2].id)
    self.assertEqual(3, bad_orders[3].display_order)
    self.assertEqual('test_spec_3', bad_orders[3].id)

    # Move test_spec_2 up.
    # This should cause a cleanup of the bad display_order numbers.
    rv = self.app.put("/v1.0/workflow-specification/test_spec_2/reorder?direction=up",
                      headers=self.logged_in_headers())

    # After moving 2 up, the order should be
    # test_spec_1, test_spec_2, random_fact, test_spec_3
    # Make sure we have good display_order numbers too.
    self.assertEqual('test_spec_1', rv.json[0]['id'])
    self.assertEqual(0, rv.json[0]['display_order'])
    self.assertEqual('test_spec_2', rv.json[1]['id'])
    self.assertEqual(1, rv.json[1]['display_order'])
    self.assertEqual('random_fact', rv.json[2]['id'])
    self.assertEqual(2, rv.json[2]['display_order'])
    self.assertEqual('test_spec_3', rv.json[3]['id'])
    self.assertEqual(3, rv.json[3]['display_order'])

def create_user(self, uid="dhf8r", email="*****@*****.**", display_name="Hoopy Frood"): user = session.query(UserModel).filter(UserModel.uid == uid).first() if user is None: ldap_user = LdapService.user_info(uid) user = UserModel(uid=uid, ldap_info=ldap_user) session.add(user) session.commit() return user
def test_waiting_task_error(self):
    workflow = self.create_workflow('raise_error')
    workflow.status = WorkflowStatus.waiting
    session.commit()

    status_before = session.query(WorkflowModel.status).filter(WorkflowModel.id == workflow.id).scalar()
    WorkflowService.do_waiting()
    status_after = session.query(WorkflowModel.status).filter(WorkflowModel.id == workflow.id).scalar()

    self.assertEqual('waiting', status_before.value)
    self.assertEqual('erroring', status_after.value)

def save(self):
    """Saves the current state of this processor to the database."""
    self.workflow_model.bpmn_workflow_json = self.serialize()
    complete_states = [SpiffTask.CANCELLED, SpiffTask.COMPLETED]
    tasks = list(self.get_all_user_tasks())
    self.workflow_model.status = self.get_status()
    self.workflow_model.total_tasks = len(tasks)
    self.workflow_model.completed_tasks = sum(1 for t in tasks if t.state in complete_states)
    self.workflow_model.last_updated = datetime.utcnow()
    session.add(self.workflow_model)
    session.commit()

def update_file_info(file_id, body):
    if file_id is None:
        raise ApiError('no_such_file', 'Please provide a valid File ID.')

    file_model = session.query(FileModel).filter_by(id=file_id).first()
    if file_model is None:
        raise ApiError('unknown_file_model', 'The file_model "' + file_id + '" is not recognized.')

    file_model = FileModelSchema().load(body, session=session)
    session.add(file_model)
    session.commit()
    return FileSchema().dump(to_file_api(file_model))

def delete_study(study_id):
    session.query(TaskEventModel).filter_by(study_id=study_id).delete()
    session.query(TaskLogModel).filter_by(study_id=study_id).delete()
    session.query(StudyAssociated).filter_by(study_id=study_id).delete()
    session.query(EmailModel).filter_by(study_id=study_id).delete()
    session.query(StudyEvent).filter_by(study_id=study_id).delete()

    for workflow in session.query(WorkflowModel).filter_by(study_id=study_id):
        StudyService.delete_workflow(workflow.id)

    study = session.query(StudyModel).filter_by(id=study_id).first()
    session.delete(study)
    session.commit()

def stop_impersonating():
    if not UserService.has_user():
        raise ApiError("logged_out", "You are no longer logged in.", status_code=401)

    # Clear out the current impersonating user.
    if 'impersonate_user' in g:
        del g.impersonate_user

    admin_session: AdminSessionModel = UserService.get_admin_session()
    if admin_session:
        session.delete(admin_session)
        session.commit()

def add_workflow_specification(body):
    category_id = body['category_id']
    WorkflowService.cleanup_workflow_spec_display_order(category_id)
    count = session.query(WorkflowSpecModel).filter_by(category_id=category_id).count()
    body['display_order'] = count

    # Libraries and standalone workflows don't get a category_id
    if body['library'] is True or body['standalone'] is True:
        body['category_id'] = None

    new_spec: WorkflowSpecModel = WorkflowSpecModelSchema().load(body, session=session)
    session.add(new_spec)
    session.commit()
    return WorkflowSpecModelSchema().dump(new_spec)

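# Hypothetical payload sketch for add_workflow_specification, limited to the
# keys the function reads directly plus an id/display_name the schema would
# presumably require (the exact schema fields are an assumption).
body = {
    'id': 'data_security_plan',
    'display_name': 'Data Security Plan',
    'category_id': 0,
    'library': False,      # a True value here clears category_id
    'standalone': False,   # likewise for standalone workflows
}
spec_json = add_workflow_specification(body)
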
def update_study(study_id, body):
    if study_id is None:
        raise ApiError('unknown_study', 'Please provide a valid Study ID.')

    study_model = session.query(StudyModel).filter_by(id=study_id).first()
    if study_model is None:
        raise ApiError('unknown_study', 'The study "' + study_id + '" is not recognized.')

    study: Study = StudySchema().load(body)
    study.update_model(study_model)
    session.add(study_model)
    session.commit()
    return StudySchema().dump(study)

def test_delete_study_with_workflow_and_status(self):
    self.load_example_data()
    workflow = session.query(WorkflowModel).first()
    stats2 = TaskEventModel(study_id=workflow.study_id,
                            workflow_id=workflow.id,
                            user_uid=self.users[0]['uid'])
    session.add(stats2)
    session.commit()

    rv = self.app.delete('/v1.0/study/%i' % workflow.study_id,
                         headers=self.logged_in_headers())
    self.assert_success(rv)
    del_study = session.query(StudyModel).filter(StudyModel.id == workflow.study_id).first()
    self.assertIsNone(del_study)

def update_irb_code(file_id, irb_doc_code):
    """Create a new file and associate it with the workflow.
    Please note that the irb_doc_code MUST be a known file in the
    irb_documents.xlsx reference document."""
    file_model = session.query(FileModel)\
        .filter(FileModel.id == file_id).first()
    if file_model is None:
        raise ApiError("invalid_file_id",
                       "When updating the irb_doc_code for a file, that file_id must already exist. "
                       "This file_id is not found in the database '%d'" % file_id)

    file_model.irb_doc_code = irb_doc_code
    session.commit()
    return True

def load_example_data(self, use_crc_data=False, use_rrt_data=False):
    """use_crc_data will cause this to load the mammoth collection of documents
    we built up developing crc, use_rrt_data will do the same for the rrt
    project, otherwise it depends on a small setup for running tests."""
    from example_data import ExampleDataLoader
    ExampleDataLoader.clean_db()

    # If in production mode, only add the first user.
    if app.config['PRODUCTION']:
        ldap_info = LdapService.user_info(self.users[0]['uid'])
        session.add(UserModel(uid=self.users[0]['uid'], ldap_info=ldap_info))
    else:
        for user_json in self.users:
            ldap_info = LdapService.user_info(user_json['uid'])
            session.add(UserModel(uid=user_json['uid'], ldap_info=ldap_info))

    if use_crc_data:
        ExampleDataLoader().load_all()
    elif use_rrt_data:
        ExampleDataLoader().load_rrt()
    else:
        ExampleDataLoader().load_test_data()

    session.commit()
    for study_json in self.studies:
        study_model = StudyModel(**study_json)
        session.add(study_model)
        StudyService._add_all_workflow_specs_to_study(study_model)
        session.commit()
        update_seq = "ALTER SEQUENCE %s RESTART WITH %s" % (
            StudyModel.__tablename__ + '_id_seq', study_model.id + 1)
        print("Update Sequence." + update_seq)
        session.execute(update_seq)
    session.flush()

    specs = session.query(WorkflowSpecModel).all()
    self.assertIsNotNone(specs)
    for spec in specs:
        files = session.query(FileModel).filter_by(workflow_spec_id=spec.id).all()
        self.assertIsNotNone(files)
        self.assertGreater(len(files), 0)
        for file in files:
            # file_data = session.query(FileDataModel).filter_by(file_model_id=file.id).all()
            file_data = SpecFileService().get_spec_file_data(file.id).data
            self.assertIsNotNone(file_data)
            self.assertGreater(len(file_data), 0)

def update_datastore(id, body):
    """Allow a modification to a datastore item."""
    if id is None:
        raise ApiError('unknown_id', 'Please provide a valid ID.')

    item = session.query(DataStoreModel).filter_by(id=id).first()
    if item is None:
        raise ApiError('unknown_item', 'The item "' + id + '" is not recognized.')

    DataStoreSchema().load(body, instance=item, session=session)
    item.last_updated = datetime.utcnow()
    session.add(item)
    session.commit()
    return DataStoreSchema().dump(item)

def update_workflow_specification(spec_id, body):
    if spec_id is None:
        raise ApiError('unknown_spec', 'Please provide a valid Workflow Spec ID.')

    spec = session.query(WorkflowSpecModel).filter_by(id=spec_id).first()
    if spec is None:
        raise ApiError('unknown_spec', 'The spec "' + spec_id + '" is not recognized.')

    schema = WorkflowSpecModelSchema()
    spec = schema.load(body, session=session, instance=spec, partial=True)
    session.add(spec)
    session.commit()
    return schema.dump(spec)