def test_list_reference_files(self):
    """Uploading the IRB document list should make it appear in the reference-file listing."""
    ExampleDataLoader.clean_db()
    file_name = DocumentService.DOCUMENT_LIST
    filepath = os.path.join(app.root_path, 'static', 'reference', 'irb_documents.xlsx')
    with open(filepath, 'rb') as fh:
        file_data = fh.read()

    # Upload the spreadsheet as a reference file.
    upload = {'file': (io.BytesIO(file_data), file_name)}
    rv = self.app.post('/v1.0/reference_file',
                       data=upload,
                       follow_redirects=True,
                       content_type='multipart/form-data',
                       headers=self.logged_in_headers())
    self.assert_success(rv)

    # The listing should now contain exactly that one reference file.
    rv = self.app.get('/v1.0/reference_file',
                      follow_redirects=True,
                      content_type="application/json",
                      headers=self.logged_in_headers())
    self.assert_success(rv)
    json_data = json.loads(rv.get_data(as_text=True))
    self.assertEqual(1, len(json_data))
    file = FileModelSchema(many=True).load(json_data, session=session)
    self.assertEqual(file_name, file[0].name)
    self.assertTrue(file[0].is_reference)
def load_example_data(self, use_crc_data=False, use_rrt_data=False):
    """Reset the database and load fixture data for tests.

    use_crc_data loads the mammoth collection of documents built up while
    developing crc, use_rrt_data does the same for the rrt project,
    otherwise a small test fixture set is loaded.
    """
    from example_data import ExampleDataLoader
    ExampleDataLoader.clean_db()

    # If in production mode, only add the first user.
    if app.config['PRODUCTION']:
        ldap_info = LdapService.user_info(self.users[0]['uid'])
        session.add(UserModel(uid=self.users[0]['uid'], ldap_info=ldap_info))
    else:
        for user_json in self.users:
            ldap_info = LdapService.user_info(user_json['uid'])
            session.add(UserModel(uid=user_json['uid'], ldap_info=ldap_info))

    if use_crc_data:
        ExampleDataLoader().load_all()
    elif use_rrt_data:
        ExampleDataLoader().load_rrt()
    else:
        ExampleDataLoader().load_test_data()
    session.commit()

    for study_json in self.studies:
        study_model = StudyModel(**study_json)
        session.add(study_model)
        StudyService._add_all_workflow_specs_to_study(study_model)
        session.commit()
        # Keep the id sequence ahead of the explicitly-assigned study ids.
        # (Fix: this previously combined a pointless f-string prefix with
        # %-formatting; it is a plain f-string now — same resulting SQL.)
        update_seq = (f"ALTER SEQUENCE {StudyModel.__tablename__}_id_seq "
                      f"RESTART WITH {study_model.id + 1}")
        print("Update Sequence." + update_seq)
        session.execute(update_seq)
    session.flush()

    # Every loaded spec must have at least one file with non-empty data.
    specs = session.query(WorkflowSpecModel).all()
    self.assertIsNotNone(specs)
    for spec in specs:
        files = session.query(FileModel).filter_by(workflow_spec_id=spec.id).all()
        self.assertIsNotNone(files)
        self.assertGreater(len(files), 0)
        for file in files:
            file_data = SpecFileService().get_spec_file_data(file.id).data
            self.assertIsNotNone(file_data)
            self.assertGreater(len(file_data), 0)
def test_multi_instance_task(self, mock_get): ExampleDataLoader().load_reference_documents() # Enable the protocol builder. app.config['PB_ENABLED'] = True # This depends on getting a list of investigators back from the protocol builder. mock_get.return_value.ok = True mock_get.return_value.text = self.protocol_builder_response( 'investigators.json') workflow = self.create_workflow('multi_instance') # get the first form in the two form workflow. workflow_api = self.get_workflow_api(workflow) navigation = self.get_workflow_api(workflow_api).navigation self.assertEqual( 5, len(navigation)) # Start task, form_task, multi_task, end task self.assertEqual("UserTask", workflow_api.next_task.type) self.assertEqual(MultiInstanceType.sequential.value, workflow_api.next_task.multi_instance_type) self.assertEqual(5, workflow_api.next_task.multi_instance_count) # Assure that the names for each task are properly updated, so they aren't all the same. self.assertEqual("Primary Investigator", workflow_api.next_task.title)
def create_user_with_study_and_workflow(self):
    """Wipe the database and build a user/study/category/workflow fixture.

    Installs the master spec, a 'random_fact' spec under an 'approvals'
    category, and the reference documents.  Returns the created user.
    """
    # clear it all out.
    from example_data import ExampleDataLoader
    ExampleDataLoader.clean_db()

    # Assure some basic models are in place, This is a damn mess. Our database
    # models need an overhaul to make this easier - better relationship
    # modeling is now critical.
    self.load_test_spec("top_level_workflow", master_spec=True)

    user = db.session.query(UserModel).filter(UserModel.uid == "dhf8r").first()
    if not user:
        user = UserModel(uid="dhf8r",
                         email_address="*****@*****.**",
                         display_name="Stayathome Smellalots")
        db.session.add(user)
        db.session.commit()
    else:
        # Re-using an existing user: drop any studies already hanging off it.
        for existing_study in db.session.query(StudyModel).all():
            StudyService().delete_study(existing_study.id)

    study = StudyModel(title="My title",
                       protocol_builder_status=ProtocolBuilderStatus.ACTIVE,
                       user_uid=user.uid)
    db.session.add(study)

    cat = WorkflowSpecCategoryModel(name="approvals",
                                    display_name="Approvals",
                                    display_order=0)
    db.session.add(cat)
    db.session.commit()
    self.assertIsNotNone(cat.id)
    self.load_test_spec("random_fact", category_id=cat.id)

    self.assertIsNotNone(study.id)
    workflow = WorkflowModel(workflow_spec_id="random_fact",
                             study_id=study.id,
                             status=WorkflowStatus.not_started,
                             last_updated=datetime.now())
    db.session.add(workflow)
    db.session.commit()

    # Assure there is a master specification, one standard spec, and lookup tables.
    ExampleDataLoader().load_reference_documents()
    return user
def load_example_data(self, use_crc_data=False, use_rrt_data=False):
    """Reset the database and load fixture data for tests.

    use_crc_data loads the full crc document collection, use_rrt_data loads
    the rrt project data, otherwise a small test fixture set is used.
    """
    from example_data import ExampleDataLoader
    ExampleDataLoader.clean_db()
    if use_crc_data:
        ExampleDataLoader().load_all()
    elif use_rrt_data:
        ExampleDataLoader().load_rrt()
    else:
        ExampleDataLoader().load_test_data()

    for user_json in self.users:
        db.session.add(UserModel(**user_json))
    db.session.commit()

    for study_json in self.studies:
        study_model = StudyModel(**study_json)
        db.session.add(study_model)
        StudyService._add_all_workflow_specs_to_study(study_model)
        # Advance the id sequence past the explicitly-assigned study ids.
        db.session.execute(Sequence(StudyModel.__tablename__ + '_id_seq'))
    db.session.commit()
    db.session.flush()

    # Every loaded spec must have at least one file with non-empty data.
    # (Fix: an identical preliminary loop over `specs` duplicated the file
    # checks below verbatim; the redundant first pass has been removed.)
    specs = session.query(WorkflowSpecModel).all()
    self.assertIsNotNone(specs)
    for spec in specs:
        files = session.query(FileModel).filter_by(workflow_spec_id=spec.id).all()
        self.assertIsNotNone(files)
        self.assertGreater(len(files), 0)
        for file in files:
            file_data = session.query(FileDataModel).filter_by(file_model_id=file.id).all()
            self.assertIsNotNone(file_data)
            self.assertGreater(len(file_data), 0)
def test_library_code(self):
    """Attach a library spec to a workflow spec, then detach it again."""
    self.load_example_data()
    spec1 = ExampleDataLoader().create_spec('hello_world', 'Hello World',
                                            category_id=0, library=False, from_tests=True)
    spec2 = ExampleDataLoader().create_spec('hello_world_lib', 'Hello World Library',
                                            category_id=0, library=True, from_tests=True)
    user = session.query(UserModel).first()
    self.assertIsNotNone(user)

    # Attach the library.  (Fix: these urls previously combined an f-string
    # prefix with %-formatting; real f-string interpolation now.)
    rv = self.app.post(f'/v1.0/workflow-specification/{spec1.id}/library/{spec2.id}',
                       follow_redirects=True,
                       content_type="application/json",
                       headers=self.logged_in_headers())
    self.assert_success(rv)
    rv = self.app.get(f'/v1.0/workflow-specification/{spec1.id}',
                      follow_redirects=True,
                      content_type="application/json",
                      headers=self.logged_in_headers())
    returned = rv.json
    self.assertIsNotNone(returned.get('libraries'))
    self.assertEqual(len(returned['libraries']), 1)
    self.assertEqual(returned['libraries'][0].get('id'), 'hello_world_lib')

    # Detach and verify the library list is empty again.
    rv = self.app.delete(f'/v1.0/workflow-specification/{spec1.id}/library/{spec2.id}',
                         follow_redirects=True,
                         content_type="application/json",
                         headers=self.logged_in_headers())
    self.assert_success(rv)  # Fix: the delete response was previously unchecked.
    rv = self.app.get(f'/v1.0/workflow-specification/{spec1.id}',
                      follow_redirects=True,
                      content_type="application/json",
                      headers=self.logged_in_headers())
    returned = rv.json
    self.assertIsNotNone(returned.get('libraries'))
    self.assertEqual(len(returned['libraries']), 0)
def test_get_workflow_from_workflow_spec(self):
    """POSTing a standalone spec id should create a workflow and return its first task."""
    self.load_example_data()
    spec = ExampleDataLoader().create_spec('hello_world', 'Hello World',
                                           standalone=True, from_tests=True)
    rv = self.app.post(f'/v1.0/workflow-specification/{spec.id}',
                       headers=self.logged_in_headers())
    self.assert_success(rv)
    body = rv.json
    self.assertEqual('hello_world', body['workflow_spec_id'])
    self.assertEqual('Task_GetName', body['next_task']['name'])
def test_list_reference_files(self):
    """PUTting a reference file should make it show up in the listing."""
    ExampleDataLoader.clean_db()
    file_name = FileService.DOCUMENT_LIST
    data = {'file': (io.BytesIO(b"abcdef"), file_name)}
    rv = self.app.put('/v1.0/reference_file/%s' % file_name,
                      data=data,
                      follow_redirects=True,
                      content_type='multipart/form-data',
                      headers=self.logged_in_headers())
    self.assert_success(rv)  # Fix: the upload response was previously unchecked.
    rv = self.app.get('/v1.0/reference_file',
                      follow_redirects=True,
                      content_type="application/json",
                      headers=self.logged_in_headers())
    self.assert_success(rv)
    json_data = json.loads(rv.get_data(as_text=True))
    self.assertEqual(1, len(json_data))
    file = FileModelSchema(many=True).load(json_data, session=session)
    self.assertEqual(file_name, file[0].name)
    self.assertTrue(file[0].is_reference)
def test_get_standalone_workflow_specs(self):
    """The standalone=true filter should return each standalone spec created."""
    self.load_example_data()
    category = session.query(WorkflowSpecCategoryModel).first()

    # One standalone spec -> one result.
    ExampleDataLoader().create_spec('hello_world', 'Hello World',
                                    category_id=category.id,
                                    standalone=True, from_tests=True)
    rv = self.app.get('/v1.0/workflow-specification?standalone=true',
                      headers=self.logged_in_headers())
    self.assertEqual(1, len(rv.json))

    # A second standalone spec -> two results.
    ExampleDataLoader().create_spec('email_script', 'Email Script',
                                    category_id=category.id,
                                    standalone=True, from_tests=True)
    rv = self.app.get('/v1.0/workflow-specification?standalone=true',
                      headers=self.logged_in_headers())
    self.assertEqual(2, len(rv.json))
def test_library_cleanup(self):
    """Deleting a spec should also remove its library association rows."""
    self.load_example_data()
    spec1 = ExampleDataLoader().create_spec('hello_world', 'Hello World',
                                            category_id=0, library=False, from_tests=True)
    spec2 = ExampleDataLoader().create_spec('hello_world_lib', 'Hello World Library',
                                            category_id=0, library=True, from_tests=True)
    user = session.query(UserModel).first()
    self.assertIsNotNone(user)

    # Attach the library.  (Fix: urls previously mixed an f-string prefix
    # with %-formatting; real f-strings now.)
    rv = self.app.post(f'/v1.0/workflow-specification/{spec1.id}/library/{spec2.id}',
                       follow_redirects=True,
                       content_type="application/json",
                       headers=self.logged_in_headers())
    self.assert_success(rv)
    rv = self.app.get(f'/v1.0/workflow-specification/{spec1.id}',
                      follow_redirects=True,
                      content_type="application/json",
                      headers=self.logged_in_headers())
    self.assert_success(rv)  # Fix: response was fetched into an unused local before.
    lib = session.query(WorkflowLibraryModel).filter(
        WorkflowLibraryModel.library_spec_id == spec2.id).first()
    self.assertIsNotNone(lib)

    # Deleting the parent spec must cascade to the library association.
    rv = self.app.delete(f'/v1.0/workflow-specification/{spec1.id}',
                         follow_redirects=True,
                         content_type="application/json",
                         headers=self.logged_in_headers())
    self.assert_success(rv)  # Fix: the delete response was previously unchecked.
    lib = session.query(WorkflowLibraryModel).filter(
        WorkflowLibraryModel.library_spec_id == spec2.id).first()
    self.assertIsNone(lib)
def load_test_spec(dir_name, master_spec=False, category_id=None):
    """Loads a spec into the database based on a directory in /tests/data"""
    # Reuse an already-loaded spec rather than creating a duplicate.
    existing = session.query(WorkflowSpecModel).filter_by(id=dir_name)
    if existing.count() > 0:
        return existing.first()
    filepath = os.path.join(app.root_path, '..', 'tests', 'data', dir_name, "*")
    return ExampleDataLoader().create_spec(id=dir_name,
                                           name=dir_name,
                                           filepath=filepath,
                                           master_spec=master_spec,
                                           category_id=category_id)
def test_no_study_workflow(self):
    """A standalone workflow (no study) should carry form data between tasks."""
    self.load_example_data()
    spec = ExampleDataLoader().create_spec('hello_world', 'Hello World',
                                           standalone=True, from_tests=True)
    user = session.query(UserModel).first()
    self.assertIsNotNone(user)

    workflow_model = WorkflowService.get_workflow_from_spec(spec.id, user)
    workflow_api = self.get_workflow_api(workflow_model)

    # Fill in the name form, then check it is echoed back in the next task.
    self.complete_form(workflow_model, workflow_api.next_task, {'name': 'Big Guy'})
    workflow_api = self.get_workflow_api(workflow_model)
    self.assertEqual('Hello Big Guy', workflow_api.next_task.documentation)
def test_add_reference_file(self):
    """Posting a new xlsx reference file should store it flagged as a reference."""
    ExampleDataLoader().load_reference_documents()
    file_name = 'new.xlsx'
    data = {'file': (io.BytesIO(b"abcdef"), file_name)}
    rv = self.app.post('/v1.0/reference_file',
                       data=data,
                       follow_redirects=True,
                       content_type='multipart/form-data',
                       headers=self.logged_in_headers())
    self.assert_success(rv)  # Fix: only the payload, not the status, was checked before.
    self.assertIsNotNone(rv.get_data())
    json_data = json.loads(rv.get_data(as_text=True))
    file = FileModelSchema().load(json_data, session=session)
    self.assertEqual(FileType.xlsx, file.type)
    self.assertFalse(file.primary)
    # Fix: assertTrue replaces the awkward assertEqual(True, ...) idiom.
    self.assertTrue(file.is_reference)
def test_get_task_events(self):
    """The task_events endpoint should respond successfully after a workflow exists."""
    self.load_example_data()
    spec = ExampleDataLoader().create_spec('hello_world', 'Hello World',
                                           category_id=0, standalone=True,
                                           from_tests=True)
    user = session.query(UserModel).first()
    self.assertIsNotNone(user)
    WorkflowService.get_workflow_from_spec(spec.id, user)
    # Fix: the url had a redundant f-string prefix with no placeholders.
    rv = self.app.get('/v1.0/task_events',
                      follow_redirects=True,
                      content_type="application/json",
                      headers=self.logged_in_headers())
    self.assert_success(rv)
def test_delete_reference_file(self):
    """Deleting a reference file should make subsequent fetches 404."""
    ExampleDataLoader().load_reference_documents()
    reference_file = session.query(FileModel).filter(
        FileModel.is_reference == True).first()
    rv = self.app.get('/v1.0/reference_file/%s' % reference_file.name,
                      headers=self.logged_in_headers())
    self.assert_success(rv)
    rv = self.app.delete('/v1.0/reference_file/%s' % reference_file.name,
                         headers=self.logged_in_headers())
    self.assert_success(rv)  # Fix: the delete response was previously discarded unchecked.
    db.session.flush()
    rv = self.app.get('/v1.0/reference_file/%s' % reference_file.name,
                      headers=self.logged_in_headers())
    self.assertEqual(404, rv.status_code)
    self.assertIsNotNone(rv.get_data())
    json_data = json.loads(rv.get_data(as_text=True))
    self.assertIn('The reference file name you provided', json_data['message'])
def test_get_reference_file_data(self):
    """Downloading a reference file should return exactly the bytes uploaded."""
    ExampleDataLoader().load_reference_documents()
    file_name = "irb_document_types.xls"
    filepath = os.path.join(app.root_path, 'static', 'reference', 'irb_documents.xlsx')
    with open(filepath, 'rb') as f_open:
        file_data = f_open.read()
    data = {'file': (io.BytesIO(file_data), file_name)}
    rv = self.app.post('/v1.0/reference_file',
                       data=data,
                       follow_redirects=True,
                       content_type='multipart/form-data',
                       headers=self.logged_in_headers())
    self.assert_success(rv)  # Fix: the upload response was previously unchecked.
    rv = self.app.get('/v1.0/reference_file/%s/data' % file_name,
                      headers=self.logged_in_headers())
    self.assert_success(rv)
    data_out = rv.get_data()
    self.assertEqual(file_data, data_out)
def test_parallel_multi_instance(self, mock_get):
    """Complete all five parallel multi-instance tasks in random order and
    verify the workflow reaches completion."""
    # Assure we get nine investigators back from the API Call, as set in the
    # investigators.json file.
    app.config['PB_ENABLED'] = True
    mock_get.return_value.ok = True
    mock_get.return_value.text = self.protocol_builder_response('investigators.json')
    ExampleDataLoader().load_reference_documents()

    workflow = self.create_workflow('multi_instance_parallel')
    workflow_api = self.get_workflow_api(workflow)
    self.assertEqual(9, len(workflow_api.navigation))
    ready_items = [nav for nav in workflow_api.navigation if nav.state == "READY"]
    self.assertEqual(5, len(ready_items))
    self.assertEqual("UserTask", workflow_api.next_task.type)
    self.assertEqual("MultiInstanceTask", workflow_api.next_task.name)
    self.assertEqual("Primary Investigator", workflow_api.next_task.title)

    # Visit the ready instances in a shuffled order to prove order-independence.
    for idx in random.sample(range(5), 5):
        task_id = ready_items[idx].task_id
        rv = self.app.put('/v1.0/workflow/%i/task/%s/set_token' % (workflow.id, task_id),
                          headers=self.logged_in_headers(),
                          content_type="application/json")
        self.assert_success(rv)
        json_data = json.loads(rv.get_data(as_text=True))
        workflow_api = WorkflowApiSchema().load(json_data)
        data = workflow_api.next_task.data
        data['investigator']['email'] = "*****@*****.**"
        self.complete_form(workflow, workflow_api.next_task, data)

    workflow_api = self.get_workflow_api(workflow)
    self.assertEqual(WorkflowStatus.complete, workflow_api.status)
def test_parallel_multi_instance_update_all(self, mock_get):
    """Completing one parallel instance with update_all should propagate the
    value to every investigator and finish the workflow."""
    # Assure we get nine investigators back from the API Call, as set in the
    # investigators.json file.
    app.config['PB_ENABLED'] = True
    mock_get.return_value.ok = True
    mock_get.return_value.text = self.protocol_builder_response('investigators.json')
    ExampleDataLoader().load_reference_documents()

    workflow = self.create_workflow('multi_instance_parallel')
    workflow_api = self.get_workflow_api(workflow)
    self.assertEqual(9, len(workflow_api.navigation))
    ready_items = [nav for nav in workflow_api.navigation if nav.state == "READY"]
    self.assertEqual(5, len(ready_items))
    self.assertEqual("UserTask", workflow_api.next_task.type)
    self.assertEqual("MultiInstanceTask", workflow_api.next_task.name)
    self.assertEqual("Primary Investigator", workflow_api.next_task.title)

    # Submit a single form with update_all so the email lands on every instance.
    data = workflow_api.next_task.data
    data['investigator']['email'] = "*****@*****.**"
    self.complete_form(workflow, workflow_api.next_task, data, update_all=True)

    workflow = self.get_workflow_api(workflow)
    self.assertEqual(WorkflowStatus.complete, workflow.status)
    data = workflow.next_task.data
    for key in data["StudyInfo"]["investigators"]:
        self.assertEqual("*****@*****.**",
                         data["StudyInfo"]["investigators"][key]['email'])
def load_test_spec(dir_name, display_name=None, master_spec=False, category_id=None):
    """Loads a spec into the database based on a directory in /tests/data"""
    # Fall back to a throwaway category when the caller did not supply one.
    if category_id is None:
        category = WorkflowSpecCategoryModel(display_name="Test Workflows",
                                             display_order=0)
        session.add(category)
        session.commit()
        category_id = category.id

    # Reuse an already-loaded spec rather than creating a duplicate.
    existing = session.query(WorkflowSpecModel).filter_by(id=dir_name)
    if existing.count() > 0:
        return existing.first()

    filepath = os.path.join(app.root_path, '..', 'tests', 'data', dir_name, "*")
    if display_name is None:
        display_name = dir_name
    return ExampleDataLoader().create_spec(id=dir_name,
                                           filepath=filepath,
                                           master_spec=master_spec,
                                           display_name=display_name,
                                           category_id=category_id)
def tearDown(self):
    """Reset shared test state after each test: wipe the db, log out,
    clear cached auth headers, and remove test sync files."""
    ExampleDataLoader.clean_db()
    self.logout()
    self.auths = {}
    self.clear_test_sync_files()
def load_example_rrt_data():
    """Wipe the database and load the rrt example data set."""
    from example_data import ExampleDataLoader
    ExampleDataLoader.clean_db()
    ExampleDataLoader().load_rrt()
def load_example_data():
    """Wipe the database, then load the full example data set and the
    default user."""
    from example_data import ExampleDataLoader
    ExampleDataLoader.clean_db()
    ExampleDataLoader().load_all()
    ExampleDataLoader().load_default_user()
def load_reference_files():
    """Load the reference documents into the database.

    (Previous docstring incorrectly said this loads example data; it only
    loads reference documents and does not clean the db first.)
    """
    from example_data import ExampleDataLoader
    ExampleDataLoader().load_reference_documents()
def delete_example_data(self, use_crc_data=False, use_rrt_data=False):
    """
    delete everything that matters in the local database -
    this is used to test ground zero copy of workflow specs.

    NOTE(review): use_crc_data and use_rrt_data are accepted for signature
    symmetry with load_example_data but are currently unused here.
    """
    ExampleDataLoader.clean_db()
def tearDown(self):
    """Reset shared test state after each test: wipe the db, clear the
    flask `g` user, and drop cached auth headers."""
    ExampleDataLoader.clean_db()
    g.user = None
    self.auths = {}
def clear_db():
    """Remove all data from the database.

    (Previous docstring incorrectly said this loads example data; it only
    cleans the db.)
    """
    from example_data import ExampleDataLoader
    ExampleDataLoader.clean_db()