def test_filter_sql_equivalent(self):
    """
    Test applying a filter on DB.

    Stores three ``Datatype1`` instances with known (row1, row2) values,
    then evaluates several FilterChain instances against the DB and checks
    the number of matching entities for each.
    """
    # (row1, row2) values for the three stored datatypes; each one is
    # stored through a fresh DatatypesFactory, as in the per-entity setup.
    for row1_value, row2_value in [("value1", "value2"),
                                   ("value3", "value2"),
                                   ("value1", "value3")]:
        data_type = Datatype1()
        data_type.row1 = row1_value
        data_type.row2 = row2_value
        datatypes_factory.DatatypesFactory()._store_datatype(data_type)

    test_filter_1 = FilterChain(fields=[FilterChain.datatype + '._row1'],
                                operations=['=='], values=['value1'])
    # Fixed typo: the original filtered on 'vaue2'. No stored row1 equals
    # 'value2' either, so the expected match count below stays 0.
    test_filter_2 = FilterChain(fields=[FilterChain.datatype + '._row1'],
                                operations=['=='], values=['value2'])
    test_filter_3 = FilterChain(fields=[FilterChain.datatype + '._row1',
                                        FilterChain.datatype + '._row2'],
                                operations=['==', 'in'],
                                values=["value1", ['value1', 'value2']])
    test_filter_4 = FilterChain(fields=[FilterChain.datatype + '._row1',
                                        FilterChain.datatype + '._row2'],
                                operations=['==', 'in'],
                                values=["value1", ['value5', 'value6']])

    all_stored_dts = self.count_all_entities(Datatype1)
    self.assertEqual(3, all_stored_dts)

    self._evaluate_db_filter(test_filter_1, 2)
    self._evaluate_db_filter(test_filter_2, 0)
    self._evaluate_db_filter(test_filter_3, 1)
    self._evaluate_db_filter(test_filter_4, 0)
def test_get_project_structure(self):
    """ Tests project structure is as expected and contains all datatypes """
    SELF_DTS_NUMBER = 3
    dt_factory_1 = datatypes_factory.DatatypesFactory()
    self._create_datatypes(dt_factory_1, SELF_DTS_NUMBER)
    dt_group = dt_factory_1.create_datatype_group()

    link_ids, expected_links = [], []

    # Link a plain (simple) DT from a second project.
    dt_factory_2 = datatypes_factory.DatatypesFactory()
    simple_dt = dt_factory_2.create_simple_datatype()
    link_ids.append(simple_dt.id)
    expected_links.append(simple_dt.gid)

    # Link a full DT Group (group entity + all its members); only the
    # DT_Group node itself is expected in the resulting tree.
    link_gr = dt_factory_2.create_datatype_group()
    group_members = dao.get_datatype_in_group(datatype_group_id=link_gr.id)
    link_ids.extend([member.id for member in group_members])
    link_ids.append(link_gr.id)
    expected_links.append(link_gr.gid)

    # Link a single DT taken from inside another group; that DT itself
    # is expected to show up in the tree.
    link_gr = dt_factory_2.create_datatype_group()
    single_member = dao.get_datatype_in_group(datatype_group_id=link_gr.id)[0]
    link_ids.append(single_member.id)
    expected_links.append(single_member.gid)

    # Create all links from project 2 into project 1.
    FlowService().create_link(link_ids, dt_factory_1.project.id)

    # Raw data backing the tree (easy to inspect programmatically).
    dts_in_tree = [dt.gid for dt in dao.get_data_in_project(dt_factory_1.project.id)]
    # Tree JSON - only trivial membership checks possible, as we can not decode it.
    node_json = self.project_service.get_project_structure(dt_factory_1.project, None,
                                                           DataTypeMetaData.KEY_STATE,
                                                           DataTypeMetaData.KEY_SUBJECT, None)

    assert len(expected_links) + SELF_DTS_NUMBER + 2 == len(dts_in_tree), "invalid number of nodes in tree"
    assert link_gr.gid not in dts_in_tree, "DT_group where a single DT is linked is not expected."
    assert dt_group.gid in dts_in_tree, "DT_Group should be in the Project Tree!"
    assert dt_group.gid in node_json, "DT_Group should be in the Project Tree JSON!"

    project_dts = dao.get_datatypes_in_project(dt_factory_1.project.id)
    for dt in project_dts:
        if dt.fk_datatype_group is not None:
            assert dt.gid not in node_json, "DTs part of a group should not be"
            assert dt.gid not in dts_in_tree, "DTs part of a group should not be"
        else:
            assert dt.gid in node_json, "Simple DTs and DT_Groups should be"
            assert dt.gid in dts_in_tree, "Simple DTs and DT_Groups should be"

    for link_gid in expected_links:
        assert link_gid in node_json, "Expected Link not present"
        assert link_gid in dts_in_tree, "Expected Link not present"
def __create_complex_workflow(self, workflow_step_list):
    """
    Creates a burst with a complex workflow with a given list of workflow steps.

    :param workflow_step_list: a list of workflow steps that will be used in the
        creation of a new workflow for a new burst
    :returns: the id of the newly created burst configuration
    """
    burst_config = TestFactory.store_burst(self.test_project.id)
    stored_dt = datatypes_factory.DatatypesFactory()._store_datatype(Datatype1())
    first_step_algorithm = self.flow_service.get_algorithm_by_module_and_class(
        "tvb.tests.framework.adapters.testadapter1", "TestAdapterDatatypeInput")
    metadata = {DataTypeMetaData.KEY_BURST: burst_config.id}
    kwargs = {"test_dt_input": stored_dt.gid, 'test_non_dt_input': '0'}
    operations, group = self.operation_service.prepare_operations(
        self.test_user.id, self.test_project.id, first_step_algorithm,
        first_step_algorithm.algorithm_category, metadata, **kwargs)
    workflows = self.workflow_service.create_and_store_workflow(
        project_id=self.test_project.id, burst_id=burst_config.id,
        simulator_index=0, simulator_id=first_step_algorithm.id,
        operations=operations)
    self.operation_service.prepare_operations_for_workflowsteps(
        workflow_step_list, workflows, self.test_user.id, burst_config.id,
        self.test_project.id, group, operations)
    # Fire the first operation; subsequent workflow steps chain from it.
    if operations:
        self.operation_service.launch_operation(operations[0].id, False)
    return burst_config.id
def test_retrieve_project_full(self):
    """
    Tests full project information is retrieved by method
    `ProjectService.retrieve_project_full(...)`
    """
    dt_factory = datatypes_factory.DatatypesFactory()
    self._create_datatypes(dt_factory, 3)
    _, ops_nr, operations, pages_no = self.project_service.retrieve_project_full(dt_factory.project.id)
    assert ops_nr == 1, "DataType Factory should only use one operation to store all its datatypes."
    # Fixed copy-pasted failure message: this assertion is about pagination,
    # not about the operation count.
    assert pages_no == 1, "A single operation should fit into a single page of results."
    resulted_dts = operations[0]['results']
    assert len(resulted_dts) == 3, "3 datatypes should be created."
def _prepare_and_launch_sync_burst(self):
    """
    Private method to launch a dummy burst. Return the burst loaded after
    the launch finished as well as the workflow steps that initially
    formed the burst.

    NOTE: the burst launched by this method is a `dummy` one, meaning we
    do not use an actual simulation, but instead test adapters.
    """
    burst_config = TestFactory.store_burst(self.test_project.id)
    workflow_step_list = []
    test_portlet = dao.get_portlet_by_identifier(self.PORTLET_ID)

    stored_dt = datatypes_factory.DatatypesFactory()._store_datatype(Datatype1())
    first_step_algorithm = self.flow_service.get_algorithm_by_module_and_class(
        "tvb.tests.framework.adapters.testadapter1", "TestAdapterDatatypeInput")
    metadata = {DataTypeMetaData.KEY_BURST: burst_config.id}
    kwargs = {"test_dt_input": stored_dt.gid, 'test_non_dt_input': '0'}
    operations, group = self.operation_service.prepare_operations(
        self.test_user.id, self.test_project.id, first_step_algorithm,
        first_step_algorithm.algorithm_category, metadata, **kwargs)

    # A single view step, bound to the test portlet.
    view_step = TestFactory.create_workflow_step(
        "tvb.tests.framework.adapters.testadapter2", "TestAdapter2",
        {"test2": 2}, {}, 0, 0, 0, 0, is_view_step=True)
    view_step.fk_portlet = test_portlet.id
    workflow_step_list.append(view_step)

    workflows = self.workflow_service.create_and_store_workflow(
        self.test_project.id, burst_config.id, 0, first_step_algorithm.id, operations)
    self.operation_service.prepare_operations_for_workflowsteps(
        workflow_step_list, workflows, self.test_user.id, burst_config.id,
        self.test_project.id, group, operations)

    # Fire the workflow, then reload the burst configuration after the run.
    self.operation_service.launch_operation(operations[0].id, False)
    loaded_burst, _ = self.burst_service.load_burst(burst_config.id)

    # Clean up the helper datatype and the operation that imported it.
    import_operation = dao.get_operation_by_id(stored_dt.fk_from_operation)
    dao.remove_entity(import_operation.__class__, import_operation.id)
    dao.remove_datatype(stored_dt.gid)
    return loaded_burst, workflow_step_list