def create_datatype_group(self, subject=USER_FULL_NAME, state=DATATYPE_STATE, ):
    """
    Create, store and return a DataTypeGroup entity, together with a parallel
    "measure" DataTypeGroup, and fill both with one entity per point in the
    cartesian product RANGE_1 x RANGE_2.

    :param subject: subject string stored on the created DataTypeGroup entities
    :param state: state string stored on the created DataTypeGroup entities
    :returns: the stored main DataTypeGroup (not the measure group)
    """
    # One OperationGroup for the main datatypes, and a sibling one for their measures.
    group = model.OperationGroup(self.project.id, ranges=[json.dumps(self.RANGE_1), json.dumps(self.RANGE_2)])
    group = dao.store_entity(group)
    group_ms = model.OperationGroup(self.project.id, ranges=[json.dumps(self.RANGE_1), json.dumps(self.RANGE_2)])
    group_ms = dao.store_entity(group_ms)

    datatype_group = model.DataTypeGroup(group, subject=subject, state=state, operation_id=self.operation.id)
    # Set storage path, before setting data
    datatype_group.storage_path = self.files_helper.get_project_folder(self.project, str(self.operation.id))
    datatype_group = dao.store_entity(datatype_group)

    dt_group_ms = model.DataTypeGroup(group_ms, subject=subject, state=state, operation_id=self.operation.id)
    # Set storage path, before setting data
    dt_group_ms.storage_path = self.files_helper.get_project_folder(self.project, str(self.operation.id))
    dao.store_entity(dt_group_ms)

    # Now create some data types and add them to group
    for range_val1 in self.RANGE_1[1]:
        for range_val2 in self.RANGE_2[1]:
            operation = model.Operation(self.user.id, self.project.id, self.algorithm.id, 'test parameters',
                                        meta=json.dumps(self.meta), status=model.STATUS_FINISHED,
                                        range_values=json.dumps({self.RANGE_1[0]: range_val1,
                                                                 self.RANGE_2[0]: range_val2}))
            operation.fk_operation_group = group.id
            operation = dao.store_entity(operation)
            datatype = self.create_datatype_with_storage(operation_id=operation.id)
            datatype.number1 = range_val1
            datatype.number2 = range_val2
            datatype.fk_datatype_group = datatype_group.id
            datatype.set_operation_id(operation.id)
            dao.store_entity(datatype)

            # Sibling operation in the measure group, holding the corresponding measure.
            op_ms = model.Operation(self.user.id, self.project.id, self.algorithm.id, 'test parameters',
                                    meta=json.dumps(self.meta), status=model.STATUS_FINISHED,
                                    range_values=json.dumps({self.RANGE_1[0]: range_val1,
                                                             self.RANGE_2[0]: range_val2}))
            op_ms.fk_operation_group = group_ms.id
            op_ms = dao.store_entity(op_ms)
            # Reuse self.files_helper instead of instantiating a fresh FilesHelper,
            # consistent with the storage_path assignments above.
            self.create_datatype_measure(datatype, op_ms,
                                         self.files_helper.get_project_folder(self.project, str(op_ms.id)))
    return datatype_group
def prepare_operations_for_workflowsteps(self, workflow_step_list, workflow s, user_id, burst_id, project_id,
                                         group, sim_operations):
    """
    Create and store Operation entities from a list of Workflow Steps.
    Will be generated workflows x workflow_step_list Operations.
    For every step in workflow_step_list one OperationGroup and one DataTypeGroup will be created
    (in case of PSE).

    :param workflow_step_list: steps to instantiate, one (per workflow) Operation each
    :param workflows: already-stored Workflow entities, one per simulation
    :param group: OperationGroup of the simulations, or None when not a PSE launch
    :param sim_operations: simulation Operations, indexed in the same order as `workflows`
        (their range_values are copied onto the new Operations)
    """
    for step in workflow_step_list:
        # Visualization steps never get an OperationGroup, even for PSE launches.
        operation_group = None
        if (group is not None) and not isinstance(step, model.WorkflowStepView):
            operation_group = model.OperationGroup(project_id=project_id, ranges=group.range_references)
            operation_group = dao.store_entity(operation_group)
        operation = None
        metadata = {DataTypeMetaData.KEY_BURST: burst_id}
        algo_category = dao.get_algorithm_by_id(step.fk_algorithm)
        if algo_category is not None:
            algo_category = algo_category.algorithm_category
        for wf_idx, workflow in enumerate(workflows):
            # One cloned step (and possibly one Operation) per workflow.
            cloned_w_step = step.clone()
            cloned_w_step.fk_workflow = workflow.id
            dynamic_params = cloned_w_step.dynamic_param
            op_params = cloned_w_step.static_param
            # dynamic params override static ones with the same key
            op_params.update(dynamic_params)
            range_values = None
            group_id = None
            if operation_group is not None:
                group_id = operation_group.id
                # Inherit the range point from the matching simulation operation.
                range_values = sim_operations[wf_idx].range_values
            if not isinstance(step, model.WorkflowStepView):
                ## For visualization steps, do not create operations, as those are not really needed.
                # NOTE(review): `metadata` is rebound each iteration by _prepare_metadata and the
                # last value is read after the loop (KEY_STATE) — keep this call inside the loop.
                metadata, user_group = self._prepare_metadata(metadata, algo_category, operation_group, op_params)
                operation = model.Operation(user_id, project_id, step.fk_algorithm,
                                            json.dumps(op_params, cls=MapAsJson.MapAsJsonEncoder),
                                            meta=json.dumps(metadata),
                                            op_group_id=group_id, range_values=range_values,
                                            user_group=user_group)
                operation.visible = step.step_visible
                operation = dao.store_entity(operation)
                cloned_w_step.fk_operation = operation.id
            # Cloned steps are stored even for visualization steps (no fk_operation then).
            dao.store_entity(cloned_w_step)
        if operation_group is not None and operation is not None:
            # The DataTypeGroup is anchored on the LAST stored operation of this step.
            datatype_group = model.DataTypeGroup(operation_group, operation_id=operation.id,
                                                 fk_parent_burst=burst_id,
                                                 state=metadata[DataTypeMetaData.KEY_STATE])
            dao.store_entity(datatype_group)
def test_get_inputs_for_op_group(self):
    """
    Tests method get_datatypes_inputs_for_operation_group.
    The DataType inputs will be from a DataType group.
    """
    project, dt_group_id, first_dt, second_dt = self._create_datatype_group()

    def _check_only_group_returned(found):
        # Exactly one input is expected: the DataType group itself, never its members.
        self.assertEqual(len(found), 1, "Incorrect number of dataTypes.")
        self.assertFalse(first_dt.id == found[0].id, "Retrieved wrong dataType.")
        self.assertFalse(second_dt.id == found[0].id, "Retrieved wrong dataType.")
        self.assertTrue(dt_group_id == found[0].id, "Retrieved wrong dataType.")

    # Hide both member datatypes from the 'relevant' filter.
    for member in (first_dt, second_dt):
        member.visible = False
        dao.store_entity(member)

    op_group = dao.store_entity(model.OperationGroup(project.id, "group", "range1[1..2]"))
    params_1 = json.dumps({"param_5": "1", "param_1": first_dt.gid, "param_6": "2"})
    params_2 = json.dumps({"param_5": "1", "param_4": second_dt.gid, "param_6": "5"})
    algo_group = dao.find_group('tvb.tests.framework.adapters.testadapter3', 'TestAdapter3')
    algo = dao.get_algorithm_by_group(algo_group.id)
    dao.store_entities([model.Operation(self.test_user.id, project.id, algo.id, params, op_group_id=op_group.id)
                        for params in (params_1, params_2)])

    # With every member invisible the relevant filter yields nothing ...
    inputs = self.project_service.get_datatypes_inputs_for_operation_group(op_group.id, self.relevant_filter)
    self.assertEqual(len(inputs), 0)
    # ... while the full filter still exposes the group.
    _check_only_group_returned(
        self.project_service.get_datatypes_inputs_for_operation_group(op_group.id, self.full_filter))

    # Make one member visible again: both filters now report the group.
    first_dt.visible = True
    dao.store_entity(first_dt)
    _check_only_group_returned(
        self.project_service.get_datatypes_inputs_for_operation_group(op_group.id, self.relevant_filter))
    _check_only_group_returned(
        self.project_service.get_datatypes_inputs_for_operation_group(op_group.id, self.full_filter))
def create_datatype_group(self, subject=USER_FULL_NAME, state=DATATYPE_STATE, ):
    """
    Create, store and return a DataTypeGroup entity holding one DataType
    for every value of the hard-coded range ['a', 'b', 'c'].
    """
    range_values = ['a', 'b', 'c']
    op_group_ranges = [json.dumps(["row1", range_values])]
    op_group = model.OperationGroup(self.project.id, self.OPERATION_GROUP_NAME, op_group_ranges)
    op_group = dao.store_entity(op_group)
    dt_group = model.DataTypeGroup(op_group, subject=subject, state=state, operation_id=self.operation.id)
    # Storage path must be assigned before any data is attached.
    dt_group.storage_path = self.files_helper.get_project_folder(self.project, str(self.operation.id))
    dt_group = dao.store_entity(dt_group)
    # Create one datatype per range value and link it to the group.
    for value in range_values:
        operation = model.Operation(self.user.id, self.project.id, self.algorithm.id, 'test parameters',
                                    meta=json.dumps(self.meta), status="FINISHED",
                                    method_name=ABCAdapter.LAUNCH_METHOD,
                                    range_values=json.dumps({'row1': value}))
        operation.fk_operation_group = op_group.id
        operation = dao.store_entity(operation)
        datatype = self.create_datatype_with_storage(operation_id=operation.id)
        datatype.row1 = value
        datatype.fk_datatype_group = dt_group.id
        datatype.set_operation_id(operation.id)
        dao.store_entity(datatype)
    return dt_group
def _prepare_group(self, project_id, kwargs):
    """ Create and store OperationGroup entity, or return None """
    # Standard ranges as accepted from UI
    range1_values = self.get_range_values(kwargs, self._range_name(1))
    range2_values = self.get_range_values(kwargs, self._range_name(2))
    available_args = self.__expand_arguments([(kwargs, None)], range1_values, self._range_name(1))
    available_args = self.__expand_arguments(available_args, range2_values, self._range_name(2))

    is_group = False
    ranges = []
    # Same handling for range 1 and range 2: remember the (name, values) pair as JSON.
    for range_idx, range_values in ((1, range1_values), (2, range2_values)):
        param_name = self._range_name(range_idx)
        if param_name in kwargs and range_values is not None:
            is_group = True
            ranges.append(json.dumps((kwargs[param_name], range_values)))

    # Now for additional ranges which might be the case for the 'model exploration'
    next_range_idx = 3
    while self._range_name(next_range_idx) in kwargs:
        extra_name = self._range_name(next_range_idx)
        extra_values = self.get_range_values(kwargs, extra_name)
        available_args = self.__expand_arguments(available_args, extra_values, extra_name)
        next_range_idx += 1
    if next_range_idx > 3:
        # Since we only have 3 fields in db for this just hide it
        ranges = []

    if is_group:
        group = dao.store_entity(model.OperationGroup(project_id=project_id, ranges=ranges))
    else:
        group = None
    return available_args, group
def test_get_inputs_for_op_group_simple_inputs(self):
    """
    Tests method get_datatypes_inputs_for_operation_group.
    The dataType inputs will not be part of a dataType group.
    """
    # it's a list of 3 elem.
    array_wrappers = self._create_mapped_arrays(self.test_project.id)
    array_wrapper_ids = [wrapper[0] for wrapper in array_wrappers]

    # Hide the first wrapper so the 'relevant' filter must exclude it.
    hidden_dt = dao.get_datatype_by_id(array_wrapper_ids[0])
    hidden_dt.visible = False
    dao.store_entity(hidden_dt)

    op_group = dao.store_entity(model.OperationGroup(self.test_project.id, "group", "range1[1..2]"))
    params_1 = json.dumps({"param_5": "2", "param_1": array_wrappers[0][2],
                           "param_2": array_wrappers[1][2], "param_6": "7"})
    params_2 = json.dumps({"param_5": "5", "param_3": array_wrappers[2][2],
                           "param_2": array_wrappers[1][2], "param_6": "6"})
    algo_group = dao.find_group('tvb.tests.framework.adapters.testadapter3', 'TestAdapter3')
    algo = dao.get_algorithm_by_group(algo_group.id)
    dao.store_entities(
        [model.Operation(self.test_user.id, self.test_project.id, algo.id, params, op_group_id=op_group.id)
         for params in (params_1, params_2)])

    # Relevant filter: only the two visible wrappers are reported.
    inputs = self.project_service.get_datatypes_inputs_for_operation_group(op_group.id, self.relevant_filter)
    self.assertEqual(len(inputs), 2)
    visible_ids = [inputs[0].id, inputs[1].id]
    self.assertFalse(array_wrapper_ids[0] in visible_ids, "Retrieved wrong dataType.")
    self.assertTrue(array_wrapper_ids[1] in visible_ids, "Retrieved wrong dataType.")
    self.assertTrue(array_wrapper_ids[2] in visible_ids, "Retrieved wrong dataType.")

    # Full filter: all three wrappers are inputs, including the hidden one.
    inputs = self.project_service.get_datatypes_inputs_for_operation_group(op_group.id, self.full_filter)
    self.assertEqual(len(inputs), 3, "Incorrect number of dataTypes.")
    all_ids = [inputs[0].id, inputs[1].id, inputs[2].id]
    self.assertTrue(array_wrapper_ids[0] in all_ids)
    self.assertTrue(array_wrapper_ids[1] in all_ids)
    self.assertTrue(array_wrapper_ids[2] in all_ids)
def _prepare_group(self, project_id, kwargs):
    """ Create and store OperationGroup entity, or return None """
    range1_values = self.__get_range_values(kwargs, PARAM_RANGE_1)
    range2_values = self.__get_range_values(kwargs, PARAM_RANGE_2)
    available_args = self.__expand_arguments([(kwargs, None)], range1_values, PARAM_RANGE_1)
    available_args = self.__expand_arguments(available_args, range2_values, PARAM_RANGE_2)

    ranges = []
    is_group = False
    # Both ranges get the same treatment: store the (name, values) pair as JSON.
    for param_name, param_values in ((PARAM_RANGE_1, range1_values), (PARAM_RANGE_2, range2_values)):
        if param_name in kwargs and param_values is not None:
            is_group = True
            ranges.append(json.dumps((kwargs[param_name], param_values)))

    if not is_group:
        return available_args, None
    group = dao.store_entity(model.OperationGroup(project_id=project_id, ranges=ranges))
    return available_args, group