def test_remove_datatype_from_group(self, datatype_group_factory, project_factory, user_factory):
    """
    Tests the deletion of a datatype group.

    Removing a single datatype belonging to a group must cascade: all member
    datatypes, the measure entity, the group entity and its operation group
    must disappear.
    """
    user = user_factory()
    project = project_factory(user)
    group = datatype_group_factory(project=project)
    # Fetch the group entity and its members plus the measure linked to the first member.
    datatype_group = dao.get_generic_entity(DataTypeGroup, group.id)[0]
    datatypes = dao.get_datatypes_from_datatype_group(group.id)
    datatype_measure = dao.get_generic_entity(DatatypeMeasureIndex, datatypes[0].gid, "fk_source_gid")[0]

    # When trying to delete one entity in a group the entire group will be removed
    # First remove the DTMeasures, to avoid FK failures
    self.project_service.remove_datatype(project.id, datatype_measure.gid)
    self.project_service.remove_datatype(project.id, datatypes[0].gid)

    self._check_if_datatype_was_removed(datatypes[0])
    self._check_if_datatype_was_removed(datatypes[1])
    self._check_if_datatype_was_removed(datatype_group)
    self._check_if_datatype_was_removed(datatype_measure)
    self._check_datatype_group_removed(group.id, datatype_group.fk_operation_group)
def get_launchable_algorithms(self, datatype_gid):
    """
    :param datatype_gid: Filter only algorithms compatible with this GUID
    :return: dict(category_name: List AlgorithmTransientGroup)
    """
    launchable_categories = dao.get_launchable_categories()
    datatype_instance, filtered_adapters, has_operations_warning = self._get_launchable_algorithms(
        datatype_gid, launchable_categories)

    if isinstance(datatype_instance, DataTypeGroup):
        # If part of a group, update also with specific analyzers of the child datatype
        group_entity = dao.get_datatype_group_by_gid(datatype_gid)
        children = dao.get_datatypes_from_datatype_group(group_entity.id)
        if children:
            analyze_category = dao.get_launchable_categories(True)
            _, inner_analyzers, _ = self._get_launchable_algorithms(children[-1].gid, analyze_category)
            filtered_adapters.extend(inner_analyzers)

    categories_dict = {category.id: category.displayname for category in launchable_categories}
    return self._group_adapters_by_category(filtered_adapters, categories_dict), has_operations_warning
def test_getdatatypes_from_dtgroup(self, datatype_group_factory):
    """
    Validate that we can retrieve all DTs from a DT_Group
    """
    group = datatype_group_factory()
    exp_datatypes = dao.get_datatypes_from_datatype_group(group.id)
    datatypes = self.project_service.get_datatypes_from_datatype_group(group.id)
    # Fixed message: it previously hard-coded "10 datatypes" while the
    # assertion actually compares against group.count_results.
    assert len(datatypes) == group.count_results, \
        "There should be %s datatypes into the datatype group." % group.count_results

    # Compare the first two entities field-by-field against the DAO results.
    expected_dict = {exp_datatypes[0].id: exp_datatypes[0], exp_datatypes[1].id: exp_datatypes[1]}
    actual_dict = {datatypes[0].id: datatypes[0], datatypes[1].id: datatypes[1]}
    for key in expected_dict:
        expected = expected_dict[key]
        actual = actual_dict[key]
        assert expected.id == actual.id, "Not the same id."
        assert expected.gid == actual.gid, "Not the same gid."
        assert expected.type == actual.type, "Not the same type."
        assert expected.subject == actual.subject, "Not the same subject."
        assert expected.state == actual.state, "Not the same state."
        assert expected.visible == actual.visible, "The datatype visibility is not correct."
        assert expected.module == actual.module, "Not the same module."
        assert expected.user_tag_1 == actual.user_tag_1, "Not the same user_tag_1."
        assert expected.invalid == actual.invalid, "The invalid field value is not correct."
        assert expected.is_nan == actual.is_nan, "The is_nan field value is not correct."
def test_remove_datatype_group(self, datatype_group_factory, project_factory, user_factory):
    """
    Tests the deletion of a datatype group.

    The factory creates two DataTypeGroup entities; removing both must also
    remove every member datatype and the backing operation group.
    """
    user = user_factory()
    project = project_factory(user)
    group, _ = datatype_group_factory(project=project)
    datatype_groups = self.get_all_entities(DataTypeGroup)
    datatypes = dao.get_datatypes_from_datatype_group(group.id)
    # Two group entities are expected in the DB at this point.
    assert 2 == len(datatype_groups)

    # skip_validation=True so removal is not blocked by cross-group references.
    self.project_service.remove_datatype(project.id, datatype_groups[1].gid, skip_validation=True)
    self.project_service.remove_datatype(project.id, datatype_groups[0].gid, skip_validation=True)

    self._check_if_datatype_was_removed(datatypes[0])
    self._check_if_datatype_was_removed(datatypes[1])
    self._check_if_datatype_was_removed(datatype_groups[0])
    self._check_if_datatype_was_removed(datatype_groups[1])
    self._check_datatype_group_removed(group.id, datatype_groups[0].fk_operation_group)
def remove_datatype(self, project_id, datatype_gid, skip_validation=False):
    """
    Method used for removing a dataType. If the given dataType is a DatatypeGroup
    or a dataType from a DataTypeGroup than this method will remove the entire group.
    The operation(s) used for creating the dataType(s) will also be removed.
    """
    datatype = dao.get_datatype_by_gid(datatype_gid)
    if datatype is None:
        # Nothing to do; entity already gone.
        self.logger.warning("Attempt to delete DT[%s] which no longer exists." % datatype_gid)
        return

    # Treat the entity as a group either when it IS the group entity, or when
    # it is a member; in the member case switch to the parent group entity so
    # the whole group gets removed.
    is_datatype_group = False
    if dao.is_datatype_group(datatype_gid):
        is_datatype_group = True
    elif datatype.fk_datatype_group is not None:
        is_datatype_group = True
        datatype = dao.get_datatype_by_id(datatype.fk_datatype_group)

    # Operations that produced the removed datatype(s); candidates for cleanup below.
    operations_set = [datatype.fk_from_operation]
    correct = True

    if is_datatype_group:
        self.logger.debug("Removing datatype group %s" % datatype)
        data_list = dao.get_datatypes_from_datatype_group(datatype.id)
        for adata in data_list:
            self._remove_project_node_files(project_id, adata.gid, skip_validation)
            if adata.fk_from_operation not in operations_set:
                operations_set.append(adata.fk_from_operation)

        datatype_group = dao.get_datatype_group_by_gid(datatype.gid)
        dao.remove_datatype(datatype_gid)
        correct = correct and dao.remove_entity(model.OperationGroup, datatype_group.fk_operation_group)
    else:
        self.logger.debug("Removing datatype %s" % datatype)
        self._remove_project_node_files(project_id, datatype.gid, skip_validation)

    ## Remove Operation entity in case no other DataType needs them.
    project = dao.get_project_by_id(project_id)
    for operation_id in operations_set:
        dependent_dt = dao.get_generic_entity(model.DataType, operation_id, "fk_from_operation")
        if len(dependent_dt) > 0:
            ### Do not remove Operation in case DataType still exist referring it.
            continue
        correct = correct and dao.remove_entity(model.Operation, operation_id)
        ## Make sure Operation folder is removed
        # NOTE(review): this removes the folder of datatype.fk_from_operation on every
        # iteration instead of operation_id — looks suspicious; confirm intended.
        self.structure_helper.remove_operation_data(project.name, datatype.fk_from_operation)

    if not correct:
        raise RemoveDataTypeException("Could not remove DataType " + str(datatype_gid))
def _update_dt_groups(self, project_id):
    """Refresh the cached result count and parent burst of every DT group in a project."""
    for group in dao.get_datatypegroup_for_project(project_id):
        group.count_results = dao.count_datatypes_in_group(group.id)
        members = dao.get_datatypes_from_datatype_group(group.id)
        if members:
            group.fk_parent_burst = members[0].fk_parent_burst
        dao.store_entity(group)
def test_count_datatypes_in_group(self, datatype_group_factory):
    """ Test that counting dataTypes is correct. Happy flow."""
    group = datatype_group_factory()
    # The factory is expected to populate the group with 9 datatypes.
    assert dao.count_datatypes_in_group(group.id) == 9
    members = dao.get_datatypes_from_datatype_group(group.id)
    # A plain member datatype is not itself a group, so its count must be zero.
    assert dao.count_datatypes_in_group(members[0].id) == 0, "There should be no dataType."
def _remove_datatype_group_dts(self, project_id, dt_group_id, skip_validation, operations_set):
    """Delete the files of every datatype in a group and collect their source operation ids."""
    for member in dao.get_datatypes_from_datatype_group(dt_group_id):
        self._remove_project_node_files(project_id, member.gid, skip_validation)
        if member.fk_from_operation not in operations_set:
            operations_set.append(member.fk_from_operation)
def remove_datatype(self, project_id, datatype_gid, skip_validation=False):
    """
    Method used for removing a dataType. If the given dataType is a DatatypeGroup
    or a dataType from a DataTypeGroup than this method will remove the entire group.
    The operation(s) used for creating the dataType(s) will also be removed.
    """
    datatype = dao.get_datatype_by_gid(datatype_gid)
    if datatype is None:
        # Nothing to do; entity already gone.
        self.logger.warning("Attempt to delete DT[%s] which no longer exists." % datatype_gid)
        return

    # Track the owner so the freed disk space can be subtracted at the end.
    user = dao.get_user_for_datatype(datatype.id)
    freed_space = datatype.disk_size or 0
    is_datatype_group = False
    if dao.is_datatype_group(datatype_gid):
        is_datatype_group = True
        freed_space = dao.get_datatype_group_disk_size(datatype.id)
    elif datatype.fk_datatype_group is not None:
        # Member of a group: switch to the group entity so the whole group is removed.
        is_datatype_group = True
        datatype = dao.get_datatype_by_id(datatype.fk_datatype_group)
        freed_space = dao.get_datatype_group_disk_size(datatype.id)

    # Operations that produced the removed datatype(s); candidates for cleanup below.
    operations_set = [datatype.fk_from_operation]
    correct = True

    if is_datatype_group:
        self.logger.debug("Removing datatype group %s" % datatype)
        data_list = dao.get_datatypes_from_datatype_group(datatype.id)
        for adata in data_list:
            self._remove_project_node_files(project_id, adata.gid, skip_validation)
            if adata.fk_from_operation not in operations_set:
                operations_set.append(adata.fk_from_operation)

        datatype_group = dao.get_datatype_group_by_gid(datatype.gid)
        dao.remove_datatype(datatype_gid)
        correct = correct and dao.remove_entity(model.OperationGroup, datatype_group.fk_operation_group)
    else:
        self.logger.debug("Removing datatype %s" % datatype)
        self._remove_project_node_files(project_id, datatype.gid, skip_validation)

    ## Remove Operation entity in case no other DataType needs them.
    project = dao.get_project_by_id(project_id)
    for operation_id in operations_set:
        dependent_dt = dao.get_generic_entity(model.DataType, operation_id, "fk_from_operation")
        if len(dependent_dt) > 0:
            ### Do not remove Operation in case DataType still exist referring it.
            continue
        correct = correct and dao.remove_entity(model.Operation, operation_id)
        ## Make sure Operation folder is removed
        # NOTE(review): this removes the folder of datatype.fk_from_operation on every
        # iteration instead of operation_id — looks suspicious; confirm intended.
        self.structure_helper.remove_operation_data(project.name, datatype.fk_from_operation)

    if not correct:
        raise RemoveDataTypeException("Could not remove DataType " + str(datatype_gid))

    # Update the owner's disk-usage accounting with the space that was freed.
    user.used_disk_space = user.used_disk_space - freed_space
    dao.store_entity(user)
def test_is_datatype_group(self, datatype_group_factory):
    """ Tests if a datatype is group. """
    group = datatype_group_factory()
    dt_group = dao.get_generic_entity(DataTypeGroup, group.id)[0]
    # The group entity itself must be reported as a group...
    assert self.project_service.is_datatype_group(dt_group.gid), \
        "The datatype should be a datatype group."
    # ...while a plain member datatype must not.
    members = dao.get_datatypes_from_datatype_group(dt_group.id)
    assert not self.project_service.is_datatype_group(members[0].gid), \
        "The datatype should not be a datatype group."
def test_set_visibility_for_dt_in_group(self, datatype_group_factory):
    """
    Check if the visibility for a datatype from a datatype group is set correct.
    """
    group = datatype_group_factory()
    datatypes = dao.get_datatypes_from_datatype_group(group.id)
    assert datatypes[0].visible, "The data type should be visible."
    assert datatypes[1].visible, "The data type should be visible."
    # Hiding one member must cascade over the group entity and all its members.
    self.project_service.set_datatype_visibility(datatypes[0].gid, False)

    db_dt_group = self.project_service.get_datatype_by_id(group.id)
    db_first_dt = self.project_service.get_datatype_by_id(datatypes[0].id)
    db_second_dt = self.project_service.get_datatype_by_id(datatypes[1].id)

    # Fixed assertion messages: they previously said "should be visible"
    # while the assertions check that the entities are NOT visible.
    assert not db_dt_group.visible, "The data type group should not be visible."
    assert not db_first_dt.visible, "The data type should not be visible."
    assert not db_second_dt.visible, "The data type should not be visible."
def test_remove_datatype_group(self, datatype_group_factory, project_factory, user_factory):
    """
    Tests the deletion of a datatype group.

    Removing the group entity must also remove every member datatype and the
    backing operation group.
    """
    user = user_factory()
    project = project_factory(user)
    group = datatype_group_factory(project=project)
    datatype_group = dao.get_generic_entity(DataTypeGroup, group.id)[0]
    datatypes = dao.get_datatypes_from_datatype_group(group.id)

    self.project_service.remove_datatype(project.id, datatype_group.gid)

    self._check_if_datatype_was_removed(datatypes[0])
    self._check_if_datatype_was_removed(datatypes[1])
    self._check_if_datatype_was_removed(datatype_group)
    self._check_datatype_group_removed(group.id, datatype_group.fk_operation_group)
def _create_operations_with_inputs(self, datatype_group, is_group_parent=False):
    """
    Method used for creating a complex tree of operations.
    If 'if_group_parent' is True then a new group will be created and one of its entries
    it will be used as input for the returned operations.
    """
    group_dts = dao.get_datatypes_from_datatype_group(datatype_group.id)
    # Choose the input datatype: either a group member or a fresh value wrapper.
    if is_group_parent:
        datatype_gid = group_dts[0].gid
    else:
        datatype_gid = self._create_value_wrapper(self.test_user, self.test_project)[1]

    parameters = json.dumps({"param_name": datatype_gid})

    # Four standalone operations; operations at index 1 and 3 are made invisible
    # so callers can exercise visibility filtering.
    ops = []
    for i in range(4):
        ops.append(TestFactory.create_operation(test_user=self.test_user, test_project=self.test_project))
        if i in [1, 3]:
            ops[i].visible = False
        ops[i].parameters = parameters
        ops[i] = dao.store_entity(ops[i])

    # groups
    ops_group = dao.get_operations_in_group(datatype_group.fk_from_operation)
    assert 9 == len(ops_group)
    # Give the first two grouped operations the same input; hide the second one.
    ops_group[0].parameters = parameters
    ops_group[0] = dao.store_entity(ops_group[0])
    ops_group[1].visible = False
    ops_group[1].parameters = parameters
    ops_group[1] = dao.store_entity(ops_group[1])
    ops.extend(ops_group)

    if is_group_parent:
        dt_group = dao.get_datatypegroup_by_op_group_id(datatype_group.id)
        return ops, dt_group.id
    return ops, datatype_gid
def test_set_visibility_for_group(self, datatype_group_factory):
    """
    Check if the visibility for a datatype group is set correct.
    """
    group = datatype_group_factory()
    dt_group = dao.get_generic_entity(DataTypeGroup, group.id)[0]
    datatypes = dao.get_datatypes_from_datatype_group(dt_group.id)

    assert dt_group.visible, "The data type group should be visible."
    assert datatypes[0].visible, "The data type should be visible."
    assert datatypes[1].visible, "The data type should be visible."
    # Hiding the group must cascade over all member datatypes.
    self.project_service.set_datatype_visibility(dt_group.gid, False)

    updated_dt_group = self.project_service.get_datatype_by_id(dt_group.id)
    updated_first_dt = self.project_service.get_datatype_by_id(datatypes[0].id)
    updated_second_dt = self.project_service.get_datatype_by_id(datatypes[1].id)

    # Fixed assertion messages: they previously said "should be visible"
    # while the assertions check that the entities are NOT visible.
    assert not updated_dt_group.visible, "The data type group should not be visible."
    assert not updated_first_dt.visible, "The data type should not be visible."
    assert not updated_second_dt.visible, "The data type should not be visible."
def get_launchable_algorithms(self, datatype_gid):
    """
    :param datatype_gid: Filter only algorithms compatible with this GUID
    :return: dict(category_name: List AlgorithmTransientGroup)
    """
    launchable_categories = dao.get_launchable_categories()
    datatype_instance, filtered_adapters = self._get_launchable_algorithms(datatype_gid, launchable_categories)

    if isinstance(datatype_instance, model.DataTypeGroup):
        # If part of a group, update also with specific analyzers of the child datatype
        group_entity = dao.get_datatype_group_by_gid(datatype_gid)
        children = dao.get_datatypes_from_datatype_group(group_entity.id)
        if children:
            analyze_category = dao.get_launchable_categories(True)
            _, inner_analyzers = self._get_launchable_algorithms(children[-1].gid, analyze_category)
            filtered_adapters.extend(inner_analyzers)

    categories_dict = {category.id: category.displayname for category in launchable_categories}
    return self._group_adapters_by_category(filtered_adapters, categories_dict)
def retrieve_launchers(self, dataname, datatype_gid=None, inspect_group=False, exclude_categories=None):
    """
    Returns all the available launch-able algorithms from the database.
    Filter the ones accepting as required input a specific DataType.

    :param dataname: String or class representing DataType to retrieve filters for it.
    :param datatype_gid: Optional GID, to filter algorithms for this particular entity.
    :param inspect_group: TRUE if we are now in the inspection of sub-entities in a DataTypeGroup
    :param exclude_categories: List of categories to be excluded from the result.
    """
    if exclude_categories is None:
        exclude_categories = []
    launch_categ = dao.get_launchable_categories()
    launch_categ = dict((categ.id, categ.displayname) for categ in launch_categ
                        if categ.id not in exclude_categories)
    launch_groups = dao.get_apliable_algo_groups(dataname, launch_categ.keys())

    if datatype_gid is None:
        # No specific entity: return everything applicable to the type.
        return ProjectService.__prepare_group_result(launch_groups, launch_categ, inspect_group)

    try:
        datatype_instance = dao.get_datatype_by_gid(datatype_gid)
        data_class = datatype_instance.__class__
        # Also collect algorithms registered against the datatype's base classes.
        for one_class in data_class.__bases__:
            launch_groups.extend(dao.get_apliable_algo_groups(one_class.__name__, launch_categ.keys()))
        specific_datatype = dao.get_generic_entity(data_class, datatype_gid, "gid")

        # Keep only the algorithms whose filter chains accept this concrete instance.
        to_remove = []
        for one_group in launch_groups:
            valid_algorithms = []
            for one_algo in one_group.children:
                filter_chain = FilterChain.from_json(one_algo.datatype_filter)
                if not filter_chain or filter_chain.get_python_filter_equivalent(specific_datatype[0]):
                    valid_algorithms.append(one_algo)
            if len(valid_algorithms) > 0:
                one_group.children = copy.deepcopy(valid_algorithms)
            else:
                to_remove.append(one_group)
        for one_group in to_remove:
            launch_groups.remove(one_group)
            del one_group

        launchers = ProjectService.__prepare_group_result(launch_groups, launch_categ, inspect_group)

        if dataname == model.DataTypeGroup.__name__:
            # If part of a group, update also with specific launchers of that datatype
            dt_group = dao.get_datatype_group_by_gid(datatype_gid)
            datatypes = dao.get_datatypes_from_datatype_group(dt_group.id)
            if len(datatypes):
                datatype = datatypes[-1]
                datatype = dao.get_datatype_by_gid(datatype.gid)
                views_categ_id = dao.get_visualisers_categories()[0].id
                specific_launchers = self.retrieve_launchers(datatype.__class__.__name__, datatype.gid, True,
                                                             [views_categ_id] + exclude_categories)
                for key in specific_launchers:
                    if key in launchers:
                        launchers[key].update(specific_launchers[key])
                    else:
                        launchers[key] = specific_launchers[key]
        return launchers

    # FIX: 'except Exception, excep' is Python-2-only syntax; the 'as' form
    # is valid on Python 2.6+ and required on Python 3.
    except Exception as excep:
        ProjectService().logger.exception(excep)
        ProjectService().logger.warning("Attempting to filter launcher for group despite exception!")
        return ProjectService.__prepare_group_result(launch_groups, launch_categ, inspect_group)
def test_get_inputs_for_operation(self, datatype_group_factory):
    """
    Tests method get_datatype_and_datatypegroup_inputs_for_operation.
    Verifies filters' influence over results is as expected
    """
    algo = dao.get_algorithm_by_module('tvb.tests.framework.adapters.testadapter3', 'TestAdapter3')

    array_wrappers = self._create_mapped_arrays(self.test_project.id)
    ids = []
    for datatype in array_wrappers:
        ids.append(datatype[0])

    # Hide the first datatype so the "relevant" filter should exclude it.
    datatype = dao.get_datatype_by_id(ids[0])
    datatype.visible = False
    dao.store_entity(datatype)

    parameters = json.dumps({"param_5": "1", "param_1": array_wrappers[0][2],
                             "param_2": array_wrappers[1][2], "param_3": array_wrappers[2][2],
                             "param_6": "0"})
    operation = Operation(self.test_user.id, self.test_project.id, algo.id, parameters)
    operation = dao.store_entity(operation)

    # With the relevant filter, only the two visible inputs are returned.
    inputs = self.project_service.get_datatype_and_datatypegroup_inputs_for_operation(
        operation.gid, self.relevant_filter)
    assert len(inputs) == 2
    assert ids[1] in [inputs[0].id, inputs[1].id], "Retrieved wrong dataType."
    assert ids[2] in [inputs[0].id, inputs[1].id], "Retrieved wrong dataType."
    assert not ids[0] in [inputs[0].id, inputs[1].id], "Retrieved wrong dataType."

    # With the full filter, the invisible input is returned as well.
    inputs = self.project_service.get_datatype_and_datatypegroup_inputs_for_operation(
        operation.gid, self.full_filter)
    assert len(inputs) == 3, "Incorrect number of operations."
    assert ids[0] in [inputs[0].id, inputs[1].id, inputs[2].id], "Retrieved wrong dataType."
    assert ids[1] in [inputs[0].id, inputs[1].id, inputs[2].id], "Retrieved wrong dataType."
    assert ids[2] in [inputs[0].id, inputs[1].id, inputs[2].id], "Retrieved wrong dataType."

    # When an input belongs to a datatype group, the group entity (not the
    # member datatype) is reported as the operation's input.
    group = datatype_group_factory(project=self.test_project)
    datatypes = dao.get_datatypes_from_datatype_group(group.id)
    datatypes[0].visible = False
    dao.store_entity(datatypes[0])

    parameters = json.dumps({"other_param": "_", "param_1": datatypes[0].gid})
    operation = Operation(self.test_user.id, self.test_project.id, algo.id, parameters)
    operation = dao.store_entity(operation)

    inputs = self.project_service.get_datatype_and_datatypegroup_inputs_for_operation(
        operation.gid, self.relevant_filter)
    assert len(inputs) == 0, "Incorrect number of dataTypes."
    inputs = self.project_service.get_datatype_and_datatypegroup_inputs_for_operation(
        operation.gid, self.full_filter)
    assert len(inputs) == 1, "Incorrect number of dataTypes."
    assert inputs[0].id == group.id, "Wrong dataType."
    assert inputs[0].id != datatypes[0].id, "Wrong dataType."
def test_get_inputs_for_op_group(self, datatype_group_factory, test_adapter_factory):
    """
    Tests method get_datatypes_inputs_for_operation_group.
    The DataType inputs will be from a DataType group.
    """
    group = datatype_group_factory(project=self.test_project)
    datatypes = dao.get_datatypes_from_datatype_group(group.id)
    # Hide both members so the "relevant" filter initially finds nothing.
    datatypes[0].visible = False
    dao.store_entity(datatypes[0])
    datatypes[1].visible = False
    dao.store_entity(datatypes[1])

    op_group = OperationGroup(self.test_project.id, "group", "range1[1..2]")
    op_group = dao.store_entity(op_group)
    params_1 = json.dumps({"param_5": "1", "param_1": datatypes[0].gid, "param_6": "2"})
    params_2 = json.dumps({"param_5": "1", "param_4": datatypes[1].gid, "param_6": "5"})

    test_adapter_factory(adapter_class=TestAdapter3)
    algo = dao.get_algorithm_by_module('tvb.tests.framework.adapters.testadapter3', 'TestAdapter3')

    # Two operations in the same group, each consuming one group member.
    op1 = Operation(self.test_user.id, self.test_project.id, algo.id, params_1, op_group_id=op_group.id)
    op2 = Operation(self.test_user.id, self.test_project.id, algo.id, params_2, op_group_id=op_group.id)
    dao.store_entities([op1, op2])

    inputs = self.project_service.get_datatypes_inputs_for_operation_group(op_group.id, self.relevant_filter)
    assert len(inputs) == 0

    # The full filter reports the datatype group entity as the single input,
    # never the individual members.
    inputs = self.project_service.get_datatypes_inputs_for_operation_group(op_group.id, self.full_filter)
    assert len(inputs) == 1, "Incorrect number of dataTypes."
    assert not datatypes[0].id == inputs[0].id, "Retrieved wrong dataType."
    assert not datatypes[1].id == inputs[0].id, "Retrieved wrong dataType."
    assert group.id == inputs[0].id, "Retrieved wrong dataType."

    # After making one member visible again, the group satisfies the relevant filter too.
    datatypes[0].visible = True
    dao.store_entity(datatypes[0])

    inputs = self.project_service.get_datatypes_inputs_for_operation_group(op_group.id, self.relevant_filter)
    assert len(inputs) == 1, "Incorrect number of dataTypes."
    assert not datatypes[0].id == inputs[0].id, "Retrieved wrong dataType."
    assert not datatypes[1].id == inputs[0].id, "Retrieved wrong dataType."
    assert group.id == inputs[0].id, "Retrieved wrong dataType."

    inputs = self.project_service.get_datatypes_inputs_for_operation_group(op_group.id, self.full_filter)
    assert len(inputs) == 1, "Incorrect number of dataTypes."
    assert not datatypes[0].id == inputs[0].id, "Retrieved wrong dataType."
    assert not datatypes[1].id == inputs[0].id, "Retrieved wrong dataType."
    assert group.id == inputs[0].id, "Retrieved wrong dataType."
def retrieve_launchers(self, datatype_gid, inspect_group=False, include_categories=None):
    """
    Returns all the available launch-able algorithms from the database.
    Filter the ones accepting as required input a specific DataType.

    :param datatype_gid: GID, to filter algorithms for this particular entity.
    :param inspect_group: TRUE if we are now in the inspection of sub-entities in a DataTypeGroup
    :param include_categories: List of categories to be included in the result.
        When None, all lanchable categories are included
    """
    try:
        all_launch_categ = dao.get_launchable_categories()
        launch_categ = dict((categ.id, categ.displayname) for categ in all_launch_categ
                            if include_categories is None or categ.id in include_categories)

        datatype_instance = dao.get_datatype_by_gid(datatype_gid)
        data_class = datatype_instance.__class__
        # Collect the whole MappedType ancestry so algorithms registered against
        # base classes are also considered.
        all_compatible_classes = [data_class.__name__]
        for one_class in getmro(data_class):
            if issubclass(one_class, MappedType) and one_class.__name__ not in all_compatible_classes:
                all_compatible_classes.append(one_class.__name__)

        self.logger.debug("Searching in categories: " + str(len(launch_categ)) + " - " +
                          str(launch_categ.keys()) + "-" + str(include_categories))
        launchable_groups = dao.get_apliable_algo_groups(all_compatible_classes, launch_categ.keys())

        # Keep only the algorithms whose filter chains accept this concrete instance.
        to_remove = []
        for one_group in launchable_groups:
            compatible_algorithms = []
            for one_algo in one_group.children:
                filter_chain = FilterChain.from_json(one_algo.datatype_filter)
                if not filter_chain or filter_chain.get_python_filter_equivalent(datatype_instance):
                    compatible_algorithms.append(one_algo)
            if len(compatible_algorithms) > 0:
                one_group.children = copy.deepcopy(compatible_algorithms)
            else:
                to_remove.append(one_group)
        for one_group in to_remove:
            launchable_groups.remove(one_group)
            del one_group

        launchers = ProjectService.__prepare_group_result(launchable_groups, launch_categ, inspect_group)

        if data_class.__name__ == model.DataTypeGroup.__name__:
            # If part of a group, update also with specific launchers of the child datatype
            dt_group = dao.get_datatype_group_by_gid(datatype_gid)
            datatypes = dao.get_datatypes_from_datatype_group(dt_group.id)
            if len(datatypes):
                datatype = datatypes[-1]
                datatype = dao.get_datatype_by_gid(datatype.gid)
                views_categ_id = dao.get_visualisers_categories()[0].id
                categories_for_small_type = [categ.id for categ in all_launch_categ
                                             if categ.id != views_categ_id and
                                             (include_categories is None or categ.id in include_categories)]
                if categories_for_small_type:
                    specific_launchers = self.retrieve_launchers(datatype.gid, True, categories_for_small_type)
                    for key in specific_launchers:
                        if key in launchers:
                            launchers[key].update(specific_launchers[key])
                        else:
                            launchers[key] = specific_launchers[key]
        return launchers

    # FIX: 'except Exception, excep' is Python-2-only syntax; the 'as' form
    # is valid on Python 2.6+ and required on Python 3.
    except Exception as excep:
        ProjectService().logger.exception(excep)
        ProjectService().logger.warning("Attempting to filter launcher for group despite exception!")
        return ProjectService.__prepare_group_result([], [], inspect_group)
def get_datatypes_from_datatype_group(datatype_group_id):
    """Return every dataType belonging to the given dataType group."""
    group_members = dao.get_datatypes_from_datatype_group(datatype_group_id)
    return group_members
def set_group_descendants_visibility(datatype_group_id):
    """Propagate the visibility change to every datatype inside the group."""
    for member in dao.get_datatypes_from_datatype_group(datatype_group_id):
        set_visibility(member)