コード例 #1
0
    def get_launchable_algorithms(self, datatype_gid):
        """
        Collect all algorithms that can be launched over the given datatype.

        :param datatype_gid: Filter only algorithms compatible with this GUID
        :return: dict(category_name: List AlgorithmTransientGroup)
        """
        categories = dao.get_launchable_categories()
        datatype_instance, filtered_adapters, has_operations_warning = self._get_launchable_algorithms(
            datatype_gid, categories)

        if isinstance(datatype_instance, DataTypeGroup):
            # For a group, also gather the analyzers applicable to one of its child datatypes
            group_entity = dao.get_datatype_group_by_gid(datatype_gid)
            children = dao.get_datatypes_from_datatype_group(group_entity.id)
            if children:
                last_child = children[-1]
                analyze_only = dao.get_launchable_categories(True)
                _, child_adapters, _ = self._get_launchable_algorithms(last_child.gid, analyze_only)
                filtered_adapters.extend(child_adapters)

        # Map each launchable category id to its display name for the grouping step.
        categories_dict = {category.id: category.displayname for category in categories}

        return self._group_adapters_by_category(filtered_adapters, categories_dict), has_operations_warning
コード例 #2
0
    def prepare_parameters(datatype_group_gid, back_page, color_metric=None, size_metric=None):
        """
        We suppose that there are max 2 ranges and from each operation results exactly one dataType.

        :param datatype_group_gid: the group id for the `DataType` to be visualised
        :param back_page: page where the back button will redirect
        :param color_metric: a list of `DataTypeMeasure` which has been executed on `datatype_group_gid`
        :param size_metric:  a list of `DataTypeMeasure` which has been executed on `datatype_group_gid`

        :returns: `ContextDiscretePSE`

        :raises Exception: when `datatype_group_id` is invalid (not in database)
        """
        datatype_group = dao.get_datatype_group_by_gid(datatype_group_gid)
        if datatype_group is None:
            raise Exception("Selected DataTypeGroup is no longer present in the database. "
                            "It might have been remove or the specified id is not the correct one.")

        operation_group = dao.get_operationgroup_by_id(datatype_group.fk_operation_group)

        # Resolve the (up to two) explored ranges into names, values and display labels.
        range1_name, range1_values, range1_labels = DiscretePSEAdapter.prepare_range_labels(operation_group,
                                                                                            operation_group.range1)
        range2_name, range2_values, range2_labels = DiscretePSEAdapter.prepare_range_labels(operation_group,
                                                                                            operation_group.range2)

        pse_context = ContextDiscretePSE(datatype_group_gid, color_metric, size_metric, back_page)
        pse_context.setRanges(range1_name, range1_values, range1_labels, range2_name, range2_values, range2_labels)
        final_dict = {}
        operations = dao.get_operations_in_group(operation_group.id)

        # Build one grid node per operation, keyed by its two range values.
        for operation_ in operations:
            if not operation_.has_finished:
                pse_context.has_started_ops = True
            # NOTE(review): eval() of a DB-stored string; assumes range_values was written by
            # trusted code -- consider ast.literal_eval if the stored format allows it.
            range_values = eval(operation_.range_values)
            key_1 = range_values[range1_name]
            key_2 = model.RANGE_MISSING_STRING
            if range2_name is not None:
                key_2 = range_values[range2_name]

            datatype = None
            if operation_.status == model.STATUS_FINISHED:
                datatypes = dao.get_results_for_operation(operation_.id)
                if len(datatypes) > 0:
                    datatype = datatypes[0]
                    if datatype.type == "DatatypeMeasure":
                        ## Load proper entity class from DB.
                        measures = dao.get_generic_entity(DatatypeMeasure, datatype.id)
                    else:
                        # Result is a regular datatype: fetch the measures computed over it.
                        measures = dao.get_generic_entity(DatatypeMeasure, datatype.gid, '_analyzed_datatype')
                    pse_context.prepare_metrics_datatype(measures, datatype)

            if key_1 not in final_dict:
                final_dict[key_1] = {}

            final_dict[key_1][key_2] = pse_context.build_node_info(operation_, datatype)

        pse_context.fill_object(final_dict)
        ## datatypes_dict is not actually used in the drawing of the PSE and actually
        ## causes problems in case of NaN values, so just remove it before creating the json
        pse_context.datatypes_dict = {}
        return pse_context
コード例 #3
0
    def remove_datatype(self, project_id, datatype_gid, skip_validation=False):
        """
        Method used for removing a dataType. If the given dataType is a DatatypeGroup
        or a dataType from a DataTypeGroup then this method will remove the entire group.
        The operation(s) used for creating the dataType(s) will also be removed.

        :param project_id: ID of the project owning the datatype
        :param datatype_gid: GID of the datatype to be removed
        :param skip_validation: when True, skip per-node validation during file removal
        :raises RemoveDataTypeException: when one of the delete steps fails
        """
        datatype = dao.get_datatype_by_gid(datatype_gid)
        if datatype is None:
            self.logger.warning(
                "Attempt to delete DT[%s] which no longer exists." %
                datatype_gid)
            return

        # Detect whether we must remove a whole group (the group itself, or a member of one).
        is_datatype_group = False
        if dao.is_datatype_group(datatype_gid):
            is_datatype_group = True
        elif datatype.fk_datatype_group is not None:
            is_datatype_group = True
            # Work on the group entity instead of the selected member.
            datatype = dao.get_datatype_by_id(datatype.fk_datatype_group)

        operations_set = [datatype.fk_from_operation]

        correct = True

        if is_datatype_group:
            self.logger.debug("Removing datatype group %s" % datatype)
            data_list = dao.get_datatypes_from_datatype_group(datatype.id)
            for adata in data_list:
                self._remove_project_node_files(project_id, adata.gid,
                                                skip_validation)
                if adata.fk_from_operation not in operations_set:
                    operations_set.append(adata.fk_from_operation)

            datatype_group = dao.get_datatype_group_by_gid(datatype.gid)
            dao.remove_datatype(datatype_gid)
            correct = correct and dao.remove_entity(
                model.OperationGroup, datatype_group.fk_operation_group)
        else:
            self.logger.debug("Removing datatype %s" % datatype)
            self._remove_project_node_files(project_id, datatype.gid,
                                            skip_validation)

        ## Remove Operation entity in case no other DataType needs them.
        project = dao.get_project_by_id(project_id)
        for operation_id in operations_set:
            dependent_dt = dao.get_generic_entity(model.DataType, operation_id,
                                                  "fk_from_operation")
            if len(dependent_dt) > 0:
                ### Do not remove Operation in case DataType still exist referring it.
                continue
            correct = correct and dao.remove_entity(model.Operation,
                                                    operation_id)
            ## Make sure Operation folder is removed.
            # BUG FIX: remove the folder of the operation currently being deleted;
            # previously this always used datatype.fk_from_operation, leaving the
            # folders of the other operations in a group behind.
            self.structure_helper.remove_operation_data(
                project.name, operation_id)

        if not correct:
            raise RemoveDataTypeException("Could not remove DataType " +
                                          str(datatype_gid))
コード例 #4
0
    def prepare_parameters(datatype_group_gid,
                           back_page,
                           color_metric=None,
                           size_metric=None):
        """
        Build the `ContextDiscretePSE` describing the discrete PSE grid for a group.

        We suppose that there are max 2 ranges and from each operation results exactly one dataType.

        :param datatype_group_gid: GID of the `DataTypeGroup` to be visualised
        :param back_page: page where the back button will redirect
        :param color_metric: metric applied on node colors
        :param size_metric: metric applied on node sizes
        :returns: `ContextDiscretePSE`
        :raises Exception: when the group GID is no longer present in the database
        """
        datatype_group = dao.get_datatype_group_by_gid(datatype_group_gid)
        if datatype_group is None:
            raise Exception(
                "Selected DataTypeGroup is no longer present in the database. "
                "It might have been remove or the specified id is not the correct one."
            )

        operation_group = dao.get_operationgroup_by_id(
            datatype_group.fk_operation_group)
        _, range1_name, range1_labels = operation_group.load_range_numbers(
            operation_group.range1)
        has_range2, range2_name, range2_labels = operation_group.load_range_numbers(
            operation_group.range2)

        pse_context = ContextDiscretePSE(datatype_group_gid, range1_labels,
                                         range2_labels, color_metric,
                                         size_metric, back_page)
        final_dict = dict()
        operations = dao.get_operations_in_group(operation_group.id)
        for operation_ in operations:
            if operation_.status == model.STATUS_STARTED:
                pse_context.has_started_ops = True
            # NOTE(review): eval() of a DB-stored string; assumes range_values was
            # written by trusted code.
            range_values = eval(operation_.range_values)
            key_1 = range_values[range1_name]
            key_2 = model.RANGE_MISSING_STRING
            # BUG FIX: `has_range2` is a presence flag, so testing `is not None` was
            # always true and a missing second range made the lookup below fail;
            # test its truth value instead.
            if has_range2:
                key_2 = range_values[range2_name]

            datatype = None
            if operation_.status == model.STATUS_FINISHED:
                datatype = dao.get_results_for_operation(operation_.id)[0]
                measures = dao.get_generic_entity(DatatypeMeasure,
                                                  datatype.gid,
                                                  '_analyzed_datatype')
                pse_context.prepare_metrics_datatype(measures, datatype)

            if key_1 not in final_dict:
                final_dict[key_1] = {
                    key_2: pse_context.build_node_info(operation_, datatype)
                }
            else:
                final_dict[key_1][key_2] = pse_context.build_node_info(
                    operation_, datatype)

        pse_context.fill_object(final_dict)
        ## datatypes_dict is not actually used in the drawing of the PSE and actually
        ## causes problems in case of NaN values, so just remove it before creating the json
        pse_context.datatypes_dict = {}
        return pse_context
コード例 #5
0
    def remove_datatype(self, project_id, datatype_gid, skip_validation=False):
        """
        Method used for removing a dataType. If the given dataType is a DatatypeGroup
        or a dataType from a DataTypeGroup then this method will remove the entire group.
        The operation(s) used for creating the dataType(s) will also be removed.
        The owner's used_disk_space counter is decreased by the freed amount.

        :param project_id: ID of the project owning the datatype
        :param datatype_gid: GID of the datatype to be removed
        :param skip_validation: when True, skip per-node validation during file removal
        :raises RemoveDataTypeException: when one of the delete steps fails
        """
        datatype = dao.get_datatype_by_gid(datatype_gid)
        if datatype is None:
            self.logger.warning("Attempt to delete DT[%s] which no longer exists." % datatype_gid)
            return
        user = dao.get_user_for_datatype(datatype.id)
        freed_space = datatype.disk_size or 0
        # Detect whether we must remove a whole group (the group itself, or a member of one).
        is_datatype_group = False
        if dao.is_datatype_group(datatype_gid):
            is_datatype_group = True
            freed_space = dao.get_datatype_group_disk_size(datatype.id)
        elif datatype.fk_datatype_group is not None:
            is_datatype_group = True
            # Work on the group entity instead of the selected member.
            datatype = dao.get_datatype_by_id(datatype.fk_datatype_group)
            freed_space = dao.get_datatype_group_disk_size(datatype.id)

        operations_set = [datatype.fk_from_operation]

        correct = True

        if is_datatype_group:
            self.logger.debug("Removing datatype group %s" % datatype)
            data_list = dao.get_datatypes_from_datatype_group(datatype.id)
            for adata in data_list:
                self._remove_project_node_files(project_id, adata.gid, skip_validation)
                if adata.fk_from_operation not in operations_set:
                    operations_set.append(adata.fk_from_operation)

            datatype_group = dao.get_datatype_group_by_gid(datatype.gid)
            dao.remove_datatype(datatype_gid)
            correct = correct and dao.remove_entity(model.OperationGroup, datatype_group.fk_operation_group)
        else:
            self.logger.debug("Removing datatype %s" % datatype)
            self._remove_project_node_files(project_id, datatype.gid, skip_validation)

        ## Remove Operation entity in case no other DataType needs them.
        project = dao.get_project_by_id(project_id)
        for operation_id in operations_set:
            dependent_dt = dao.get_generic_entity(model.DataType, operation_id, "fk_from_operation")
            if len(dependent_dt) > 0:
                ### Do not remove Operation in case DataType still exist referring it.
                continue
            correct = correct and dao.remove_entity(model.Operation, operation_id)
            ## Make sure Operation folder is removed.
            # BUG FIX: remove the folder of the operation currently being deleted;
            # previously this always used datatype.fk_from_operation, leaving the
            # folders of the other operations in a group behind.
            self.structure_helper.remove_operation_data(project.name, operation_id)

        if not correct:
            raise RemoveDataTypeException("Could not remove DataType " + str(datatype_gid))

        # Account the freed disk space back to the owning user.
        user.used_disk_space = user.used_disk_space - freed_space
        dao.store_entity(user)
コード例 #6
0
 def burst_preview(self, datatype_group_gid, width, height):
     """
     Generate the preview for the burst page.
     """
     if not width:
         width = height
     # Fall back to a 700x700 figure unless both dimensions were supplied.
     figure_size = (width, height) if width and height else (700, 700)
     group = dao.get_datatype_group_by_gid(datatype_group_gid)
     return self.launch(datatype_group=group, figure_size=figure_size)
コード例 #7
0
 def burst_preview(self, datatype_group_gid, width, height):
     """
     Generate the preview for the burst page.
     """
     if not width:
         width = height
     # Default figure size, overridden when both dimensions are known.
     figure_size = (700, 700)
     if width and height:
         figure_size = (width, height)
     group_entity = dao.get_datatype_group_by_gid(datatype_group_gid)
     return self.launch(datatype_group=group_entity, figure_size=figure_size)
コード例 #8
0
    def __init__(self, datatype_group_gid):
        """
        Load the datatype group, its operation group and operations, then prepare
        the PSE display data.

        :param datatype_group_gid: GID of the `DataTypeGroup` to be displayed
        :raises Exception: when the GID does not match a group in the database
        """
        self.datatype_group = dao.get_datatype_group_by_gid(datatype_group_gid)

        # Fail fast when the group no longer exists.
        if self.datatype_group is None:
            raise Exception(
                "Selected DataTypeGroup is no longer present in the database. "
                "It might have been remove or the specified id is not the correct one."
            )

        self.operation_group = dao.get_operationgroup_by_id(
            self.datatype_group.fk_operation_group)
        self.operations = dao.get_operations_in_group(self.operation_group.id)
        # Per-operation display models; metrics are filled in by _prepare_ranges_data.
        self.pse_model_list = self.parse_pse_data_for_display()
        self.all_metrics = dict()
        self._prepare_ranges_data()
コード例 #9
0
    def get_node_matrix(self, datatype_group_gid):
        """
        Prepare the node data used by the isocline PSE viewer, or redirect with
        an error message when the group cannot be displayed by this adapter.
        """
        algorithm = self.flow_service.get_algorithm_by_module_and_class(ISOCLINE_PSE_ADAPTER_MODULE,
                                                                        ISOCLINE_PSE_ADAPTER_CLASS)
        adapter = ABCAdapter.build_adapter(algorithm)
        if not self._is_compatible(algorithm, datatype_group_gid):
            error_msg = urllib.quote("Isocline PSE requires a 2D range of floating point values.")
        else:
            try:
                group_entity = dao.get_datatype_group_by_gid(datatype_group_gid)
                return adapter.prepare_node_data(group_entity)
            except LaunchException as ex:
                self.logger.error(ex.message)
                error_msg = urllib.quote(ex.message)

        # Either incompatible or the launch failed: redirect with the error details.
        name = urllib.quote(adapter._ui_name)
        raise cherrypy.HTTPRedirect(REDIRECT_MSG % (name, error_msg))
コード例 #10
0
    def get_node_matrix(self, datatype_group_gid, matrix_shape):
        """
        Prepare the node data (with the requested matrix shape) for the isocline
        PSE viewer, or redirect with an error message when the group cannot be
        displayed by this adapter.
        """
        algorithm = self.flow_service.get_algorithm_by_module_and_class(ISOCLINE_PSE_ADAPTER_MODULE,
                                                                        ISOCLINE_PSE_ADAPTER_CLASS)
        adapter = ABCAdapter.build_adapter(algorithm)
        if not self._is_compatible(algorithm, datatype_group_gid):
            error_msg = urllib.quote("Isocline PSE requires a 2D range of floating point values.")
        else:
            try:
                group_entity = dao.get_datatype_group_by_gid(datatype_group_gid)
                return adapter.prepare_node_data(group_entity, matrix_shape)
            except LaunchException as ex:
                self.logger.error(ex.message)
                error_msg = urllib.quote(ex.message)

        # Either incompatible or the launch failed: redirect with the error details.
        name = urllib.quote(adapter._ui_name)
        raise cherrypy.HTTPRedirect(REDIRECT_MSG % (name, error_msg))
コード例 #11
0
    def get_launchable_algorithms(self, datatype_gid):
        """
        Collect all algorithms that can be launched over the given datatype.

        :param datatype_gid: Filter only algorithms compatible with this GUID
        :return: dict(category_name: List AlgorithmTransientGroup)
        """
        categories = dao.get_launchable_categories()
        datatype_instance, filtered_adapters = self._get_launchable_algorithms(datatype_gid, categories)

        if isinstance(datatype_instance, model.DataTypeGroup):
            # For a group, also include the analyzers applicable to one of its child datatypes
            group_entity = dao.get_datatype_group_by_gid(datatype_gid)
            children = dao.get_datatypes_from_datatype_group(group_entity.id)
            if children:
                analyze_only = dao.get_launchable_categories(True)
                _, child_adapters = self._get_launchable_algorithms(children[-1].gid, analyze_only)
                filtered_adapters.extend(child_adapters)

        # Map each launchable category id to its display name for the grouping step.
        categories_dict = {category.id: category.displayname for category in categories}

        return self._group_adapters_by_category(filtered_adapters, categories_dict)
コード例 #12
0
 def get_datatypegroup_by_gid(datatypegroup_gid):
     """ Fetch and return the DataTypeGroup entity identified by the given gid. """
     group_entity = dao.get_datatype_group_by_gid(datatypegroup_gid)
     return group_entity
コード例 #13
0
ファイル: project_service.py プロジェクト: nuuria8/tvb-root
    def remove_datatype(self, project_id, datatype_gid, skip_validation=False):
        """
        Method used for removing a dataType. If the given dataType is a DatatypeGroup
        or a dataType from a DataTypeGroup than this method will remove the entire group.
        The operation(s) used for creating the dataType(s) will also be removed.

        :param project_id: ID of the project owning the datatype
        :param datatype_gid: GID of the datatype to be removed
        :param skip_validation: when True, skip per-node validation during file removal
        :raises RemoveDataTypeException: when one of the delete steps fails
        """
        datatype = dao.get_datatype_by_gid(datatype_gid)
        if datatype is None:
            self.logger.warning("Attempt to delete DT[%s] which no longer exists." % datatype_gid)
            return

        # Detect whether we are deleting a whole group (the group entity itself,
        # or a member of one), and resolve the group entity in that case.
        is_datatype_group = False
        datatype_group = None
        if dao.is_datatype_group(datatype_gid):
            is_datatype_group = True
            datatype_group = datatype
        elif datatype.fk_datatype_group is not None:
            is_datatype_group = True
            datatype_group = dao.get_datatype_by_id(datatype.fk_datatype_group)

        operations_set = [datatype.fk_from_operation]
        correct = True

        if is_datatype_group:
            operations_set = [datatype_group.fk_from_operation]
            self.logger.debug("Removing datatype group %s" % datatype_group)
            if datatype_group.fk_parent_burst:
                # Group produced by a burst: remove the burst configuration and the
                # operation groups it references (metrics + simulations).
                burst = dao.get_generic_entity(BurstConfiguration, datatype_group.fk_parent_burst, 'gid')[0]
                dao.remove_entity(BurstConfiguration, burst.id)
                if burst.fk_metric_operation_group:
                    correct = correct and self._remove_operation_group(burst.fk_metric_operation_group, project_id,
                                                                       skip_validation, operations_set)

                if burst.fk_operation_group:
                    correct = correct and self._remove_operation_group(burst.fk_operation_group, project_id,
                                                                       skip_validation, operations_set)

            else:
                # Plain group: remove every member datatype, then the group and its operation group.
                self._remove_datatype_group_dts(project_id, datatype_group.id, skip_validation, operations_set)

                datatype_group = dao.get_datatype_group_by_gid(datatype_group.gid)
                # NOTE(review): removes the DataTypeGroup row by `datatype.id`; when the
                # initially selected datatype was a *member* of the group this differs
                # from `datatype_group.id` -- confirm which id is intended here.
                dao.remove_entity(DataTypeGroup, datatype.id)
                correct = correct and dao.remove_entity(OperationGroup, datatype_group.fk_operation_group)
        else:
            self.logger.debug("Removing datatype %s" % datatype)
            self._remove_project_node_files(project_id, datatype.gid, skip_validation)

        # Remove Operation entity in case no other DataType needs them.
        project = dao.get_project_by_id(project_id)
        for operation_id in operations_set:
            dependent_dt = dao.get_generic_entity(DataType, operation_id, "fk_from_operation")
            if len(dependent_dt) > 0:
                # Do not remove Operation in case DataType still exist referring it.
                continue
            # Also drop any burst configuration attached to the operation being removed.
            op_burst = dao.get_burst_for_operation_id(operation_id)
            if op_burst:
                correct = correct and dao.remove_entity(BurstConfiguration, op_burst.id)
            correct = correct and dao.remove_entity(Operation, operation_id)
            # Make sure Operation folder is removed
            self.structure_helper.remove_operation_data(project.name, operation_id)

        # Keep the project folder in sync (e.g. for encrypted storage) after deletions.
        encryption_handler.push_folder_to_sync(self.structure_helper.get_project_folder(project))
        if not correct:
            raise RemoveDataTypeException("Could not remove DataType " + str(datatype_gid))
コード例 #14
0
    def prepare_parameters(datatype_group_gid,
                           back_page,
                           color_metric=None,
                           size_metric=None):
        """
        We suppose that there are max 2 ranges and from each operation results exactly one dataType.

        :param datatype_group_gid: the group id for the `DataType` to be visualised
        :param back_page: Page where back button will direct
        :param color_metric: a list of `DataTypeMeasure` which has been executed on `datatype_group_gid`
        :param size_metric:  a list of `DataTypeMeasure` which has been executed on `datatype_group_gid`

        :returns: `ContextDiscretePSE`
        :raises Exception: when `datatype_group_id` is invalid (not in database)
        """
        datatype_group = dao.get_datatype_group_by_gid(datatype_group_gid)
        if datatype_group is None:
            raise Exception(
                "Selected DataTypeGroup is no longer present in the database. "
                "It might have been remove or the specified id is not the correct one."
            )

        operation_group = dao.get_operationgroup_by_id(
            datatype_group.fk_operation_group)

        # Resolve each range into its name, raw values, display labels and a flag
        # telling whether all values are numeric (plottable directly on an axis).
        name1, values1, labels1, only_numbers1 = DiscretePSEAdapter.prepare_range_labels(
            operation_group, operation_group.range1)
        name2, values2, labels2, only_numbers2 = DiscretePSEAdapter.prepare_range_labels(
            operation_group, operation_group.range2)

        pse_context = ContextDiscretePSE(datatype_group_gid, color_metric,
                                         size_metric, back_page)
        pse_context.setRanges(name1, values1, labels1, name2, values2, labels2,
                              only_numbers1 and only_numbers2)
        final_dict = {}
        operations = dao.get_operations_in_group(operation_group.id)

        # For non-numeric ranges, map each value to its positional index so it can
        # still be placed on a numeric axis.
        fake_numbers1 = dict(zip(values1, range(len(list(values1)))))
        fake_numbers2 = dict(zip(values2, range(len(list(values2)))))

        for operation_ in operations:
            if not operation_.has_finished:
                pse_context.has_started_ops = True
            # NOTE(review): eval() of a DB-stored string; assumes range_values was
            # written by trusted code -- consider ast.literal_eval if the format allows it.
            range_values = eval(operation_.range_values)
            key_1 = DiscretePSEAdapter.get_value_on_axe(
                range_values, only_numbers1, name1, fake_numbers1)
            key_2 = DiscretePSEAdapter.get_value_on_axe(
                range_values, only_numbers2, name2, fake_numbers2)

            datatype = None
            if operation_.status == model.STATUS_FINISHED:
                datatypes = dao.get_results_for_operation(operation_.id)
                if len(datatypes) > 0:
                    datatype = datatypes[0]
                    if datatype.type == "DatatypeMeasure":
                        ## Load proper entity class from DB.
                        measures = dao.get_generic_entity(
                            DatatypeMeasure, datatype.id)
                    else:
                        # Result is a regular datatype: fetch the measures computed over it.
                        measures = dao.get_generic_entity(
                            DatatypeMeasure, datatype.gid,
                            '_analyzed_datatype')
                    pse_context.prepare_metrics_datatype(measures, datatype)

            if key_1 not in final_dict:
                final_dict[key_1] = {}

            final_dict[key_1][key_2] = pse_context.build_node_info(
                operation_, datatype)

        pse_context.fill_object(final_dict)
        ## datatypes_dict is not actually used in the drawing of the PSE and actually
        ## causes problems in case of NaN values, so just remove it before creating the json
        pse_context.datatypes_dict = {}
        # Replace non-numeric axis values with their positional indices for plotting.
        if not only_numbers1:
            pse_context.values_x = range(len(list(values1)))
        if not only_numbers2:
            pse_context.values_y = range(len(list(values2)))
        return pse_context
コード例 #15
0
    def retrieve_launchers(self, datatype_gid, inspect_group=False, include_categories=None):
        """
        Returns all the available launch-able algorithms from the database.
        Filter the ones accepting as required input a specific DataType.

        :param datatype_gid: GID, to filter algorithms for this particular entity.
        :param inspect_group: TRUE if we are now in the inspection of sub-entities in a DataTypeGroup
        :param include_categories: List of categories to be included in the result.
                When None, all lanchable categories are included
        """
        try:
            all_launch_categ = dao.get_launchable_categories()
            # Map of category id -> display name, restricted to the requested categories.
            launch_categ = dict((categ.id, categ.displayname) for categ in all_launch_categ
                                if include_categories is None or categ.id in include_categories)

            datatype_instance = dao.get_datatype_by_gid(datatype_gid)
            data_class = datatype_instance.__class__
            # Collect the datatype's class plus all MappedType ancestors, so algorithms
            # declared for a parent type are also considered compatible.
            all_compatible_classes = [data_class.__name__]
            for one_class in getmro(data_class):
                if issubclass(one_class, MappedType) and one_class.__name__ not in all_compatible_classes:
                    all_compatible_classes.append(one_class.__name__)

            self.logger.debug("Searching in categories: " + str(len(launch_categ)) + " - " +
                              str(launch_categ.keys()) + "-" + str(include_categories))
            launchable_groups = dao.get_apliable_algo_groups(all_compatible_classes, launch_categ.keys())

            # Keep in each group only the algorithms whose datatype filter accepts this
            # instance; drop groups left with no compatible algorithm at all.
            to_remove = []
            for one_group in launchable_groups:
                compatible_algorithms = []
                for one_algo in one_group.children:
                    filter_chain = FilterChain.from_json(one_algo.datatype_filter)
                    if not filter_chain or filter_chain.get_python_filter_equivalent(datatype_instance):
                        compatible_algorithms.append(one_algo)
                if len(compatible_algorithms) > 0:
                    one_group.children = copy.deepcopy(compatible_algorithms)
                else:
                    to_remove.append(one_group)

            for one_group in to_remove:
                launchable_groups.remove(one_group)
                del one_group

            launchers = ProjectService.__prepare_group_result(launchable_groups, launch_categ, inspect_group)

            if data_class.__name__ == model.DataTypeGroup.__name__:
                # If part of a group, update also with specific launchers of the child datatype
                dt_group = dao.get_datatype_group_by_gid(datatype_gid)
                datatypes = dao.get_datatypes_from_datatype_group(dt_group.id)
                if len(datatypes):
                    datatype = datatypes[-1]
                    datatype = dao.get_datatype_by_gid(datatype.gid)

                    # Recurse over a child datatype for every non-visualiser requested category,
                    # then merge those launchers into the group-level result.
                    views_categ_id = dao.get_visualisers_categories()[0].id
                    categories_for_small_type = [categ.id for categ in all_launch_categ
                                                 if categ.id != views_categ_id and (include_categories is None or
                                                                                    categ.id in include_categories)]
                    if categories_for_small_type:
                        specific_launchers = self.retrieve_launchers(datatype.gid, True, categories_for_small_type)
                        for key in specific_launchers:
                            if key in launchers:
                                launchers[key].update(specific_launchers[key])
                            else:
                                launchers[key] = specific_launchers[key]
            return launchers

        except Exception, excep:
            # Best-effort fallback: log and return an empty result instead of propagating.
            # NOTE(review): passes a list where __prepare_group_result otherwise receives
            # a dict of categories -- confirm the helper tolerates this on the error path.
            ProjectService().logger.exception(excep)
            ProjectService().logger.warning("Attempting to filter launcher for group despite exception!")
            return ProjectService.__prepare_group_result([], [], inspect_group)
コード例 #16
0
 def get_datatypegroup_by_gid(datatypegroup_gid):
     """ Look up and return the DataTypeGroup having the specified gid. """
     return dao.get_datatype_group_by_gid(datatypegroup_gid)
コード例 #17
0
ファイル: pse_isocline.py プロジェクト: maedoc/tvb-framework
 def burst_preview(self, datatype_group_gid):
     """
     Generate the preview for the burst page.
     """
     group_entity = dao.get_datatype_group_by_gid(datatype_group_gid)
     return self.launch(datatype_group=group_entity)
コード例 #18
0
ファイル: pse_discrete.py プロジェクト: maedoc/tvb-framework
    def prepare_parameters(datatype_group_gid, back_page, color_metric=None, size_metric=None):
        """
        We suppose that there are max 2 ranges and from each operation results exactly one dataType.

        :param datatype_group_gid: the group id for the `DataType` to be visualised
        :param back_page: Page where back button will direct
        :param color_metric: String referring to metric to apply on colors
        :param size_metric:  String referring to metric to apply on sizes

        :returns: `ContextDiscretePSE`
        :raises Exception: when `datatype_group_id` is invalid (not in database)
        """
        datatype_group = dao.get_datatype_group_by_gid(datatype_group_gid)
        if datatype_group is None:
            raise Exception("Selected DataTypeGroup is no longer present in the database. "
                            "It might have been remove or the specified id is not the correct one.")

        operation_group = dao.get_operationgroup_by_id(datatype_group.fk_operation_group)

        # Resolve each range into its name, raw values, display labels and a flag
        # telling whether all values are numeric (plottable directly on an axis).
        name1, values1, labels1, only_numbers1 = DiscretePSEAdapter.prepare_range_labels(operation_group,
                                                                                         operation_group.range1)
        name2, values2, labels2, only_numbers2 = DiscretePSEAdapter.prepare_range_labels(operation_group,
                                                                                         operation_group.range2)

        pse_context = ContextDiscretePSE(datatype_group_gid, color_metric, size_metric, back_page)
        pse_context.setRanges(name1, values1, labels1, name2, values2, labels2,
                              only_numbers1 and only_numbers2)
        final_dict = {}
        operations = dao.get_operations_in_group(operation_group.id)

        # For non-numeric ranges, map each value to its positional index so it can
        # still be placed on a numeric axis.
        fake_numbers1 = dict(zip(values1, range(len(list(values1)))))
        fake_numbers2 = dict(zip(values2, range(len(list(values2)))))

        for operation_ in operations:
            if not operation_.has_finished:
                pse_context.has_started_ops = True
            # NOTE(review): eval() of a DB-stored string; assumes range_values was
            # written by trusted code -- consider ast.literal_eval if the format allows it.
            range_values = eval(operation_.range_values)
            key_1 = DiscretePSEAdapter.get_value_on_axe(range_values, only_numbers1, name1, fake_numbers1)
            key_2 = DiscretePSEAdapter.get_value_on_axe(range_values, only_numbers2, name2, fake_numbers2)

            datatype = None
            if operation_.status == model.STATUS_FINISHED:
                datatypes = dao.get_results_for_operation(operation_.id)
                if len(datatypes) > 0:
                    datatype = datatypes[0]
                    if datatype.type == "DatatypeMeasure":
                        ## Load proper entity class from DB.
                        measures = dao.get_generic_entity(DatatypeMeasure, datatype.id)
                    else:
                        # Result is a regular datatype: fetch the measures computed over it.
                        measures = dao.get_generic_entity(DatatypeMeasure, datatype.gid, '_analyzed_datatype')
                    pse_context.prepare_metrics_datatype(measures, datatype)

            if key_1 not in final_dict:
                final_dict[key_1] = {}

            final_dict[key_1][key_2] = pse_context.build_node_info(operation_, datatype)

        pse_context.fill_object(final_dict)
        ## datatypes_dict is not actually used in the drawing of the PSE and actually
        ## causes problems in case of NaN values, so just remove it before creating the json
        pse_context.datatypes_dict = {}
        # Replace non-numeric axis values with their positional indices for plotting.
        if not only_numbers1:
            pse_context.values_x = range(len(list(values1)))
        if not only_numbers2:
            pse_context.values_y = range(len(list(values2)))
        return pse_context
コード例 #19
0
    def retrieve_launchers(self, dataname, datatype_gid=None, inspect_group=False, exclude_categories=None):
        """
        Returns all the available launch-able algorithms from the database.
        Filter the ones accepting as required input a specific DataType.

        :param dataname: String or class representing DataType to retrieve filters for it.
        :param datatype_gid: Optional GID, to filter algorithms for this particular entity.
        :param inspect_group: TRUE if we are now in the inspection of sub-entities in a DataTypeGroup
        :param exclude_categories: List of categories to be excluded from the result.
        :return: result of __prepare_group_result: launch-able algorithm groups organized by category.
        """
        # Avoid the mutable-default-argument pitfall by normalizing to a fresh list per call.
        if exclude_categories is None:
            exclude_categories = []
        launch_categ = dao.get_launchable_categories()
        # Map category id -> display name, dropping any explicitly excluded categories.
        launch_categ = dict((categ.id, categ.displayname) for categ in launch_categ
                            if categ.id not in exclude_categories)
        launch_groups = dao.get_apliable_algo_groups(dataname, launch_categ.keys())

        # Without a concrete GID there is no per-instance filtering to do.
        if datatype_gid is None:
            return ProjectService.__prepare_group_result(launch_groups, launch_categ, inspect_group)

        try:
            datatype_instance = dao.get_datatype_by_gid(datatype_gid)
            data_class = datatype_instance.__class__
            # Also gather groups declared for the direct base classes of this DataType.
            for one_class in data_class.__bases__:
                launch_groups.extend(dao.get_apliable_algo_groups(one_class.__name__, launch_categ.keys()))
            # Load the concrete entity (by GID) so stored filters can be evaluated against it.
            specific_datatype = dao.get_generic_entity(data_class, datatype_gid, "gid")
            to_remove = []
            for one_group in launch_groups:
                valid_algorithms = []
                for one_algo in one_group.children:
                    # Keep only algorithms whose persisted filter accepts this instance
                    # (or that declare no filter at all).
                    filter_chain = FilterChain.from_json(one_algo.datatype_filter)
                    if not filter_chain or filter_chain.get_python_filter_equivalent(specific_datatype[0]):
                        valid_algorithms.append(one_algo)
                if len(valid_algorithms) > 0:
                    # Deep-copy so the trimmed children list does not mutate cached/DB-bound objects.
                    one_group.children = copy.deepcopy(valid_algorithms)
                else:
                    # No compatible algorithm left: drop the whole group below.
                    to_remove.append(one_group)
            for one_group in to_remove:
                launch_groups.remove(one_group)
                del one_group
            launchers = ProjectService.__prepare_group_result(launch_groups, launch_categ, inspect_group)

            if dataname == model.DataTypeGroup.__name__:
                # If part of a group, update also with specific launchers of that datatype
                dt_group = dao.get_datatype_group_by_gid(datatype_gid)
                datatypes = dao.get_datatypes_from_datatype_group(dt_group.id)
                if len(datatypes):
                    # Use the last child as representative of the group's member type.
                    datatype = datatypes[-1]
                    datatype = dao.get_datatype_by_gid(datatype.gid)
                    # Recurse on the child type, excluding visualisers (first views category)
                    # on top of the caller's exclusions; merge results per category key.
                    views_categ_id = dao.get_visualisers_categories()[0].id
                    specific_launchers = self.retrieve_launchers(datatype.__class__.__name__, datatype.gid,
                                                                 True, [views_categ_id] + exclude_categories)
                    for key in specific_launchers:
                        if key in launchers:
                            launchers[key].update(specific_launchers[key])
                        else:
                            launchers[key] = specific_launchers[key]
            return launchers
        except Exception, excep:
            # Best-effort fallback: log and return the coarse (unfiltered-by-instance) result.
            ProjectService().logger.exception(excep)
            ProjectService().logger.warning("Attempting to filter launcher  for group despite exception!")
            return ProjectService.__prepare_group_result(launch_groups, launch_categ, inspect_group)
コード例 #20
0
    def retrieve_launchers(self, datatype_gid, inspect_group=False, include_categories=None):
        """
        Returns all the available launch-able algorithms from the database.
        Filter the ones accepting as required input a specific DataType.

        :param datatype_gid: GID, to filter algorithms for this particular entity.
        :param inspect_group: TRUE if we are now in the inspection of sub-entities in a DataTypeGroup
        :param include_categories: List of categories to be included in the result.
                When None, all launchable categories are included
        :return: result of __prepare_group_result: launch-able algorithm groups organized by category
                 (empty result when an exception occurs).
        """
        try:
            all_launch_categ = dao.get_launchable_categories()
            # Map category id -> display name, restricted to include_categories when given.
            launch_categ = dict((categ.id, categ.displayname) for categ in all_launch_categ
                                if include_categories is None or categ.id in include_categories)

            datatype_instance = dao.get_datatype_by_gid(datatype_gid)
            data_class = datatype_instance.__class__
            # Collect the full MRO chain of MappedType subclasses, so algorithms declared
            # for any ancestor DataType are also considered.
            all_compatible_classes = [data_class.__name__]
            for one_class in getmro(data_class):
                if issubclass(one_class, MappedType) and one_class.__name__ not in all_compatible_classes:
                    all_compatible_classes.append(one_class.__name__)

            self.logger.debug("Searching in categories: " + str(len(launch_categ)) + " - " +
                              str(launch_categ.keys()) + "-" + str(include_categories))
            launchable_groups = dao.get_apliable_algo_groups(all_compatible_classes, launch_categ.keys())

            to_remove = []
            for one_group in launchable_groups:
                compatible_algorithms = []
                for one_algo in one_group.children:
                    # Keep only algorithms whose persisted filter accepts this instance
                    # (or that declare no filter at all).
                    filter_chain = FilterChain.from_json(one_algo.datatype_filter)
                    if not filter_chain or filter_chain.get_python_filter_equivalent(datatype_instance):
                        compatible_algorithms.append(one_algo)
                if len(compatible_algorithms) > 0:
                    # Deep-copy so the trimmed children list does not mutate cached/DB-bound objects.
                    one_group.children = copy.deepcopy(compatible_algorithms)
                else:
                    # No compatible algorithm left: drop the whole group below.
                    to_remove.append(one_group)

            for one_group in to_remove:
                launchable_groups.remove(one_group)
                del one_group

            launchers = ProjectService.__prepare_group_result(launchable_groups, launch_categ, inspect_group)

            if data_class.__name__ == model.DataTypeGroup.__name__:
                # If part of a group, update also with specific launchers of the child datatype
                dt_group = dao.get_datatype_group_by_gid(datatype_gid)
                datatypes = dao.get_datatypes_from_datatype_group(dt_group.id)
                if len(datatypes):
                    # Use the last child as representative of the group's member type.
                    datatype = datatypes[-1]
                    datatype = dao.get_datatype_by_gid(datatype.gid)

                    # Recurse on the child, excluding the visualisers category (first views
                    # category) while honoring the caller's include_categories restriction.
                    views_categ_id = dao.get_visualisers_categories()[0].id
                    categories_for_small_type = [categ.id for categ in all_launch_categ
                                                 if categ.id != views_categ_id and (include_categories is None or
                                                                                    categ.id in include_categories)]
                    if categories_for_small_type:
                        specific_launchers = self.retrieve_launchers(datatype.gid, True, categories_for_small_type)
                        # Merge child-specific launchers into the group-level result, per category key.
                        for key in specific_launchers:
                            if key in launchers:
                                launchers[key].update(specific_launchers[key])
                            else:
                                launchers[key] = specific_launchers[key]
            return launchers

        except Exception, excep:
            # Best-effort fallback: log and return an empty (but well-formed) result.
            ProjectService().logger.exception(excep)
            ProjectService().logger.warning("Attempting to filter launcher for group despite exception!")
            return ProjectService.__prepare_group_result([], [], inspect_group)
コード例 #21
0
ファイル: pse_isocline.py プロジェクト: nedkab/tvb-framework
 def burst_preview(self, datatype_group_gid):
     """
     Generate the preview for the burst page.

     :param datatype_group_gid: GID of the DataTypeGroup whose preview is rendered
     """
     # Resolve the group entity from its GID and delegate rendering to the regular launch.
     return self.launch(datatype_group=dao.get_datatype_group_by_gid(datatype_group_gid))