Example #1
class OperationService:
    """
    Class responsible for preparing an operation launch.
    It prepares the parameters and decides whether the operation is to be
    executed immediately or sent to the cluster.
    """
    ATT_UID = "uid"

    def __init__(self):
        self.logger = get_logger(self.__class__.__module__)
        self.storage_interface = StorageInterface()

    ##########################################################################################
    ######## Methods related to launching operations start here ##############################
    ##########################################################################################

    def initiate_operation(self, current_user, project, adapter_instance, visible=True, model_view=None):
        """
        Gets the computation parameters from the preceding input form
        and launches the computation (on the cluster or locally).

        Invokes the custom method on an Adapter instance and makes sure that,
        once the operation has finished, the correct results are stored in the DB.
        """
        if not isinstance(adapter_instance, ABCAdapter):
            self.logger.warning("Inconsistent Adapter Class:" + str(adapter_instance.__class__))
            raise LaunchException("Developer Exception!!")

        algo = adapter_instance.stored_adapter
        operation = self.prepare_operation(current_user.id, project, algo, visible, model_view)
        if adapter_instance.launch_mode == AdapterLaunchModeEnum.SYNC_SAME_MEM:
            return self.initiate_prelaunch(operation, adapter_instance)
        else:
            return self._send_to_cluster(operation, adapter_instance, current_user.username)

    @staticmethod
    def prepare_metadata(algo_category, burst=None, current_ga=GenericAttributes()):
        """
        Gather generic_metadata from the submitted fields and the algorithm
        about to be executed.
        Will populate STATE, GROUP, etc. in generic_metadata.
        """
        generic_metadata = GenericAttributes()
        generic_metadata.state = algo_category.defaultdatastate
        generic_metadata.parent_burst = burst
        generic_metadata.fill_from(current_ga)
        return generic_metadata
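
    # A hedged illustration (hypothetical category) of what prepare_metadata
    # yields, assuming algo_category.defaultdatastate == "INTERMEDIATE":
    #   ga = OperationService.prepare_metadata(algo_category, burst="burst-gid")
    #   ga.state        -> "INTERMEDIATE"
    #   ga.parent_burst -> "burst-gid"
    #   # every other attribute is copied from current_ga by fill_from()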

    @staticmethod
    def _read_set(values):
        """ Parse a committed UI possible list of values, into a set converted into string."""
        if isinstance(values, list):
            set_values = []
            values_str = ""
            for val in values:
                if val not in set_values:
                    set_values.append(val)
                    values_str = values_str + " " + str(val)
            values = values_str
        return str(values).strip()
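
    # A hedged illustration of _read_set's de-duplication (hypothetical inputs):
    #   _read_set(["a", "b", "a", 1])  ->  "a b 1"    (duplicates dropped, order kept)
    #   _read_set("single")            ->  "single"   (non-list values pass through)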

    def group_operation_launch(self, user_id, project, algorithm_id, category_id):
        """
        Create, prepare and launch an operation for the given algorithm.
        """
        algorithm = dao.get_algorithm_by_id(algorithm_id)
        # prepare_operation (below) returns a single Operation entity
        operation = self.prepare_operation(user_id, project, algorithm)
        self.launch_operation(operation.id, True)

    def _prepare_metric_operation(self, sim_operation):
        # type: (Operation) -> Operation
        metric_algo = dao.get_algorithm_by_module(MEASURE_METRICS_MODULE, MEASURE_METRICS_CLASS)
        datatype_index = h5.REGISTRY.get_index_for_datatype(TimeSeries)
        time_series_index = dao.get_generic_entity(datatype_index, sim_operation.id, 'fk_from_operation')[0]
        ga = self.prepare_metadata(metric_algo.algorithm_category, time_series_index.fk_parent_burst)
        ga.visible = False

        view_model = get_class_by_name("{}.{}".format(MEASURE_METRICS_MODULE, MEASURE_METRICS_MODEL_CLASS))()
        view_model.time_series = time_series_index.gid
        view_model.algorithms = tuple(ALGORITHMS.keys())
        view_model.generic_attributes = ga

        parent_burst = dao.get_generic_entity(BurstConfiguration, time_series_index.fk_parent_burst, 'gid')[0]
        metric_op_group = dao.get_operationgroup_by_id(parent_burst.fk_metric_operation_group)
        metric_operation_group_id = parent_burst.fk_metric_operation_group
        range_values = sim_operation.range_values
        view_model.operation_group_gid = uuid.UUID(metric_op_group.gid)
        view_model.ranges = json.dumps(parent_burst.ranges)
        view_model.range_values = range_values
        view_model.is_metric_operation = True
        metric_operation = Operation(view_model.gid.hex, sim_operation.fk_launched_by, sim_operation.fk_launched_in,
                                     metric_algo.id, user_group=ga.operation_tag, op_group_id=metric_operation_group_id,
                                     range_values=range_values)
        metric_operation.visible = False
        metric_operation = dao.store_entity(metric_operation)

        metrics_datatype_group = dao.get_generic_entity(DataTypeGroup, metric_operation_group_id,
                                                        'fk_operation_group')[0]
        if metrics_datatype_group.fk_from_operation is None:
            metrics_datatype_group.fk_from_operation = metric_operation.id
            dao.store_entity(metrics_datatype_group)

        self.store_view_model(metric_operation, sim_operation.project, view_model)
        return metric_operation

    @transactional
    def prepare_operation(self, user_id, project, algorithm, visible=True, view_model=None, ranges=None,
                          burst_gid=None, op_group_id=None):
        """
        Do all the necessary preparations for storing an operation. When a range
        of values is given, create an operation group and one operation for each
        possible instance from the range.
        """
        algo_category = dao.get_category_by_id(algorithm.fk_category)
        ga = self.prepare_metadata(algo_category, current_ga=view_model.generic_attributes, burst=burst_gid)
        ga.visible = visible
        view_model.generic_attributes = ga

        self.logger.debug("Saving Operation(userId=" + str(user_id) + ",projectId=" + str(project.id) +
                          ",algorithmId=" + str(algorithm.id) + ")")

        operation = Operation(view_model.gid.hex, user_id, project.id, algorithm.id, user_group=ga.operation_tag,
                              op_group_id=op_group_id, range_values=ranges)
        operation = dao.store_entity(operation)

        self.store_view_model(operation, project, view_model)

        return operation

    @staticmethod
    def store_view_model(operation, project, view_model):
        storage_path = StorageInterface().get_project_folder(project.name, str(operation.id))
        h5.store_view_model(view_model, storage_path)
        view_model_size_on_disk = StorageInterface.compute_recursive_h5_disk_usage(storage_path)
        operation.view_model_disk_size = view_model_size_on_disk
        dao.store_entity(operation)

    def initiate_prelaunch(self, operation, adapter_instance):
        """
        Public method.
        This should be the common entry point for calling an adapter's launch method.
        """
        result_msg = ""
        temp_files = []
        try:
            operation = dao.get_operation_by_id(operation.id)  # Load Lazy fields

            disk_space_per_user = TvbProfile.current.MAX_DISK_SPACE
            pending_op_disk_space = dao.compute_disk_size_for_started_ops(operation.fk_launched_by)
            user_disk_space = dao.compute_user_generated_disk_size(operation.fk_launched_by)  # From kB to Bytes
            available_space = disk_space_per_user - pending_op_disk_space - user_disk_space

            view_model = adapter_instance.load_view_model(operation)
            try:
                form = adapter_instance.get_form()
                form = form() if isclass(form) else form
                fields = form.get_upload_field_names()
                project = dao.get_project_by_id(operation.fk_launched_in)
                tmp_folder = self.storage_interface.get_temp_folder(project.name)
                for upload_field in fields:
                    if hasattr(view_model, upload_field):
                        file = getattr(view_model, upload_field)
                        if file.startswith(tmp_folder) or file.startswith(TvbProfile.current.TVB_TEMP_FOLDER):
                            temp_files.append(file)
            except AttributeError:
                # Skip if we don't have upload fields on current form
                pass
            result_msg, nr_datatypes = adapter_instance._prelaunch(operation, view_model, available_space)
            operation = dao.get_operation_by_id(operation.id)
            operation.mark_complete(STATUS_FINISHED)
            dao.store_entity(operation)

            self._update_vm_generic_operation_tag(view_model, operation)
            self._remove_files(temp_files)

        except zipfile.BadZipfile as excep:
            msg = "The uploaded file is not a valid ZIP!"
            self._handle_exception(excep, temp_files, msg, operation)
        except TVBException as excep:
            self._handle_exception(excep, temp_files, excep.message, operation)
        except MemoryError:
            msg = ("Could not execute operation because there is not enough free memory." +
                   " Please adjust operation parameters and re-launch it.")
            self._handle_exception(Exception(msg), temp_files, msg, operation)
        except Exception as excep1:
            msg = "Could not launch Operation with the given input data!"
            self._handle_exception(excep1, temp_files, msg, operation)

        if operation.fk_operation_group and 'SimulatorAdapter' in operation.algorithm.classname:
            next_op = self._prepare_metric_operation(operation)
            self.launch_operation(next_op.id)
        return result_msg
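
    # The available-space check in initiate_prelaunch is plain arithmetic; a
    # sketch with hypothetical numbers (all values in the same unit, as returned
    # by the DAO):
    #   MAX_DISK_SPACE        = 5_000_000
    #   pending_op_disk_space = 1_200_000   # started but unfinished operations
    #   user_disk_space       = 2_300_000   # datatypes already persisted
    #   available_space       = 5_000_000 - 1_200_000 - 2_300_000   # 1_500_000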

    def _send_to_cluster(self, operation, adapter_instance, current_username=None):
        """ Initiate operation on cluster"""
        try:
            BackendClientFactory.execute(str(operation.id), current_username, adapter_instance)
        except TVBException as ex:
            self._handle_exception(ex, {}, ex.message, operation)
        except Exception as excep:
            self._handle_exception(excep, {}, "Could not start operation!", operation)

        return operation

    @staticmethod
    def _update_vm_generic_operation_tag(view_model, operation):
        project = dao.get_project_by_id(operation.fk_launched_in)
        h5_path = h5.path_for(operation.id, ViewModelH5, view_model.gid, project.name, type(view_model).__name__)
        with ViewModelH5(h5_path, view_model) as vm_h5:
            vm_h5.operation_tag.store(operation.user_group)

    def launch_operation(self, operation_id, send_to_cluster=False, adapter_instance=None):
        """
        Method exposed for Burst-Workflow related calls.
        It is used for cascading operations in the same workflow.
        """
        if operation_id is not None:
            operation = dao.get_operation_by_id(operation_id)
            if adapter_instance is None:
                algorithm = operation.algorithm
                adapter_instance = ABCAdapter.build_adapter(algorithm)

            if send_to_cluster:
                self._send_to_cluster(operation, adapter_instance, operation.user.username)
            else:
                self.initiate_prelaunch(operation, adapter_instance)

    def _handle_exception(self, exception, temp_files, message, operation=None):
        """
        Common way to treat exceptions:
            - remove temporary files, if any
            - set status ERROR on current operation (if any)
            - log exception
        """
        self.logger.exception(message)
        if operation is not None:
            BurstService().persist_operation_state(operation, STATUS_ERROR, str(exception))
        self._remove_files(temp_files)
        exception.message = message
        raise exception.with_traceback(
            sys.exc_info()[2])  # when rethrowing in python this is required to preserve the stack trace
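
    # A minimal standalone sketch of the re-raise pattern above (risky() and
    # cleanup() are hypothetical):
    #
    #   try:
    #       risky()
    #   except Exception as exc:
    #       cleanup()
    #       raise exc.with_traceback(sys.exc_info()[2])
    #
    # with_traceback() re-attaches the traceback of the original failure point,
    # so callers see where the error happened rather than only this handler.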

    def _remove_files(self, file_list):
        """
        Remove any files from the given list that still exist on disk.
        Currently used to delete temporary files created during an operation.
        """
        for pth in file_list:
            if pth is not None:
                pth = str(pth)
                try:
                    if os.path.exists(pth) and os.path.isfile(pth):
                        os.remove(pth)
                        if len(os.listdir(os.path.dirname(pth))) == 0:
                            self.storage_interface.remove_folder(os.path.dirname(pth))
                        self.logger.debug("We no longer need file:" + pth + " => deleted")
                    else:
                        self.logger.warning("Trying to remove not existent file:" + pth)
                except OSError:
                    self.logger.exception("Could not cleanup file!")

    @staticmethod
    def _range_name(range_no):
        return PARAM_RANGE_PREFIX + str(range_no)

    def fire_operation(self, adapter_instance, current_user, project_id, visible=True, view_model=None):
        """
        Launch an operation, specified by AdapterInstance, for current_user and project with project_id.
        """
        operation_name = str(adapter_instance.__class__.__name__)
        try:
            self.logger.info("Starting operation " + operation_name)
            project = dao.get_project_by_id(project_id)

            result = self.initiate_operation(current_user, project, adapter_instance, visible,
                                             model_view=view_model)
            self.logger.info("Finished operation launch:" + operation_name)
            return result

        except TVBException as excep:
            self.logger.exception("Could not launch operation " + operation_name +
                                  " with the given set of input data, because: " + excep.message)
            raise OperationException(excep.message, excep)
        except Exception as excep:
            self.logger.exception("Could not launch operation " + operation_name + " with the given set of input data!")
            raise OperationException(str(excep))

    @staticmethod
    def load_operation(operation_id):
        """ Retrieve previously stored Operation from DB, and load operation.burst attribute"""
        operation = dao.get_operation_by_id(operation_id)
        operation.burst = dao.get_burst_for_operation_id(operation_id)
        return operation

    @staticmethod
    def stop_operation(operation_id, is_group=False, remove_after_stop=False):
        # type: (int, bool, bool) -> bool
        """
        Stop (also named Cancel) the operation given by operation_id,
        and potentially also remove it after (with all linked data).
        In case the Operation has a linked Burst, remove that too.
        :param operation_id: ID for Operation (or OperationGroup) to be canceled/removed
        :param is_group: When true stop all the operations from that group.
        :param remove_after_stop: if True, also remove the operation(s) after stopping
        :returns: True if the stop step was successful
        """
        result = False
        if is_group:
            op_group = ProjectService.get_operation_group_by_id(operation_id)
            operations_in_group = ProjectService.get_operations_in_group(op_group)
            for operation in operations_in_group:
                result = OperationService.stop_operation(operation.id, False, remove_after_stop) or result
        elif dao.try_get_operation_by_id(operation_id) is not None:
            result = BackendClientFactory.stop_operation(operation_id)
            if remove_after_stop:
                burst_config = dao.get_burst_for_direct_operation_id(operation_id)
                ProjectService().remove_operation(operation_id)
                if burst_config is not None:
                    result = dao.remove_entity(BurstConfiguration, burst_config.id) or result

        return result
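

# A hedged usage sketch for OperationService; `user`, `project_id` and
# `my_view_model` below are hypothetical, not defined in this snippet:
#
#   adapter = ABCAdapter.build_adapter(stored_adapter)
#   service = OperationService()
#   result = service.fire_operation(adapter, user, project_id,
#                                   visible=True, view_model=my_view_model)
#   # fire_operation resolves the project, delegates to initiate_operation and,
#   # depending on the adapter's launch_mode, either runs the computation
#   # synchronously or submits it to the cluster backend.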
Example #2
class AlgorithmService(object):
    """
    Service Layer for Algorithm manipulation (e.g. find all Uploaders, filter algorithms by category, etc.)
    """
    def __init__(self):
        self.logger = get_logger(self.__class__.__module__)
        self.storage_interface = StorageInterface()

    @staticmethod
    def get_category_by_id(identifier):
        """ Pass to DAO the retrieve of category by ID operation."""
        return dao.get_category_by_id(identifier)

    @staticmethod
    def get_raw_categories():
        """:returns: AlgorithmCategory list of entities that have results in RAW state (Creators/Uploaders)"""
        return dao.get_raw_categories()

    @staticmethod
    def get_visualisers_category():
        """Retrieve all Algorithm categories, with display capability"""
        result = dao.get_visualisers_categories()
        if not result:
            raise ValueError("View Category not found!!!")
        return result[0]

    @staticmethod
    def get_algorithm_by_identifier(ident):
        """
        Retrieve Algorithm entity by ID.
        Return None, if ID is not found in DB.
        """
        return dao.get_algorithm_by_id(ident)

    @staticmethod
    def get_operation_numbers(proj_id):
        """ Count total number of operations started for current project. """
        return dao.get_operation_numbers(proj_id)

    def _prepare_dt_display_name(self, dt_index, dt):
        # dt is a result of the get_values_of_datatype function
        db_dt = dao.get_generic_entity(dt_index, dt[2], "gid")
        display_name = db_dt[0].display_name
        display_name += ' - ' + (dt[3] or "None ")  # Subject
        if dt[5]:
            display_name += ' - From: ' + str(dt[5])
        else:
            display_name += date2string(dt[4])
        if dt[6]:
            display_name += ' - ' + str(dt[6])
        display_name += ' - ID:' + str(dt[0])

        return display_name
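
    # Hypothetical illustration of the label assembled above, for a tuple
    # dt = (42, ..., <gid>, "John Doe", <date>, "GroupX", "range2"):
    #   "Connectivity - John Doe - From: GroupX - range2 - ID:42"
    # (the date branch is used instead of "From:" when dt[5] is empty)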

    def fill_selectfield_with_datatypes(self,
                                        field,
                                        project_id,
                                        extra_conditions=None):
        # type: (TraitDataTypeSelectField, int, list) -> None
        filtering_conditions = FilterChain()
        filtering_conditions += field.conditions
        filtering_conditions += extra_conditions
        datatypes, _ = dao.get_values_of_datatype(project_id,
                                                  field.datatype_index,
                                                  filtering_conditions)
        datatype_options = []
        for datatype in datatypes:
            display_name = self._prepare_dt_display_name(
                field.datatype_index, datatype)
            datatype_options.append((datatype, display_name))
        field.datatype_options = datatype_options

    def _fill_form_with_datatypes(self,
                                  form,
                                  project_id,
                                  extra_conditions=None):
        for form_field in form.trait_fields:
            if isinstance(form_field, TraitDataTypeSelectField):
                self.fill_selectfield_with_datatypes(form_field, project_id,
                                                     extra_conditions)
        return form

    def prepare_adapter_form(self,
                             adapter_instance=None,
                             form_instance=None,
                             project_id=None,
                             extra_conditions=None):
        # type: (ABCAdapter, ABCAdapterForm, int, list) -> ABCAdapterForm
        form = None
        if form_instance is not None:
            form = form_instance
        elif adapter_instance is not None:
            form = adapter_instance.get_form()()

        if form is None:
            raise OperationException("Cannot prepare None form")

        form = self._fill_form_with_datatypes(form, project_id,
                                              extra_conditions)
        return form

    def _prepare_upload_post_data(self, form, post_data, project_id):
        for form_field in form.trait_fields:
            if isinstance(form_field,
                          TraitUploadField) and form_field.name in post_data:
                field = post_data[form_field.name]
                file_name = None
                if hasattr(field, 'file') and field.file is not None:
                    project = dao.get_project_by_id(project_id)
                    temporary_storage = self.storage_interface.get_temp_folder(
                        project.name)
                    try:
                        uq_name = date2string(datetime.now(),
                                              True) + '_' + str(0)
                        file_name = TEMPORARY_PREFIX + uq_name + '_' + field.filename
                        file_name = os.path.join(temporary_storage, file_name)

                        with open(file_name, 'wb') as file_obj:
                            file_obj.write(field.file.read())
                    except Exception as excep:
                        # TODO: is this handled properly?
                        self.storage_interface.remove_files([file_name])
                        excep.message = 'Could not continue: Invalid input files'
                        raise excep
                post_data[form_field.name] = file_name

    def fill_adapter_form(self, adapter_instance, post_data, project_id):
        # type: (ABCAdapter, dict, int) -> ABCAdapterForm
        form = self.prepare_adapter_form(adapter_instance=adapter_instance,
                                         project_id=project_id)
        if isinstance(form, ABCUploaderForm):
            self._prepare_upload_post_data(form, post_data, project_id)

        if 'fill_defaults' in post_data:
            form.fill_from_post_plus_defaults(post_data)
        else:
            form.fill_from_post(post_data)

        return form
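
    # A hedged usage sketch (the post_data keys depend on the concrete adapter
    # form; the GID value here is hypothetical):
    #
    #   service = AlgorithmService()
    #   form = service.fill_adapter_form(adapter_instance,
    #                                    {"time_series": "some-gid"}, project_id)
    #   # the returned form now carries the parsed values and can be used to
    #   # populate the adapter's view model.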

    def prepare_adapter(self, stored_adapter):

        adapter_module = stored_adapter.module
        adapter_name = stored_adapter.classname
        try:
            # Prepare Adapter Interface, by populating with existent data,
            # in case of a parameter of type DataType.
            adapter_instance = ABCAdapter.build_adapter(stored_adapter)
            return adapter_instance
        except Exception:
            self.logger.exception('Not found:' + adapter_name + ' in:' +
                                  adapter_module)
            raise OperationException("Could not prepare " + adapter_name)

    @staticmethod
    def get_algorithm_by_module_and_class(module, classname):
        """
        Get the db entry from the algorithm table for the given module and 
        class.
        """
        return dao.get_algorithm_by_module(module, classname)

    @staticmethod
    def create_link(data_id, project_id):
        """
        For a dataType ID and a project ID, create the required link.
        """
        link = Links(data_id, project_id)
        dao.store_entity(link)

    @staticmethod
    def remove_link(dt_id, project_id):
        """
        Remove the link between the datatype given by dt_id and the project given by project_id.
        """
        link = dao.get_link(dt_id, project_id)
        if link is not None:
            dao.remove_entity(Links, link.id)

    @staticmethod
    def get_upload_algorithms():
        """
        :return: List of StoredAdapter entities
        """
        categories = dao.get_uploader_categories()
        categories_ids = [categ.id for categ in categories]
        return dao.get_adapters_from_categories(categories_ids)

    @staticmethod
    def get_analyze_groups():
        """
        :return: list of AlgorithmTransientGroup entities
        """
        categories = dao.get_launchable_categories(elimin_viewers=True)
        categories_ids = [categ.id for categ in categories]
        stored_adapters = dao.get_adapters_from_categories(categories_ids)

        groups_list = []
        for adapter in stored_adapters:
            # For adapters without a group, fall back to the adapter's own display name and description
            group = AlgorithmTransientGroup(
                adapter.group_name or adapter.displayname,
                adapter.group_description or adapter.description)
            group = AlgorithmService._find_group(groups_list, group)
            group.children.append(adapter)
        return categories[0], groups_list

    @staticmethod
    def _find_group(groups_list, new_group):
        for i in range(len(groups_list) - 1, -1, -1):
            current_group = groups_list[i]
            if current_group.name == new_group.name and current_group.description == new_group.description:
                return current_group
        # Not found in list
        groups_list.append(new_group)
        return new_group
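
    # Hedged illustration of _find_group's find-or-append behaviour
    # (hypothetical groups):
    #   groups = []
    #   g1 = AlgorithmService._find_group(groups, AlgorithmTransientGroup("A", "d"))
    #   g2 = AlgorithmService._find_group(groups, AlgorithmTransientGroup("A", "d"))
    #   # g1 is g2 -> True; groups holds one entry, matched on name + description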

    def get_visualizers_for_group(self, dt_group_gid):

        categories = dao.get_visualisers_categories()
        return self._get_launchable_algorithms(dt_group_gid, categories)[1]

    def get_launchable_algorithms(self, datatype_gid):
        """
        :param datatype_gid: Filter only algorithms compatible with this GUID
        :return: dict(category_name: List AlgorithmTransientGroup)
        """
        categories = dao.get_launchable_categories()
        datatype_instance, filtered_adapters, has_operations_warning = self._get_launchable_algorithms(
            datatype_gid, categories)

        categories_dict = dict()
        for c in categories:
            categories_dict[c.id] = c.displayname

        return self._group_adapters_by_category(
            filtered_adapters, categories_dict), has_operations_warning

    def _get_launchable_algorithms(self, datatype_gid, categories):
        datatype_instance = dao.get_datatype_by_gid(datatype_gid)
        return self.get_launchable_algorithms_for_datatype(
            datatype_instance, categories)

    def get_launchable_algorithms_for_datatype(self, datatype, categories):
        data_class = datatype.__class__
        all_compatible_classes = [data_class.__name__]
        for one_class in getmro(data_class):
            # from tvb.basic.traits.types_mapped import MappedType

            if issubclass(
                    one_class, DataType
            ) and one_class.__name__ not in all_compatible_classes:
                all_compatible_classes.append(one_class.__name__)

        self.logger.debug("Searching in categories: " + str(categories) +
                          " for classes " + str(all_compatible_classes))
        categories_ids = [categ.id for categ in categories]
        launchable_adapters = dao.get_applicable_adapters(
            all_compatible_classes, categories_ids)

        filtered_adapters = []
        has_operations_warning = False
        for stored_adapter in launchable_adapters:
            filter_chain = FilterChain.from_json(
                stored_adapter.datatype_filter)
            try:
                if not filter_chain or filter_chain.get_python_filter_equivalent(
                        datatype):
                    filtered_adapters.append(stored_adapter)
            except (TypeError, InvalidFilterChainInput):
                self.logger.exception("Could not evaluate filter on " +
                                      str(stored_adapter))
                has_operations_warning = True

        return datatype, filtered_adapters, has_operations_warning

    def _group_adapters_by_category(self, stored_adapters, categories):
        """
        :param stored_adapters: list StoredAdapter
        :return: dict(category_name: List AlgorithmTransientGroup), empty groups all in the same AlgorithmTransientGroup
        """
        categories_dict = dict()
        for adapter in stored_adapters:
            category_name = categories.get(adapter.fk_category)
            if category_name in categories_dict:
                groups_list = categories_dict.get(category_name)
            else:
                groups_list = []
                categories_dict[category_name] = groups_list
            group = AlgorithmTransientGroup(adapter.group_name,
                                            adapter.group_description)
            group = self._find_group(groups_list, group)
            group.children.append(adapter)
        return categories_dict
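
    # Shape of the structure returned above, with hypothetical names:
    #   {"Analyze":  [<AlgorithmTransientGroup children=[adapter1, adapter2]>],
    #    "Simulate": [<AlgorithmTransientGroup children=[adapter3]>]}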

    @staticmethod
    def get_generic_entity(entity_type, filter_value, select_field):
        return dao.get_generic_entity(entity_type, filter_value, select_field)

    ##########################################################################
    ######## Methods below are for MeasurePoint selections ###################
    ##########################################################################

    @staticmethod
    def get_selections_for_project(project_id, datatype_gid):
        """
        Retrieve from DB the saved selections for the current project. If a selection
        does not have all of its labels among the labels of the given connectivity,
        it will not be returned.
        :returns: List of ConnectivitySelection entities.
        """
        return dao.get_selections_for_project(project_id, datatype_gid)

    @staticmethod
    def save_measure_points_selection(ui_name, selected_nodes, datatype_gid,
                                      project_id):
        """
        Store in DB a ConnectivitySelection.
        """
        select_entities = dao.get_selections_for_project(
            project_id, datatype_gid, ui_name)

        if select_entities:
            # when a selection with this name already exists, update it:
            select_entity = select_entities[0]
            select_entity.selected_nodes = selected_nodes
        else:
            select_entity = MeasurePointsSelection(ui_name, selected_nodes,
                                                   datatype_gid, project_id)

        dao.store_entity(select_entity)

    ##########################################################################
    ##########    Below are PSE Filters specific methods    ##################
    ##########################################################################

    @staticmethod
    def get_stored_pse_filters(datatype_group_gid):
        return dao.get_stored_pse_filters(datatype_group_gid)

    @staticmethod
    def save_pse_filter(ui_name, datatype_group_gid, threshold_value,
                        applied_on):
        """
        Store in DB a PSE filter.
        """
        select_entities = dao.get_stored_pse_filters(datatype_group_gid,
                                                     ui_name)

        if select_entities:
            # when the UI name is already in DB, update the existing entity
            select_entity = select_entities[0]
            select_entity.threshold_value = threshold_value
            select_entity.applied_on = applied_on  # this is the type, as in applied on size or color
        else:
            select_entity = StoredPSEFilter(ui_name, datatype_group_gid,
                                            threshold_value, applied_on)

        dao.store_entity(select_entity)
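

# A hedged usage sketch of the selection upsert above (GID and node values are
# hypothetical):
#
#   AlgorithmService.save_measure_points_selection(
#       "my-selection", "[0, 3, 7]", "conn-gid", project_id=1)
#   # a first call stores a new MeasurePointsSelection; calling again with the
#   # same ui_name and datatype_gid updates the stored selected_nodes instead.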