Example #1
    def _remove_project_node_files(self, project_id, gid, links, skip_validation=False):
        """
        Delegate removal of a node in the structure of the project.
        Raises StructureException in case of a problem.
        """
        try:
            project = self.find_project(project_id)
            datatype = dao.get_datatype_by_gid(gid)

            if links:
                op = dao.get_operation_by_id(datatype.fk_from_operation)
                # Instead of deleting, we copy the datatype to the linked project
                # We also clone the operation
                new_operation = self.__copy_linked_datatype_before_delete(op, datatype, project,
                                                                          links[0].fk_to_project)

                # If there is a datatype group and an operation group that were not yet
                # moved to the linked project, move them now
                if datatype.fk_datatype_group is not None:
                    dt_group_op = dao.get_operation_by_id(datatype.fk_from_operation)
                    op_group = dao.get_operationgroup_by_id(dt_group_op.fk_operation_group)
                    op_group.fk_launched_in = links[0].fk_to_project
                    dao.store_entity(op_group)

                    burst = dao.get_burst_for_operation_id(op.id)
                    if burst is not None:
                        burst.fk_project = links[0].fk_to_project
                        dao.store_entity(burst)

                    dt_group = dao.get_datatypegroup_by_op_group_id(op_group.id)
                    dt_group.parent_operation = new_operation
                    dt_group.fk_from_operation = new_operation.id
                    dao.store_entity(dt_group)

            else:
                # There is no link for this datatype so it has to be deleted
                specific_remover = get_remover(datatype.type)(datatype)
                specific_remover.remove_datatype(skip_validation)

                # Remove burst if dt has one and it still exists
                if datatype.fk_parent_burst is not None and datatype.is_ts:
                    burst = dao.get_burst_for_operation_id(datatype.fk_from_operation)

                    if burst is not None:
                        dao.remove_entity(BurstConfiguration, burst.id)

        except RemoveDataTypeException:
            self.logger.exception("Could not execute operation Node Remove!")
            raise
        except FileStructureException:
            self.logger.exception("Remove operation failed")
            raise StructureException("Remove operation failed for unknown reasons. Please contact the system administrator.")
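
The branch above encodes a relink-or-delete rule: when links exist, the datatype (together with its operation, burst, and group) moves to the first linked project; only unlinked datatypes are physically removed. A hypothetical, simplified stand-in for that decision (not TVB API, just the rule in isolation):

def removal_action(links):
    if links:
        # Ownership moves to the first linked project; nothing is deleted.
        return "relink", links[0].fk_to_project
    # No links remain, so the type-specific remover deletes the datatype.
    return "delete", None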
Example #2
    def get_operation_details(self, operation_gid, is_group):
        """
        :returns: an OperationOverlayDetails entity filled with all information for the current operation.
        """

        if is_group:
            operation_group = self.get_operation_group_by_gid(operation_gid)
            operation = dao.get_operations_in_group(operation_group.id, False, True)
            # Reload, to make sure all lazy attributes are populated as well.
            operation = dao.get_operation_by_gid(operation.gid)
            no_of_op_in_group = dao.get_operations_in_group(operation_group.id, is_count=True)
            datatype_group = self.get_datatypegroup_by_op_group_id(operation_group.id)
            count_result = dao.count_datatypes_in_group(datatype_group.id)

        else:
            operation = dao.get_operation_by_gid(operation_gid)
            if operation is None:
                return None
            no_of_op_in_group = 1
            count_result = dao.count_resulted_datatypes(operation.id)

        user_display_name = dao.get_user_by_id(operation.fk_launched_by).display_name
        burst = dao.get_burst_for_operation_id(operation.id)
        datatypes_param, all_special_params = self._review_operation_inputs(operation.gid)

        op_pid = dao.get_operation_process_for_operation(operation.id)
        op_details = OperationOverlayDetails(operation, user_display_name, len(datatypes_param),
                                             count_result, burst, no_of_op_in_group, op_pid)

        # Add all parameters which are set differently by the user on this Operation.
        if all_special_params is not None:
            op_details.add_scientific_fields(all_special_params)
        return op_details
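
A hedged usage sketch (the service instantiation and gid value are assumed, not from the source):

# "some-gid" is a placeholder operation gid.
details = ProjectService().get_operation_details("some-gid", is_group=False)
if details is None:
    print("No operation found for that gid")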
 def stop_operation(operation_id, is_group=False, remove_after_stop=False):
     # type: (int, bool, bool) -> bool
     """
     Stop (also named Cancel) the operation given by operation_id,
     and potentially also remove it after (with all linked data).
     In case the Operation has a linked Burst, remove that too.
     :param operation_id: ID for Operation (or OperationGroup) to be canceled/removed
     :param is_group: When true stop all the operations from that group.
     :param remove_after_stop: if True, also remove the operation(s) after stopping
     :returns: True if the stop step was successful
     """
     result = False
     if is_group:
         op_group = ProjectService.get_operation_group_by_id(operation_id)
         operations_in_group = ProjectService.get_operations_in_group(op_group)
         for operation in operations_in_group:
             result = OperationService.stop_operation(operation.id, False, remove_after_stop) or result
     else:
         result = BackendClientFactory.stop_operation(operation_id)
         if remove_after_stop:
             burst_config = dao.get_burst_for_operation_id(operation_id)
             ProjectService().remove_operation(operation_id)
             if burst_config is not None:
                 result = dao.remove_entity(BurstConfiguration, burst_config.id) or result
     return result
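
A hedged usage sketch (the id is a placeholder; the call form follows the recursive call inside the group branch above):

# Stop every operation in group 42, then remove each one plus any linked burst.
stopped = OperationService.stop_operation(42, is_group=True, remove_after_stop=True)
if not stopped:
    print("Nothing was stopped")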
def do_operation_launch(operation_id):
    """
    Event attached to the local queue, for executing an operation when resources become available.
    """
    LOGGER = get_logger('tvb.core.operation_async_launcher')

    try:
        LOGGER.debug("Loading operation with id=%s" % operation_id)
        current_operation = dao.get_operation_by_id(operation_id)
        stored_adapter = current_operation.algorithm
        LOGGER.debug("Importing Algorithm: " + str(stored_adapter.classname) +
                     " for Operation:" + str(current_operation.id))
        PARAMS = parse_json_parameters(current_operation.parameters)
        adapter_instance = ABCAdapter.build_adapter(stored_adapter)

        ## Un-comment below for profiling an operation:
        ## import cherrypy.lib.profiler as profiler
        ## p = profiler.Profiler("/Users/lia.domide/TVB/profiler/")
        ## p.run(OperationService().initiate_prelaunch, current_operation, adapter_instance, {}, **PARAMS)

        OperationService().initiate_prelaunch(current_operation,
                                              adapter_instance, {}, **PARAMS)
        LOGGER.debug("Successfully finished operation " + str(operation_id))

    except Exception as excep:
        LOGGER.error("Could not execute operation " + str(operation_id))
        LOGGER.exception(excep)
        parent_burst = dao.get_burst_for_operation_id(operation_id)
        if parent_burst is not None:
            WorkflowService().mark_burst_finished(parent_burst,
                                                  error_message=str(excep))
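
The except branch above is a recurring failure pattern: on any launch error, find the parent burst, if one exists, and mark it finished with the error message. Isolated as a helper (a sketch; dao and WorkflowService are the names used in the snippet above):

def fail_parent_burst(operation_id, error):
    # Mirrors the error path of do_operation_launch.
    parent_burst = dao.get_burst_for_operation_id(operation_id)
    if parent_burst is not None:
        WorkflowService().mark_burst_finished(parent_burst, error_message=str(error))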
Example #5
    def get_operation_details(self, operation_gid, is_group):
        """
        :returns: an OperationOverlayDetails entity filled with all information for the current operation.
        """

        if is_group:
            operation_group = self.get_operation_group_by_gid(operation_gid)
            operation = dao.get_operations_in_group(operation_group.id, False, True)
            ## Reload, to make sure all lazy attributes are populated as well.
            operation = dao.get_operation_by_gid(operation.gid)
            no_of_op_in_group = dao.get_operations_in_group(operation_group.id, is_count=True)
            datatype_group = self.get_datatypegroup_by_op_group_id(operation_group.id)
            count_result = dao.count_datatypes_in_group(datatype_group.id)

        else:
            operation = dao.get_operation_by_gid(operation_gid)
            if operation is None:
                return None
            no_of_op_in_group = 1
            count_result = dao.count_resulted_datatypes(operation.id)

        username = dao.get_user_by_id(operation.fk_launched_by).username
        burst = dao.get_burst_for_operation_id(operation.id)
        datatypes_param, all_special_params = ProjectService._review_operation_inputs(operation.gid)

        op_pid = dao.get_operation_process_for_operation(operation.id)
        op_details = OperationOverlayDetails(operation, username, len(datatypes_param),
                                             count_result, burst, no_of_op_in_group, op_pid)

        ## Add all parameters which are set differently by the user on this Operation.
        if all_special_params is not None:
            op_details.add_scientific_fields(all_special_params)
        return op_details
def do_operation_launch(operation_id):
    """
    Event attached to the local queue, for executing an operation when resources become available.
    """
    LOGGER = get_logger('tvb.core.operation_async_launcher')

    try:
        LOGGER.debug("Loading operation with id=%s" % operation_id)
        current_operation = dao.get_operation_by_id(operation_id)
        stored_adapter = current_operation.algorithm
        LOGGER.debug("Importing Algorithm: " + str(stored_adapter.classname) +
                     " for Operation:" + str(current_operation.id))
        PARAMS = parse_json_parameters(current_operation.parameters)
        adapter_instance = ABCAdapter.build_adapter(stored_adapter)

        ## Un-comment below for profiling an operation:
        ## import cherrypy.lib.profiler as profiler
        ## p = profiler.Profiler("/Users/lia.domide/TVB/profiler/")
        ## p.run(OperationService().initiate_prelaunch, current_operation, adapter_instance, {}, **PARAMS)

        OperationService().initiate_prelaunch(current_operation, adapter_instance, {}, **PARAMS)
        LOGGER.debug("Successfully finished operation " + str(operation_id))

    except Exception as excep:
        LOGGER.error("Could not execute operation " + str(operation_id))
        LOGGER.exception(excep)
        parent_burst = dao.get_burst_for_operation_id(operation_id)
        if parent_burst is not None:
            WorkflowService().mark_burst_finished(parent_burst, error_message=str(excep))
Example #7
def _adapt_epileptor_simulations():
    """
    Previous simulations on the EpileptorWithPermitivity model should be converted to use the Epileptor model.
    As the parameters of the two models have different ranges and defaults, we do not translate parameters;
    we only set Epileptor as the model instead of EpileptorPermittivityCoupling, and leave the model params at their defaults.
    """
    session = SA_SESSIONMAKER()
    epileptor_old = "EpileptorPermittivityCoupling"
    epileptor_new = "Epileptor"
    param_model = "model"

    try:
        all_ep_ops = session.query(model.Operation).filter(
            model.Operation.parameters.ilike('%"' + epileptor_old +
                                             '"%')).all()
        files_helper = FilesHelper()
        all_bursts = dict()

        for ep_op in all_ep_ops:
            try:
                op_params = parse_json_parameters(ep_op.parameters)
                if op_params[param_model] != epileptor_old:
                    LOGGER.debug("Skipping op " + str(op_params[param_model]) +
                                 " -- " + str(ep_op))
                    continue

                LOGGER.debug("Updating " + str(op_params))
                op_params[param_model] = epileptor_new
                ep_op.parameters = json.dumps(op_params,
                                              cls=MapAsJson.MapAsJsonEncoder)
                LOGGER.debug("New params:" + ep_op.parameters)
                files_helper.write_operation_metadata(ep_op)

                burst = dao.get_burst_for_operation_id(ep_op.id)
                if burst is not None:
                    LOGGER.debug("Updating burst:" + str(burst))
                    burst.prepare_after_load()
                    burst.simulator_configuration[param_model] = {
                        'value': epileptor_new
                    }
                    burst._simulator_configuration = json.dumps(
                        burst.simulator_configuration,
                        cls=MapAsJson.MapAsJsonEncoder)
                    if burst.id not in all_bursts:
                        all_bursts[burst.id] = burst

            except Exception:
                LOGGER.exception("Could not process " + str(ep_op))

        session.add_all(all_ep_ops)
        session.add_all(all_bursts.values())
        session.commit()

    except Exception:
        LOGGER.exception("Could not update Simulation Epileptor Params")
    finally:
        session.close()
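
The migration itself is a decode-mutate-encode round trip over the stored JSON parameters. A self-contained sketch of that step using plain json (MapAsJson.MapAsJsonEncoder is TVB-specific and omitted here):

import json

def swap_model_param(parameters_json, old="EpileptorPermittivityCoupling", new="Epileptor"):
    params = json.loads(parameters_json)
    if params.get("model") != old:
        return parameters_json  # nothing to migrate
    params["model"] = new
    return json.dumps(params)

assert json.loads(swap_model_param('{"model": "EpileptorPermittivityCoupling"}'))["model"] == "Epileptor"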
    def run(self):
        """
        Get the required data from the operation queue and launch the operation.
        """
        # Try to get a spot to launch our own operation.
        LOCKS_QUEUE.get(True)
        operation_id = self.operation_id
        run_params = [TvbProfile.current.PYTHON_INTERPRETER_PATH, '-m', 'tvb.core.operation_async_launcher',
                      str(operation_id), TvbProfile.CURRENT_PROFILE_NAME]

        # In the exceptional case where the user pressed stop while the Thread was starting up,
        # we should no longer launch the operation.
        if self.stopped() is False:

            env = os.environ.copy()
            env['PYTHONPATH'] = os.pathsep.join(sys.path)
            # anything that was already in $PYTHONPATH should have been reproduced in sys.path

            launched_process = Popen(run_params, stdout=PIPE, stderr=PIPE, env=env)

            LOGGER.debug("Storing pid=%s for operation id=%s launched on local machine." % (operation_id,
                                                                                            launched_process.pid))
            op_ident = model.OperationProcessIdentifier(operation_id, pid=launched_process.pid)
            dao.store_entity(op_ident)

            if self.stopped():
                # In the exceptional case where the user pressed stop while the Thread was starting up
                # and stop_operation is concurrently asking about OperationProcessIdentifier.
                self.stop_pid(launched_process.pid)

            subprocess_result = launched_process.communicate()
            LOGGER.info("Finished with launch of operation %s" % operation_id)
            returned = launched_process.wait()

            if returned != 0 and not self.stopped():
                # Process did not end as expected. (e.g. Segmentation fault)
                workflow_service = WorkflowService()
                operation = dao.get_operation_by_id(self.operation_id)
                LOGGER.error("Operation suffered fatal failure! Exit code: %s Exit message: %s" % (returned,
                                                                                                   subprocess_result))

                workflow_service.persist_operation_state(operation, model.STATUS_ERROR,
                                                         "Operation failed unexpectedly! Please check the log files.")

                burst_entity = dao.get_burst_for_operation_id(self.operation_id)
                if burst_entity:
                    message = "Error in operation process! Possibly segmentation fault."
                    workflow_service.mark_burst_finished(burst_entity, error_message=message)

            del launched_process

        # Give back the empty spot now that the operation has finished
        CURRENT_ACTIVE_THREADS.remove(self)
        LOCKS_QUEUE.put(1)
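
The launch boilerplate in run() (interpreter, -m module, PYTHONPATH propagation, communicate) can be sketched as a standalone helper with nothing TVB-specific:

import os
import sys
from subprocess import Popen, PIPE

def launch_python_module(module_name, *args):
    # Propagate sys.path through PYTHONPATH so the child interpreter sees the same modules.
    env = os.environ.copy()
    env['PYTHONPATH'] = os.pathsep.join(sys.path)
    proc = Popen([sys.executable, '-m', module_name] + [str(a) for a in args],
                 stdout=PIPE, stderr=PIPE, env=env)
    out, err = proc.communicate()  # waits for the child and reaps it
    return proc.returncode, out, err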
    def run(self):
        """
        Get the required data from the operation queue and launch the operation.
        """
        # Try to get a spot to launch our own operation.
        LOCKS_QUEUE.get(True)
        operation_id = self.operation_id
        run_params = [TvbProfile.current.PYTHON_INTERPRETER_PATH, '-m', 'tvb.core.operation_async_launcher',
                      str(operation_id), TvbProfile.CURRENT_PROFILE_NAME]

        # In the exceptional case where the user pressed stop while the Thread was starting up,
        # we should no longer launch the operation.
        if self.stopped() is False:

            env = os.environ.copy()
            env['PYTHONPATH'] = os.pathsep.join(sys.path)
            # anything that was already in $PYTHONPATH should have been reproduced in sys.path

            launched_process = Popen(run_params, stdout=PIPE, stderr=PIPE, env=env)

            LOGGER.debug("Storing pid=%s for operation id=%s launched on local machine." % (operation_id,
                                                                                            launched_process.pid))
            op_ident = model.OperationProcessIdentifier(operation_id, pid=launched_process.pid)
            dao.store_entity(op_ident)

            if self.stopped():
                # In the exceptional case where the user pressed stop while the Thread was starting up
                # and stop_operation is concurrently asking about OperationProcessIdentifier.
                self.stop_pid(launched_process.pid)

            subprocess_result = launched_process.communicate()
            LOGGER.info("Finished with launch of operation %s" % operation_id)
            returned = launched_process.wait()

            if returned != 0 and not self.stopped():
                # Process did not end as expected. (e.g. Segmentation fault)
                workflow_service = WorkflowService()
                operation = dao.get_operation_by_id(self.operation_id)
                LOGGER.error("Operation suffered fatal failure! Exit code: %s Exit message: %s" % (returned,
                                                                                                   subprocess_result))

                workflow_service.persist_operation_state(operation, model.STATUS_ERROR,
                                                         "Operation failed unexpectedly! Please check the log files.")

                burst_entity = dao.get_burst_for_operation_id(self.operation_id)
                if burst_entity:
                    message = "Error in operation process! Possibly segmentation fault."
                    workflow_service.mark_burst_finished(burst_entity, error_message=message)

            del launched_process

        # Give back the empty spot now that the operation has finished
        CURRENT_ACTIVE_THREADS.remove(self)
        LOCKS_QUEUE.put(1)
def _adapt_epileptor_simulations():
    """
    Previous simulations on the EpileptorWithPermitivity model should be converted to use the Epileptor model.
    As the parameters of the two models have different ranges and defaults, we do not translate parameters;
    we only set Epileptor as the model instead of EpileptorPermittivityCoupling, and leave the model params at their defaults.
    """
    session = SA_SESSIONMAKER()
    epileptor_old = "EpileptorPermittivityCoupling"
    epileptor_new = "Epileptor"
    param_model = "model"

    try:
        all_ep_ops = session.query(model.Operation).filter(
            model.Operation.parameters.ilike('%"' + epileptor_old + '"%')).all()
        files_helper = FilesHelper()
        all_bursts = dict()

        for ep_op in all_ep_ops:
            try:
                op_params = parse_json_parameters(ep_op.parameters)
                if op_params[param_model] != epileptor_old:
                    LOGGER.debug("Skipping op " + str(op_params[param_model]) + " -- " + str(ep_op))
                    continue

                LOGGER.debug("Updating " + str(op_params))
                op_params[param_model] = epileptor_new
                ep_op.parameters = json.dumps(op_params, cls=MapAsJson.MapAsJsonEncoder)
                LOGGER.debug("New params:" + ep_op.parameters)
                files_helper.write_operation_metadata(ep_op)

                burst = dao.get_burst_for_operation_id(ep_op.id)
                if burst is not None:
                    LOGGER.debug("Updating burst:" + str(burst))
                    burst.prepare_after_load()
                    burst.simulator_configuration[param_model] = {'value': epileptor_new}
                    burst._simulator_configuration = json.dumps(burst.simulator_configuration,
                                                                cls=MapAsJson.MapAsJsonEncoder)
                    if burst.id not in all_bursts:
                        all_bursts[burst.id] = burst

            except Exception:
                LOGGER.exception("Could not process " + str(ep_op))

        session.add_all(all_ep_ops)
        session.add_all(all_bursts.values())
        session.commit()

    except Exception:
        LOGGER.exception("Could not update Simulation Epileptor Params")
    finally:
        session.close()
Example #11
    def run(self):
        """
        Get the required data from the operation queue and launch the operation.
        """
        # Try to get a spot to launch our own operation.
        LOCKS_QUEUE.get(True)
        operation_id = self.operation_id
        user_name_label = self.user_name_label
        run_params = [config().get_python_path(), '-m', 'tvb.core.cluster_launcher', str(operation_id), user_name_label]

        if tvb_profile.CURRENT_SELECTED_PROFILE is not None:
            run_params.extend([tvb_profile.SUBPARAM_PROFILE, tvb_profile.CURRENT_SELECTED_PROFILE])

        # In the exceptional case where the user pressed stop while the Thread was starting up,
        # we should no longer launch the operation.
        if self.stopped() is False:
            launched_process = Popen(run_params, stdout=PIPE, stderr=PIPE)
            LOGGER.debug("Storing pid=%s for operation id=%s launched on local machine." % (operation_id,
                                                                                            launched_process.pid))
            op_ident = model.OperationProcessIdentifier(operation_id, pid=launched_process.pid)
            dao.store_entity(op_ident)

            if self.stopped():
                # In the exceptional case where the user pressed stop while the Thread was starting up
                # and stop_operation is concurrently asking about OperationProcessIdentifier.
                self.stop_pid(launched_process.pid)

            launched_process.communicate()
            LOGGER.info("Finished with launch of operation %s" % operation_id)
            returned = launched_process.wait()

            if returned != 0 and not self.stopped():
                # Process did not end as expected. (e.g. Segmentation fault)
                operation = dao.get_operation_by_id(self.operation_id)
                LOGGER.error("Operation suffered fatal failure with exit code: %s" % returned)

                operation.mark_complete(model.STATUS_ERROR,
                                        "Operation failed unexpectedly! Probably segmentation fault.")
                dao.store_entity(operation)

                burst_entity = dao.get_burst_for_operation_id(self.operation_id)
                if burst_entity:
                    message = "Error on burst operation! Probably segmentation fault."
                    WorkflowService().mark_burst_finished(burst_entity, error=True, error_message=message)

            del launched_process

        # Give back the empty spot now that the operation has finished
        CURRENT_ACTIVE_THREADS.remove(self)
        LOCKS_QUEUE.put(1)
    def run(self):
        """
        Get the required data from the operation queue and launch the operation.
        """
        # Try to get a spot to launch our own operation.
        LOCKS_QUEUE.get(True)
        operation_id = self.operation_id
        run_params = [config().get_python_path(), '-m', 'tvb.core.operation_async_launcher', str(operation_id)]

        if tvb_profile.CURRENT_SELECTED_PROFILE is not None:
            run_params.extend([tvb_profile.SUBPARAM_PROFILE, tvb_profile.CURRENT_SELECTED_PROFILE])

        # In the exceptional case where the user pressed stop while the Thread was starting up,
        # we should no longer launch the operation.
        if self.stopped() is False:
            launched_process = Popen(run_params, stdout=PIPE, stderr=PIPE)
            LOGGER.debug("Storing pid=%s for operation id=%s launched on local machine." % (operation_id,
                                                                                            launched_process.pid))
            op_ident = model.OperationProcessIdentifier(operation_id, pid=launched_process.pid)
            dao.store_entity(op_ident)

            if self.stopped():
                # In the exceptional case where the user pressed stop while the Thread was starting up
                # and stop_operation is concurrently asking about OperationProcessIdentifier.
                self.stop_pid(launched_process.pid)

            launched_process.communicate()
            LOGGER.info("Finished with launch of operation %s" % operation_id)
            returned = launched_process.wait()

            if returned != 0 and not self.stopped():
                # Process did not end as expected. (e.g. Segmentation fault)
                operation = dao.get_operation_by_id(self.operation_id)
                LOGGER.error("Operation suffered fatal failure with exit code: %s" % returned)

                operation.mark_complete(model.STATUS_ERROR,
                                        "Operation failed unexpectedly! Probably segmentation fault.")
                dao.store_entity(operation)

                burst_entity = dao.get_burst_for_operation_id(self.operation_id)
                if burst_entity:
                    message = "Error on burst operation! Probably segmentation fault."
                    WorkflowService().mark_burst_finished(burst_entity, error=True, error_message=message)

            del launched_process

        # Give back the empty spot now that the operation has finished
        CURRENT_ACTIVE_THREADS.remove(self)
        LOCKS_QUEUE.put(1)
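
All four run() variants re-check the stop flag right after storing the PID, closing the race with a concurrent stop_operation that ran before the PID record existed. The guard in generic form, with threading.Event standing in for the stop flag (a sketch, not TVB code):

import os
import signal
import threading

stop_flag = threading.Event()

def guard_after_pid_registered(pid):
    # Re-check once the pid is visible to stoppers; a stop that arrived
    # before registration would otherwise leave the child running.
    if stop_flag.is_set():
        os.kill(pid, signal.SIGTERM)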
Example #13
    def __copy_linked_datatype_before_delete(self, op, datatype, project, fk_to_project):
        new_op = Operation(op.view_model_gid,
                           dao.get_system_user().id,
                           fk_to_project,
                           datatype.parent_operation.fk_from_algo,
                           datatype.parent_operation.status,
                           datatype.parent_operation.start_date,
                           datatype.parent_operation.completion_date,
                           datatype.parent_operation.fk_operation_group,
                           datatype.parent_operation.additional_info,
                           datatype.parent_operation.user_group,
                           datatype.parent_operation.range_values)
        new_op.visible = datatype.parent_operation.visible
        new_op = dao.store_entity(new_op)
        to_project = self.find_project(fk_to_project)
        to_project_path = self.storage_interface.get_project_folder(to_project.name)

        full_path = h5.path_for_stored_index(datatype)
        old_folder = self.storage_interface.get_project_folder(project.name, str(op.id))
        file_paths = h5.gather_references_of_view_model(op.view_model_gid, old_folder, only_view_models=True)[0]
        file_paths.append(full_path)

        # The BurstConfiguration h5 file has to be moved only when we handle the time series whose
        # operation folder contains the file
        if datatype.is_ts and datatype.fk_parent_burst is not None:
            bc_path = h5.path_for(datatype.parent_operation.id, BurstConfigurationH5, datatype.fk_parent_burst,
                                  project.name)
            if os.path.exists(bc_path):
                file_paths.append(bc_path)

                bc = dao.get_burst_for_operation_id(op.id)
                bc.fk_simulation = new_op.id
                dao.store_entity(bc)

        # Move all files to the new operation folder
        self.storage_interface.move_datatype_with_sync(to_project, to_project_path, new_op.id, file_paths)

        datatype.fk_from_operation = new_op.id
        datatype.parent_operation = new_op
        dao.store_entity(datatype)

        return new_op
Example #14
    def retrieve_project_full(self, project_id, applied_filters=None, current_page=1):
        """
        Return a Tuple with Project entity and Operations for current Project.
        :param project_id: Current Project Identifier
        :param applied_filters: Filters to apply on Operations
        :param current_page: Number for current page in operations
        """
        selected_project = self.find_project(project_id)
        total_filtered = self.count_filtered_operations(project_id, applied_filters)
        pages_no = total_filtered // OPERATIONS_PAGE_SIZE + (1 if total_filtered % OPERATIONS_PAGE_SIZE else 0)
        total_ops_nr = self.count_filtered_operations(project_id)

        start_idx = OPERATIONS_PAGE_SIZE * (current_page - 1)
        current_ops = dao.get_filtered_operations(project_id, applied_filters, start_idx, OPERATIONS_PAGE_SIZE)
        if current_ops is None:
            return selected_project, 0, [], 0

        operations = []
        view_categ_id = dao.get_visualisers_categories()[0].id
        for one_op in current_ops:
            try:
                result = {}
                if one_op[0] != one_op[1]:
                    result["id"] = str(one_op[0]) + "-" + str(one_op[1])
                else:
                    result["id"] = str(one_op[0])
                burst = dao.get_burst_for_operation_id(one_op[0])
                result["burst_name"] = burst.name if burst else '-'
                result["count"] = one_op[2]
                result["gid"] = one_op[13]
                if one_op[3] is not None and one_op[3]:
                    try:
                        operation_group = dao.get_generic_entity(OperationGroup, one_op[3])[0]
                        result["group"] = operation_group.name
                        result["group"] = result["group"].replace("_", " ")
                        result["operation_group_id"] = operation_group.id
                        datatype_group = dao.get_datatypegroup_by_op_group_id(one_op[3])
                        result["datatype_group_gid"] = datatype_group.gid
                        result["gid"] = operation_group.gid
                        ## Filter only viewers for current DataTypeGroup entity:
                        result["view_groups"] = FlowService().get_visualizers_for_group(datatype_group.gid)
                    except Exception:
                        self.logger.exception("We will ignore group on entity:" + str(one_op))
                        result["datatype_group_gid"] = None
                else:
                    result['group'] = None
                    result['datatype_group_gid'] = None
                result["algorithm"] = dao.get_algorithm_by_id(one_op[4])
                result["user"] = dao.get_user_by_id(one_op[5])
                if type(one_op[6]) is str:
                    result["create"] = string2date(str(one_op[6]))
                else:
                    result["create"] = one_op[6]
                if type(one_op[7]) is str:
                    result["start"] = string2date(str(one_op[7]))
                else:
                    result["start"] = one_op[7]
                if type(one_op[8]) is str:
                    result["complete"] = string2date(str(one_op[8]))
                else:
                    result["complete"] = one_op[8]

                if result["complete"] is not None and result["start"] is not None:
                    result["duration"] = format_timedelta(result["complete"] - result["start"])
                result["status"] = one_op[9]
                result["additional"] = one_op[10]
                result["visible"] = True if one_op[11] > 0 else False
                result['operation_tag'] = one_op[12]
                result['figures'] = None
                if not result['group']:
                    datatype_results = dao.get_results_for_operation(result['id'])
                    result['results'] = []
                    for dt in datatype_results:
                        dt_loaded = ABCAdapter.load_entity_by_gid(dt.gid)
                        if dt_loaded:
                            result['results'].append(dt_loaded)
                        else:
                            self.logger.warning("Could not retrieve datatype %s" % str(dt))

                    operation_figures = dao.get_figures_for_operation(result['id'])

                    # Compute the full path to the figure / image on disk
                    for figure in operation_figures:
                        figures_folder = self.structure_helper.get_images_folder(figure.project.name)
                        figure_full_path = os.path.join(figures_folder, figure.file_path)
                        # Compute the path available from browser
                        figure.figure_path = utils.path2url_part(figure_full_path)

                    result['figures'] = operation_figures
                else:
                    result['results'] = None
                operations.append(result)
            except Exception:
                ## We got an exception when processing one Operation Row. We will continue with the rest of the rows.
                self.logger.exception("Could not prepare operation for display:" + str(one_op))
        return selected_project, total_ops_nr, operations, pages_no
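
The pages_no expression is ceiling division in disguise; isolated for clarity (a pure-Python sketch):

def page_count(total_items, page_size):
    return total_items // page_size + (1 if total_items % page_size else 0)

assert page_count(0, 20) == 0
assert page_count(21, 20) == 2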
Example #15
    def remove_datatype(self, project_id, datatype_gid, skip_validation=False):
        """
        Method used for removing a dataType. If the given dataType is a DatatypeGroup
        or a dataType from a DataTypeGroup, then this method will remove the entire group.
        The operation(s) used for creating the dataType(s) will also be removed.
        """
        datatype = dao.get_datatype_by_gid(datatype_gid)
        if datatype is None:
            self.logger.warning("Attempt to delete DT[%s] which no longer exists." % datatype_gid)
            return

        is_datatype_group = False
        datatype_group = None
        if dao.is_datatype_group(datatype_gid):
            is_datatype_group = True
            datatype_group = datatype
        elif datatype.fk_datatype_group is not None:
            is_datatype_group = True
            datatype_group = dao.get_datatype_by_id(datatype.fk_datatype_group)

        operations_set = [datatype.fk_from_operation]
        correct = True

        if is_datatype_group:
            operations_set = [datatype_group.fk_from_operation]
            self.logger.debug("Removing datatype group %s" % datatype_group)
            if datatype_group.fk_parent_burst:
                burst = dao.get_generic_entity(BurstConfiguration, datatype_group.fk_parent_burst, 'gid')[0]
                dao.remove_entity(BurstConfiguration, burst.id)
                if burst.fk_metric_operation_group:
                    correct = correct and self._remove_operation_group(burst.fk_metric_operation_group, project_id,
                                                                       skip_validation, operations_set)

                if burst.fk_operation_group:
                    correct = correct and self._remove_operation_group(burst.fk_operation_group, project_id,
                                                                       skip_validation, operations_set)

            else:
                self._remove_datatype_group_dts(project_id, datatype_group.id, skip_validation, operations_set)

                datatype_group = dao.get_datatype_group_by_gid(datatype_group.gid)
                dao.remove_entity(DataTypeGroup, datatype_group.id)
                correct = correct and dao.remove_entity(OperationGroup, datatype_group.fk_operation_group)
        else:
            self.logger.debug("Removing datatype %s" % datatype)
            self._remove_project_node_files(project_id, datatype.gid, skip_validation)

        # Remove Operation entity in case no other DataType needs them.
        project = dao.get_project_by_id(project_id)
        for operation_id in operations_set:
            dependent_dt = dao.get_generic_entity(DataType, operation_id, "fk_from_operation")
            if len(dependent_dt) > 0:
                # Do not remove Operation in case DataType still exist referring it.
                continue
            op_burst = dao.get_burst_for_operation_id(operation_id)
            if op_burst:
                correct = correct and dao.remove_entity(BurstConfiguration, op_burst.id)
            correct = correct and dao.remove_entity(Operation, operation_id)
            # Make sure Operation folder is removed
            self.structure_helper.remove_operation_data(project.name, operation_id)

        encryption_handler.push_folder_to_sync(self.structure_helper.get_project_folder(project))
        if not correct:
            raise RemoveDataTypeException("Could not remove DataType " + str(datatype_gid))
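
The final loop in remove_datatype deletes an Operation only when no surviving DataType still references it. The rule in isolation, with plain data standing in for the dao layer:

def removable_operations(operation_ids, datatypes_by_operation):
    # An operation survives while any DataType still points at it.
    return [op_id for op_id in operation_ids
            if not datatypes_by_operation.get(op_id)]

assert removable_operations([1, 2], {1: ["dt-a"], 2: []}) == [2]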
Example #16
 def load_operation(operation_id):
     """ Retrieve previously stored Operation from DB, and load operation.burst attribute"""
     operation = dao.get_operation_by_id(operation_id)
     operation.burst = dao.get_burst_for_operation_id(operation_id)
     return operation
Example #17
 def _update_burst_metric(self, operation_entity):
     burst_config = dao.get_burst_for_operation_id(operation_entity.id)
     if burst_config and burst_config.ranges:
         if burst_config.fk_metric_operation_group is None:
             burst_config.fk_metric_operation_group = operation_entity.fk_operation_group
         dao.store_entity(burst_config)
Example #18
 def load_operation(operation_id):
     """ Retrieve loaded OPeration from DB"""
     operation = dao.get_operation_by_id(operation_id)
     operation.burst = dao.get_burst_for_operation_id(operation_id)
     return operation
Example #20
    def retrieve_project_full(self, project_id, applied_filters=None, current_page=1):
        """
        Return a Tuple with Project entity and Operations for current Project.
        :param project_id: Current Project Identifier
        :param applied_filters: Filters to apply on Operations
        :param current_page: Number for current page in operations
        """
        selected_project = self.find_project(project_id)
        total_filtered = self.count_filtered_operations(project_id, applied_filters)
        pages_no = total_filtered // OPERATIONS_PAGE_SIZE + (1 if total_filtered % OPERATIONS_PAGE_SIZE else 0)
        total_ops_nr = self.count_filtered_operations(project_id)

        start_idx = OPERATIONS_PAGE_SIZE * (current_page - 1)
        current_ops = dao.get_filtered_operations(project_id, applied_filters, start_idx, OPERATIONS_PAGE_SIZE)
        if current_ops is None:
            return selected_project, 0, [], 0

        operations = []
        view_categ_id = dao.get_visualisers_categories()[0].id
        for one_op in current_ops:
            try:
                result = {}
                if one_op[0] != one_op[1]:
                    result["id"] = str(one_op[0]) + "-" + str(one_op[1])
                else:
                    result["id"] = str(one_op[0])
                burst = dao.get_burst_for_operation_id(one_op[0])
                result["burst_name"] = burst.name if burst else '-'
                result["count"] = one_op[2]
                result["gid"] = one_op[14]
                if one_op[3] is not None and one_op[3]:
                    try:
                        operation_group = dao.get_generic_entity(model.OperationGroup, one_op[3])[0]
                        result["group"] = operation_group.name
                        result["group"] = result["group"].replace("_", " ")
                        result["operation_group_id"] = operation_group.id
                        datatype_group = dao.get_datatypegroup_by_op_group_id(one_op[3])
                        result["datatype_group_gid"] = datatype_group.gid
                        result["gid"] = operation_group.gid

                        ## Filter only viewers for current DataTypeGroup entity:
                        launchers = self.retrieve_launchers(datatype_group.gid,
                                                            include_categories=[view_categ_id]).values()[0]
                        view_groups = []
                        for launcher in launchers.values():
                            url = '/flow/' + str(launcher['category']) + '/' + str(launcher['id'])
                            if launcher['part_of_group']:
                                url = '/flow/prepare_group_launch/' + datatype_group.gid + '/' + \
                                      str(launcher['category']) + '/' + str(launcher['id'])
                            view_groups.append(dict(name=launcher["displayName"],
                                                    url=url,
                                                    param_name=launcher['children'][0]['param_name'],
                                                    part_of_group=launcher['part_of_group']))
                        result["view_groups"] = view_groups

                    except Exception:
                        self.logger.exception("We will ignore group on entity:" + str(one_op))
                        result["datatype_group_gid"] = None
                else:
                    result['group'] = None
                    result['datatype_group_gid'] = None
                result["algorithm"] = dao.get_algorithm_by_id(one_op[4])
                result["method"] = one_op[5]
                result["user"] = dao.get_user_by_id(one_op[6])
                if type(one_op[7]) in (str, unicode):
                    result["create"] = string2date(str(one_op[7]))
                else:
                    result["create"] = one_op[7]
                if type(one_op[8]) in (str, unicode):
                    result["start"] = string2date(str(one_op[8]))
                else:
                    result["start"] = one_op[8]
                if type(one_op[9]) in (str, unicode):
                    result["complete"] = string2date(str(one_op[9]))
                else:
                    result["complete"] = one_op[9]

                if result["complete"] is not None and result["start"] is not None:
                    result["duration"] = format_timedelta(result["complete"] - result["start"])
                result["status"] = one_op[10]
                result["additional"] = one_op[11]
                result["visible"] = True if one_op[12] > 0 else False
                result['operation_tag'] = one_op[13]
                result['figures'] = None
                if not result['group']:
                    datatype_results = dao.get_results_for_operation(result['id'])
                    result['results'] = [dao.get_generic_entity(dt.module + '.' + dt.type,
                                                                dt.gid, 'gid')[0] for dt in datatype_results]
                    operation_figures = dao.get_figures_for_operation(result['id'])

                    # Compute the full path to the figure / image on disk
                    for figure in operation_figures:
                        figures_folder = self.structure_helper.get_images_folder(figure.project.name)
                        figure_full_path = os.path.join(figures_folder, figure.file_path)
                        # Compute the path available from browser
                        figure.figure_path = utils.path2url_part(figure_full_path)

                    result['figures'] = operation_figures
                else:
                    result['results'] = None
                operations.append(result)
            except Exception:
                ## We got an exception when processing one Operation Row. We will continue with the rest of the rows.
                self.logger.exception("Could not prepare operation for display:" + str(one_op))
        return selected_project, total_ops_nr, operations, pages_no
    def retrieve_project_full(self, project_id, applied_filters=None, current_page=1):
        """
        Return a Tuple with Project entity and Operations for current Project.
        :param project_id: Current Project Identifier
        :param applied_filters: Filters to apply on Operations
        :param current_page: Number for current page in operations
        """
        selected_project = self.find_project(project_id)
        total_filtered = self.count_filtered_operations(project_id, applied_filters)
        start_idx = OPERATIONS_PAGE_SIZE * (current_page - 1)

        if total_filtered >= start_idx + OPERATIONS_PAGE_SIZE:
            end_idx = OPERATIONS_PAGE_SIZE
        else:
            end_idx = total_filtered - start_idx

        pages_no = total_filtered // OPERATIONS_PAGE_SIZE + (1 if total_filtered % OPERATIONS_PAGE_SIZE else 0)
        total_ops_nr = self.count_filtered_operations(project_id)
        current_ops = dao.get_filtered_operations(project_id, applied_filters, start_idx, end_idx)
        started_ops = 0
        if current_ops is None:
            return selected_project, [], 0
        operations = []
        for one_op in current_ops:
            try:
                result = {}
                if one_op[0] != one_op[1]:
                    result["id"] = str(one_op[0]) + "-" + str(one_op[1])
                else:
                    result["id"] = str(one_op[0])
                burst = dao.get_burst_for_operation_id(one_op[0])
                result["burst_name"] = burst.name if burst else '-'
                result["count"] = one_op[2]
                result["gid"] = one_op[14]
                if one_op[3] is not None and one_op[3]:
                    try:
                        operation_group = dao.get_generic_entity(model.OperationGroup, one_op[3])[0]
                        result["group"] = operation_group.name
                        result["group"] = result["group"].replace("_", " ")
                        result["operation_group_id"] = operation_group.id
                        datatype_group = dao.get_datatypegroup_by_op_group_id(one_op[3])
                        datatype = dao.get_datatype_by_id(datatype_group.id)
                        result["datatype_group_gid"] = datatype.gid
                        result["gid"] = operation_group.gid
                        
                        all_categs = dao.get_algorithm_categories()
                        view_categ = dao.get_visualisers_categories()[0]
                        excludes = [categ.id for categ in all_categs if categ.id != view_categ.id]
                        algos = self.retrieve_launchers("DataTypeGroup", datatype.gid,
                                                        exclude_categories=excludes).values()[0]

                        view_groups = []
                        for algo in algos.values():
                            url = '/flow/' + str(algo['category']) + '/' + str(algo['id'])
                            if algo['part_of_group']:
                                url = '/flow/prepare_group_launch/' + datatype.gid + '/' + \
                                      str(algo['category']) + '/' + str(algo['id'])
                            view_groups.append(dict(name=algo["displayName"],
                                                    url=url,
                                                    param_name=algo['children'][0]['param_name'],
                                                    part_of_group=algo['part_of_group']))
                        result["view_groups"] = view_groups

                    except Exception as excep:
                        self.logger.error(excep)
                        self.logger.warning("Will ignore group on entity:" + str(one_op))
                        result["datatype_group_gid"] = None
                else:
                    result['group'] = None
                    result['datatype_group_gid'] = None
                result["algorithm"] = dao.get_algorithm_by_id(one_op[4])
                result["method"] = one_op[5]
                result["user"] = dao.get_user_by_id(one_op[6])
                if type(one_op[7]) in (str, unicode):
                    result["create"] = string2date(str(one_op[7]))
                else:
                    result["create"] = one_op[7]
                if type(one_op[8]) in (str, unicode):
                    result["start"] = string2date(str(one_op[8]))
                else:
                    result["start"] = one_op[8]
                if type(one_op[9]) in (str, unicode):
                    result["complete"] = string2date(str(one_op[9]))
                else:
                    result["complete"] = one_op[9]

                if result["complete"] is not None and result["start"] is not None:
                    result["duration"] = timedelta2string(result["complete"] - result["start"])
                result["status"] = one_op[10]
                if result["status"] == model.STATUS_STARTED:
                    started_ops += 1
                result["additional"] = one_op[11]
                result["visible"] = True if one_op[12] > 0 else False
                result['operation_tag'] = one_op[13]
                result['figures'] = None
                if not result['group']:
                    datatype_results = dao.get_results_for_operation(result['id'])
                    result['results'] = [dao.get_generic_entity(dt.module + '.' + dt.type,
                                                                dt.gid, 'gid')[0] for dt in datatype_results]
                    operation_figures = dao.get_figures_for_operation(result['id'])

                    # Compute the full path to the figure / image on disk
                    for figure in operation_figures:
                        figures_folder = self.structure_helper.get_images_folder(figure.project.name,
                                                                                 figure.operation.id)
                        figure_full_path = os.path.join(figures_folder, figure.file_path)
                        # Compute the path available from browser 
                        figure.figure_path = utils.path2url_part(figure_full_path)

                    result['figures'] = operation_figures
                else:
                    result['results'] = None
                operations.append(result)
Example #22
 def load_operation(operation_id):
     """ Retrieve previously stored Operation from DB, and load operation.burst attribute"""
     operation = dao.get_operation_by_id(operation_id)
     operation.burst = dao.get_burst_for_operation_id(operation_id)
     return operation
Example #23
 def get_burst_for_operation_id(operation_id):
     return dao.get_burst_for_operation_id(operation_id)
def _adapt_simulation_monitor_params():
    """
    For previous simulations with the EEG monitor, adjust for the change of input parameters.
    """
    session = SA_SESSIONMAKER()

    param_connectivity = "connectivity"
    param_eeg_proj_old = "monitors_parameters_option_EEG_projection_matrix_data"
    param_eeg_proj_new = "monitors_parameters_option_EEG_projection"
    param_eeg_sensors = "monitors_parameters_option_EEG_sensors"
    param_eeg_rm = "monitors_parameters_option_EEG_region_mapping"

    try:
        all_eeg_ops = session.query(model.Operation).filter(
            model.Operation.parameters.ilike('%"' + param_eeg_proj_old + '"%')).all()
        files_helper = FilesHelper()
        all_bursts = dict()

        for eeg_op in all_eeg_ops:
            try:
                op_params = parse_json_parameters(eeg_op.parameters)
                LOGGER.debug("Updating " + str(op_params))
                old_projection_guid = op_params[param_eeg_proj_old]
                connectivity_guid = op_params[param_connectivity]

                rm = dao.get_generic_entity(RegionMapping, connectivity_guid, "_connectivity")[0]
                dt = dao.get_generic_entity(model.DataType, old_projection_guid, "gid")[0]

                if dt.type == 'ProjectionSurfaceEEG':
                    LOGGER.debug("Previous Prj is surface: " + old_projection_guid)
                    new_projection_guid = old_projection_guid
                else:
                    new_projection_guid = session.execute(text("""SELECT DT.gid
                            FROM "MAPPED_PROJECTION_MATRIX_DATA" PMO, "DATA_TYPES" DTO,
                                 "MAPPED_PROJECTION_MATRIX_DATA" PM, "DATA_TYPES" DT
                            WHERE DTO.id=PMO.id and DT.id=PM.id and PM._sensors=PMO._sensors and
                                  PM._sources='""" + rm._surface + """' and
                                  DTO.gid='""" + old_projection_guid + """';""")).fetchall()[0][0]
                    LOGGER.debug("New Prj is surface: " + str(new_projection_guid))

                sensors_guid = session.execute(text("""SELECT _sensors
                            FROM "MAPPED_PROJECTION_MATRIX_DATA"
                            WHERE id = '""" + str(dt.id) + """';""")).fetchall()[0][0]

                del op_params[param_eeg_proj_old]
                op_params[param_eeg_proj_new] = str(new_projection_guid)
                op_params[param_eeg_sensors] = str(sensors_guid)
                op_params[param_eeg_rm] = str(rm.gid)

                eeg_op.parameters = json.dumps(op_params, cls=MapAsJson.MapAsJsonEncoder)
                LOGGER.debug("New params:" + eeg_op.parameters)
                files_helper.write_operation_metadata(eeg_op)

                burst = dao.get_burst_for_operation_id(eeg_op.id)
                if burst is not None:
                    LOGGER.debug("Updating burst:" + str(burst))
                    burst.prepare_after_load()
                    del burst.simulator_configuration[param_eeg_proj_old]
                    burst.simulator_configuration[param_eeg_proj_new] = {'value': str(new_projection_guid)}
                    burst.simulator_configuration[param_eeg_sensors] = {'value': str(sensors_guid)}
                    burst.simulator_configuration[param_eeg_rm] = {'value': str(rm.gid)}
                    burst._simulator_configuration = json.dumps(burst.simulator_configuration,
                                                                cls=MapAsJson.MapAsJsonEncoder)
                    if burst.id not in all_bursts:
                        all_bursts[burst.id] = burst

            except Exception:
                LOGGER.exception("Could not process " + str(eeg_op))

        session.add_all(all_eeg_ops)
        session.add_all(all_bursts.values())
        session.commit()

    except Exception:
        LOGGER.exception("Could not update Simulation Monitor Params")
    finally:
        session.close()
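
The raw SQL above interpolates values directly into the statement. With SQLAlchemy's text() the same sensor lookup can bind parameters instead; a sketch using the table and column names from the query (still assuming the legacy schema):

from sqlalchemy import text

def fetch_sensors_gid(session, datatype_id):
    stmt = text('SELECT _sensors FROM "MAPPED_PROJECTION_MATRIX_DATA" WHERE id = :dt_id')
    return session.execute(stmt, {"dt_id": datatype_id}).scalar()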
Example #25
 def get_burst_for_operation_id(operation_id, is_group=False):
     return dao.get_burst_for_operation_id(operation_id, is_group)
Example #26
def _adapt_simulation_monitor_params():
    """
    For previous simulations with the EEG monitor, adjust for the change of input parameters.
    """
    session = SA_SESSIONMAKER()

    param_connectivity = "connectivity"
    param_eeg_proj_old = "monitors_parameters_option_EEG_projection_matrix_data"
    param_eeg_proj_new = "monitors_parameters_option_EEG_projection"
    param_eeg_sensors = "monitors_parameters_option_EEG_sensors"
    param_eeg_rm = "monitors_parameters_option_EEG_region_mapping"

    try:
        all_eeg_ops = session.query(model.Operation).filter(
            model.Operation.parameters.ilike('%"' + param_eeg_proj_old + '"%')).all()
        files_helper = FilesHelper()
        all_bursts = dict()

        for eeg_op in all_eeg_ops:
            try:
                op_params = parse_json_parameters(eeg_op.parameters)
                LOGGER.debug("Updating " + str(op_params))
                old_projection_guid = op_params[param_eeg_proj_old]
                connectivity_guid = op_params[param_connectivity]

                rm = dao.get_generic_entity(RegionMapping, connectivity_guid, "_connectivity")[0]
                dt = dao.get_generic_entity(model.DataType, old_projection_guid, "gid")[0]

                if dt.type == 'ProjectionSurfaceEEG':
                    LOGGER.debug("Previous Prj is surface: " + old_projection_guid)
                    new_projection_guid = old_projection_guid
                else:
                    new_projection_guid = session.execute(text("""SELECT DT.gid
                            FROM "MAPPED_PROJECTION_MATRIX_DATA" PMO, "DATA_TYPES" DTO,
                                 "MAPPED_PROJECTION_MATRIX_DATA" PM, "DATA_TYPES" DT
                            WHERE DTO.id=PMO.id and DT.id=PM.id and PM._sensors=PMO._sensors and
                                  PM._sources='""" + rm._surface + """' and
                                  DTO.gid='""" + old_projection_guid + """';""")).fetchall()[0][0]
                    LOGGER.debug("New Prj is surface: " + str(new_projection_guid))

                sensors_guid = session.execute(text("""SELECT _sensors
                            FROM "MAPPED_PROJECTION_MATRIX_DATA"
                            WHERE id = '""" + str(dt.id) + """';""")).fetchall()[0][0]

                del op_params[param_eeg_proj_old]
                op_params[param_eeg_proj_new] = str(new_projection_guid)
                op_params[param_eeg_sensors] = str(sensors_guid)
                op_params[param_eeg_rm] = str(rm.gid)

                eeg_op.parameters = json.dumps(op_params, cls=MapAsJson.MapAsJsonEncoder)
                LOGGER.debug("New params:" + eeg_op.parameters)
                files_helper.write_operation_metadata(eeg_op)

                burst = dao.get_burst_for_operation_id(eeg_op.id)
                if burst is not None:
                    LOGGER.debug("Updating burst:" + str(burst))
                    burst.prepare_after_load()
                    del burst.simulator_configuration[param_eeg_proj_old]
                    burst.simulator_configuration[param_eeg_proj_new] = {'value': str(new_projection_guid)}
                    burst.simulator_configuration[param_eeg_sensors] = {'value': str(sensors_guid)}
                    burst.simulator_configuration[param_eeg_rm] = {'value': str(rm.gid)}
                    burst._simulator_configuration = json.dumps(burst.simulator_configuration,
                                                                cls=MapAsJson.MapAsJsonEncoder)
                    if burst.id not in all_bursts:
                        all_bursts[burst.id] = burst

            except Exception:
                LOGGER.exception("Could not process " + str(eeg_op))

        session.add_all(all_eeg_ops)
        session.add_all(list(all_bursts.values()))
        session.commit()

    except Exception:
        LOGGER.exception("Could not update Simulation Monitor Params")
    finally:
        session.close()
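Worth noting: the raw SQL above splices gids straight into the statement text. A safer variant (a sketch with the same identifiers as the example, not the upstream code) passes them as bound parameters to SQLAlchemy's text():

# Sketch: the sensors lookup with a bound parameter instead of string
# concatenation; table and column names are taken from the example above.
sensors_guid = session.execute(
    text('SELECT _sensors FROM "MAPPED_PROJECTION_MATRIX_DATA" WHERE id = :dt_id'),
    {"dt_id": dt.id}).fetchall()[0][0]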
Example #27
0
    def retrieve_project_full(self, project_id, applied_filters=None, current_page=1):
        """
        Return a Tuple with Project entity and Operations for current Project.
        :param project_id: Current Project Identifier
        :param applied_filters: Filters to apply on Operations
        :param current_page: Number for current page in operations
        """
        selected_project = self.find_project(project_id)
        total_filtered = self.count_filtered_operations(
            project_id, applied_filters)
        pages_no = total_filtered // OPERATIONS_PAGE_SIZE + (
            1 if total_filtered % OPERATIONS_PAGE_SIZE else 0)
        total_ops_nr = self.count_filtered_operations(project_id)

        start_idx = OPERATIONS_PAGE_SIZE * (current_page - 1)
        current_ops = dao.get_filtered_operations(project_id, applied_filters,
                                                  start_idx,
                                                  OPERATIONS_PAGE_SIZE)
        if current_ops is None:
            return selected_project, 0, [], 0

        operations = []
        for one_op in current_ops:
            try:
                result = {}
                if one_op[0] != one_op[1]:
                    result["id"] = str(one_op[0]) + "-" + str(one_op[1])
                else:
                    result["id"] = str(one_op[0])
                burst = dao.get_burst_for_operation_id(one_op[0])
                result["burst_name"] = burst.name if burst else '-'
                result["count"] = one_op[2]
                result["gid"] = one_op[13]
                operation_group_id = one_op[3]
                if operation_group_id:
                    try:
                        operation_group = dao.get_generic_entity(
                            OperationGroup, operation_group_id)[0]
                        result["group"] = operation_group.name
                        result["group"] = result["group"].replace("_", " ")
                        result["operation_group_id"] = operation_group.id
                        datatype_group = dao.get_datatypegroup_by_op_group_id(
                            operation_group_id)
                        result["datatype_group_gid"] = (
                            datatype_group.gid if datatype_group is not None else None)
                        result["gid"] = operation_group.gid
                        # Filter only viewers for current DataTypeGroup entity:
                        if datatype_group is None:
                            view_groups = None
                        else:
                            view_groups = AlgorithmService().get_visualizers_for_group(
                                datatype_group.gid)
                        result["view_groups"] = view_groups
                    except Exception:
                        self.logger.exception(
                            "We will ignore group on entity:" + str(one_op))
                        result["datatype_group_gid"] = None
                else:
                    result['group'] = None
                    result['datatype_group_gid'] = None
                result["algorithm"] = dao.get_algorithm_by_id(one_op[4])
                result["user"] = dao.get_user_by_id(one_op[5])
                if isinstance(one_op[6], str):
                    result["create"] = string2date(one_op[6])
                else:
                    result["create"] = one_op[6]
                if isinstance(one_op[7], str):
                    result["start"] = string2date(one_op[7])
                else:
                    result["start"] = one_op[7]
                if isinstance(one_op[8], str):
                    result["complete"] = string2date(one_op[8])
                else:
                    result["complete"] = one_op[8]

                if result["complete"] is not None and result[
                        "start"] is not None:
                    result["duration"] = format_timedelta(result["complete"] -
                                                          result["start"])
                result["status"] = one_op[9]
                result["additional"] = one_op[10]
                result["visible"] = True if one_op[11] > 0 else False
                result['operation_tag'] = one_op[12]
                if not result['group']:
                    result['results'] = dao.get_results_for_operation(
                        result['id'])
                else:
                    result['results'] = None
                operations.append(result)
            except Exception:
                # We got an exception when processing one Operation Row. We will continue with the rest of the rows.
                self.logger.exception(
                    "Could not prepare operation for display:" + str(one_op))
        return selected_project, total_ops_nr, operations, pages_no
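The pages_no arithmetic in this example is plain ceiling division; a quick self-contained check (illustrative page size, assuming OPERATIONS_PAGE_SIZE is a positive int):

# Sketch: the pages_no formula equals math.ceil over the same operands.
import math

OPERATIONS_PAGE_SIZE = 20  # illustrative value, not necessarily the project's default
for total_filtered in (0, 1, 20, 21, 40):
    pages_no = total_filtered // OPERATIONS_PAGE_SIZE + (1 if total_filtered % OPERATIONS_PAGE_SIZE else 0)
    assert pages_no == math.ceil(total_filtered / OPERATIONS_PAGE_SIZE)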