Example #1
    def __init__(self):
        BurstBaseController.__init__(self)
        self.burst_service = BurstService()
        self.workflow_service = WorkflowService()
        self.context = SelectedAdapterContext()

        ## Cache simulator Tree and Algorithm, for performance reasons.
        self.cached_simulator_algorithm = self.flow_service.get_algorithm_by_module_and_class(
            SIMULATOR_MODULE, SIMULATOR_CLASS)
Example #2
    def run(self):
        """
        Get the required data from the operation queue and launch the operation.
        """
        # Try to get a spot to launch our own operation.
        LOCKS_QUEUE.get(True)
        operation_id = self.operation_id
        run_params = [TvbProfile.current.PYTHON_INTERPRETER_PATH, '-m', 'tvb.core.operation_async_launcher',
                      str(operation_id), TvbProfile.CURRENT_PROFILE_NAME]

        # In the exceptional case where the user pressed stop while the thread was starting up,
        # we should no longer launch the operation.
        if not self.stopped():

            env = os.environ.copy()
            env['PYTHONPATH'] = os.pathsep.join(sys.path)
            # anything that was already in $PYTHONPATH should have been reproduced in sys.path

            launched_process = Popen(run_params, stdout=PIPE, stderr=PIPE, env=env)

            LOGGER.debug("Storing pid=%s for operation id=%s launched on local machine." % (operation_id,
                                                                                            launched_process.pid))
            op_ident = model.OperationProcessIdentifier(operation_id, pid=launched_process.pid)
            dao.store_entity(op_ident)

            if self.stopped():
                # In the exceptional case where the user pressed stop while the thread was starting up,
                # and stop_operation is concurrently querying the OperationProcessIdentifier.
                self.stop_pid(launched_process.pid)

            subprocess_result = launched_process.communicate()
            LOGGER.info("Finished with launch of operation %s" % operation_id)
            returned = launched_process.wait()

            if returned != 0 and not self.stopped():
                # Process did not end as expected. (e.g. Segmentation fault)
                workflow_service = WorkflowService()
                operation = dao.get_operation_by_id(self.operation_id)
                LOGGER.error("Operation suffered fatal failure! Exit code: %s Exit message: %s" % (returned,
                                                                                                   subprocess_result))

                workflow_service.persist_operation_state(operation, model.STATUS_ERROR,
                                                         "Operation failed unexpectedly! Please check the log files.")

                burst_entity = dao.get_burst_for_operation_id(self.operation_id)
                if burst_entity:
                    message = "Error in operation process! Possibly segmentation fault."
                    workflow_service.mark_burst_finished(burst_entity, error_message=message)

            del launched_process

        # Give back the empty spot now that the operation has finished.
        CURRENT_ACTIVE_THREADS.remove(self)
        LOCKS_QUEUE.put(1)
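
The LOCKS_QUEUE.get(True) / LOCKS_QUEUE.put(1) pair above uses a pre-filled Queue as a counting semaphore, capping how many operations run at once. Below is a minimal, self-contained sketch of that pattern; MAX_PARALLEL, the worker body, and the try/finally guard are illustrative assumptions, not the TVB implementation.

import threading
from Queue import Queue  # 'queue' on Python 3

MAX_PARALLEL = 4
LOCKS_QUEUE = Queue()
for _ in range(MAX_PARALLEL):
    LOCKS_QUEUE.put(1)  # one token per allowed concurrent operation

def run_operation(operation_id):
    LOCKS_QUEUE.get(True)  # block until a token becomes available
    try:
        print("launching operation %s" % operation_id)
    finally:
        LOCKS_QUEUE.put(1)  # always give the token back

threads = [threading.Thread(target=run_operation, args=(i,)) for i in range(10)]
for t in threads:
    t.start()
for t in threads:
    t.join()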
Example #3
    def setUp(self):
        """
        Sets up the testing environment;
        saves the config file;
        creates a test user and a test project;
        creates burst, operation, flow and workflow services.
        """
        self.test_user = TestFactory.create_user()
        self.test_project = TestFactory.create_project(self.test_user)
        self.workflow_service = WorkflowService()
        self.burst_service = BurstService()
        self.operation_service = OperationService()
        self.flow_service = FlowService()
Example #4
    def stop_operation(operation_id):
        """
        Stop the cluster job for a given operation id
        """
        operation = dao.try_get_operation_by_id(operation_id)
        if not operation or operation.has_finished:
            LOGGER.warning(
                "Operation %s is already stopped or could not be found; "
                "nothing to stop." % operation_id)
            return True

        operation_process = dao.get_operation_process_for_operation(
            operation_id)
        result = 0
        # Try to kill only if operation job process is not None
        if operation_process is not None:
            stop_command = TvbProfile.current.cluster.STOP_COMMAND % operation_process.job_id
            LOGGER.info("Stopping cluster operation: %s" % stop_command)
            result = os.system(stop_command)
            if result != 0:
                LOGGER.error(
                    "Stopping cluster operation was unsuccessful. "
                    "Check the job status with '%s'."
                    % (TvbProfile.current.cluster.STATUS_COMMAND % operation_process.job_id))

        WorkflowService().persist_operation_state(operation,
                                                  model.STATUS_CANCELED)

        return result == 0
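
STOP_COMMAND and STATUS_COMMAND above are %-style templates that take the cluster job id. Their concrete values come from the TVB profile; the ones below are hypothetical placeholders for a PBS-like scheduler, shown only to make the substitution explicit.

# Hypothetical profile values -- the real ones come from TvbProfile.current.cluster.
STOP_COMMAND = "qdel %s"     # assumption: cancel a cluster job by id
STATUS_COMMAND = "qstat %s"  # assumption: query a cluster job by id

job_id = "4242"
print(STOP_COMMAND % job_id)    # qdel 4242
print(STATUS_COMMAND % job_id)  # qstat 4242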
Example #5
def do_operation_launch(operation_id):
    """
    Event attached to the local queue for executing an operation, when we will have resources available.
    """
    LOGGER = get_logger('tvb.core.operation_async_launcher')

    try:
        LOGGER.debug("Loading operation with id=%s" % operation_id)
        curent_operation = dao.get_operation_by_id(operation_id)
        stored_adapter = curent_operation.algorithm
        LOGGER.debug("Importing Algorithm: " + str(stored_adapter.classname) +
                     " for Operation:" + str(curent_operation.id))
        PARAMS = parse_json_parameters(curent_operation.parameters)
        adapter_instance = ABCAdapter.build_adapter(stored_adapter)

        ## Uncomment below to profile an operation:
        ## import cherrypy.lib.profiler as profiler
        ## p = profiler.Profiler("/Users/lia.domide/TVB/profiler/")
        ## p.run(OperationService().initiate_prelaunch, current_operation, adapter_instance, {}, **PARAMS)

        OperationService().initiate_prelaunch(current_operation,
                                              adapter_instance, {}, **PARAMS)
        LOGGER.debug("Successfully finished operation " + str(operation_id))

    except Exception as excep:
        LOGGER.error("Could not execute operation " + str(operation_id))
        LOGGER.exception(excep)
        parent_burst = dao.get_burst_for_operation_id(operation_id)
        if parent_burst is not None:
            WorkflowService().mark_burst_finished(parent_burst,
                                                  error_message=str(excep))
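
The executor's run() above launches this module with 'python -m tvb.core.operation_async_launcher <operation_id> <profile_name>' (see run_params). The module's actual entry point is not part of this snippet; a plausible sketch of what it must do, where the profile-activation call is an assumption about TvbProfile's API:

if __name__ == '__main__':
    # Sketch only: wire the command-line arguments through to do_operation_launch.
    OPERATION_ID = sys.argv[1]
    PROFILE_NAME = sys.argv[2]
    TvbProfile.set_profile(PROFILE_NAME)  # hypothetical activation call
    do_operation_launch(OPERATION_ID)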
Example #6
    def stop_operation(operation_id):
        """
        Stop a thread for a given operation id
        """
        operation = dao.try_get_operation_by_id(operation_id)
        if not operation or operation.has_finished:
            LOGGER.warning("Operation already stopped or not found is given to stop job: %s" % operation_id)
            return True

        LOGGER.debug("Stopping operation: %s" % str(operation_id))

        ## Set the thread stop flag to true
        for thread in CURRENT_ACTIVE_THREADS:
            if int(thread.operation_id) == operation_id:
                thread.stop()
                LOGGER.debug("Found running thread for operation: %d" % operation_id)

        ## Kill Thread
        stopped = True
        operation_process = dao.get_operation_process_for_operation(operation_id)
        if operation_process is not None:
            ## Now try to kill the operation if it exists
            stopped = OperationExecutor.stop_pid(operation_process.pid)
            if not stopped:
                LOGGER.debug("Operation %d was probably killed from it's specific thread." % operation_id)
            else:
                LOGGER.debug("Stopped OperationExecutor process for %d" % operation_id)

        ## Mark operation as canceled in DB and on disk
        WorkflowService().persist_operation_state(operation, model.STATUS_CANCELED)

        return stopped
Example #7
    def __init__(self):
        BurstBaseController.__init__(self)
        self.burst_service = BurstService()
        self.workflow_service = WorkflowService()
        self.context = SelectedAdapterContext()

        ## Cache simulator Tree and Algorithm, for performance reasons.
        self.cached_simulator_algorithm = self.flow_service.get_algorithm_by_module_and_class(SIMULATOR_MODULE,
                                                                                              SIMULATOR_CLASS)
Example #8
    def transactional_setup_method(self):
        """
        Sets up the testing environment;
        saves the config file;
        creates a test user and a test project;
        creates burst, operation, flow and workflow services.
        """
        self.test_user = TestFactory.create_user()
        self.test_project = TestFactory.create_project(self.test_user)
        self.workflow_service = WorkflowService()
        self.burst_service = BurstService()
        self.operation_service = OperationService()
        self.flow_service = FlowService()
Example #9
    def run(self):
        """
        Get the required data from the operation queue and launch the operation.
        """
        # Try to get a spot to launch our own operation.
        LOCKS_QUEUE.get(True)
        operation_id = self.operation_id
        run_params = [TvbProfile.current.PYTHON_INTERPRETER_PATH, '-m', 'tvb.core.operation_async_launcher',
                      str(operation_id), TvbProfile.CURRENT_PROFILE_NAME]

        # In the exceptional case where the user pressed stop while the thread was starting up,
        # we should no longer launch the operation.
        if not self.stopped():

            env = os.environ.copy()
            env['PYTHONPATH'] = os.pathsep.join(sys.path)
            # anything that was already in $PYTHONPATH should have been reproduced in sys.path

            launched_process = Popen(run_params, stdout=PIPE, stderr=PIPE, env=env)

            LOGGER.debug("Storing pid=%s for operation id=%s launched on local machine." % (operation_id,
                                                                                            launched_process.pid))
            op_ident = model.OperationProcessIdentifier(operation_id, pid=launched_process.pid)
            dao.store_entity(op_ident)

            if self.stopped():
                # In the exceptional case where the user pressed stop while the thread was starting up,
                # and stop_operation is concurrently querying the OperationProcessIdentifier.
                self.stop_pid(launched_process.pid)

            subprocess_result = launched_process.communicate()
            LOGGER.info("Finished with launch of operation %s" % operation_id)
            returned = launched_process.wait()

            if returned != 0 and not self.stopped():
                # Process did not end as expected. (e.g. Segmentation fault)
                workflow_service = WorkflowService()
                operation = dao.get_operation_by_id(self.operation_id)
                LOGGER.error("Operation suffered fatal failure! Exit code: %s Exit message: %s" % (returned,
                                                                                                   subprocess_result))

                workflow_service.persist_operation_state(operation, model.STATUS_ERROR,
                                                         "Operation failed unexpectedly! Please check the log files.")

                burst_entity = dao.get_burst_for_operation_id(self.operation_id)
                if burst_entity:
                    message = "Error in operation process! Possibly segmentation fault."
                    workflow_service.mark_burst_finished(burst_entity, error_message=message)

            del launched_process

        # Give back the empty spot now that the operation has finished.
        CURRENT_ACTIVE_THREADS.remove(self)
        LOCKS_QUEUE.put(1)
Example #10
    def __init__(self):
        self.operation_service = OperationService()
        self.workflow_service = WorkflowService()
        self.logger = get_logger(self.__class__.__module__)
Example #11
class BurstService:
    """
    Service layer for Burst related entities.
    """

    def __init__(self):
        self.operation_service = OperationService()
        self.workflow_service = WorkflowService()
        self.logger = get_logger(self.__class__.__module__)

    def build_portlet_interface(self, portlet_configuration, project_id):
        """
        From a portlet_id and a project_id, first build the portlet
        entity, then get its configurable interface.

        :param portlet_configuration: a portlet configuration entity. It holds at
            least the portlet_id, and in case any default parameters were saved
            they can be rebuilt from the analyzers / visualizer parameters
        :param project_id: the id of the current project
            
        :returns: the portlet interface will be of the following form::
            [{'interface': adapter_interface, 
            'prefix': prefix_for_parameter_names, 
            'subalg': {algorithm_field_name: default_algorithm_value},
            'algo_group': algorithm_group,
            'alg_ui_name': displayname},
            ......]
            A list of dictionaries for each adapter that makes up the portlet.
            
        """
        portlet_entity = dao.get_portlet_by_id(portlet_configuration.portlet_id)
        if portlet_entity is None:
            raise InvalidPortletConfiguration(
                "No portlet entity located in database with id=%s. "
                "Portlet configuration %s is not valid." % (portlet_configuration.portlet_id, portlet_configuration)
            )
        portlet_configurer = PortletConfigurer(portlet_entity)
        portlet_interface = portlet_configurer.get_configurable_interface()
        self.logger.debug("Created interface for portlet " + str([portlet_entity]))

        for adapter_conf in portlet_interface:
            interface = adapter_conf.interface
            interface = FlowService().prepare_parameters(interface, project_id, adapter_conf.group.fk_category)
            interface = ABCAdapter.prepare_param_names(interface, adapter_conf.prefix)
            adapter_conf.interface = interface

        portlet_configurer.update_default_values(portlet_interface, portlet_configuration)
        portlet_configurer.prefix_adapters_parameters(portlet_interface)

        return portlet_interface

    @staticmethod
    def update_portlet_configuration(portlet_configuration, submited_parameters):
        """
        :param portlet_configuration: the portlet configuration that needs to be updated
        :param submited_parameters: the parameters as submitted from the UI. This
            is a dictionary of the form:
            {'dynamic' : {name: value pairs}, 'static' : {name: value pairs}}
            
        All names are prefixed with adapter specific generated prefix.
        """
        portlet_entity = dao.get_portlet_by_id(portlet_configuration.portlet_id)
        portlet_configurer = PortletConfigurer(portlet_entity)
        return portlet_configurer.update_portlet_configuration(portlet_configuration, submited_parameters)

    @staticmethod
    def new_burst_configuration(project_id):
        """
        Return a new burst configuration entity with all the default values.
        """
        burst_configuration = model.BurstConfiguration(project_id)
        burst_configuration.selected_tab = 0
        BurstService.set_default_portlets(burst_configuration)
        return burst_configuration

    @staticmethod
    def set_default_portlets(burst_configuration):
        """
        Sets the default portlets for the specified burst configuration.
        The default portlets are specified in the __init__.py script from tvb root.
        """
        for tab_idx, value in DEFAULT_PORTLETS.items():
            for sel_idx, portlet_identifier in value.items():
                portlet = BurstService.get_portlet_by_identifier(portlet_identifier)
                if portlet is not None:
                    portlet_configuration = BurstService.new_portlet_configuration(
                        portlet.id, tab_idx, sel_idx, portlet.algorithm_identifier
                    )
                    burst_configuration.set_portlet(tab_idx, sel_idx, portlet_configuration)
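
    # Note: DEFAULT_PORTLETS above is iterated as a nested dict keyed by tab index and
    # selection index, with portlet algorithm identifiers as leaves. A hypothetical value,
    # shown only to make that shape concrete (the real mapping is defined in tvb's root
    # __init__.py):
    #     DEFAULT_PORTLETS = {0: {0: 'TimeSeries'}}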

    @staticmethod
    def _store_burst_config(burst_config):
        """
        Store a burst configuration entity.
        """
        burst_config.prepare_before_save()
        saved_entity = dao.store_entity(burst_config)
        return saved_entity.id

    @staticmethod
    def get_available_bursts(project_id):
        """
        Return all the bursts for the current project.
        """
        bursts = dao.get_bursts_for_project(project_id, page_size=MAX_BURSTS_DISPLAYED) or []
        for burst in bursts:
            burst.prepare_after_load()
        return bursts

    @staticmethod
    def populate_burst_disk_usage(bursts):
        """
        Adds a disk_usage field to each burst object.
        The disk usage is computed as the total size of the datatypes generated by that burst.
        """
        sizes = dao.compute_bursts_disk_size([b.id for b in bursts])
        for b in bursts:
            b.disk_size = format_bytes_human(sizes[b.id])
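
    # format_bytes_human is used above but not defined in this snippet. A minimal
    # sketch of such a helper (an assumption about its behavior, not the TVB
    # implementation):
    @staticmethod
    def _format_bytes_human_sketch(size_in_bytes):
        # Render a byte count with a binary-prefix unit, e.g. 2048 -> '2.0 KiB'.
        for unit in ('B', 'KiB', 'MiB', 'GiB', 'TiB'):
            if size_in_bytes < 1024.0 or unit == 'TiB':
                return "%.1f %s" % (size_in_bytes, unit)
            size_in_bytes /= 1024.0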

    @staticmethod
    def rename_burst(burst_id, new_name):
        """
        Rename the burst given by burst_id, setting its name to
        new_name.
        """
        burst = dao.get_burst_by_id(burst_id)
        burst.name = new_name
        dao.store_entity(burst)

    def load_burst(self, burst_id):
        """
        :param burst_id: the id of the burst that should be loaded
        
        Having this input the method should:
        
            - load the entity from the DB
            - get all the workflow steps for the saved burst id
            - go through the visualization workflow steps to create the tab
                configuration of the burst using the tab_index and index_in_tab 
                fields saved on each workflow_step
                
        """
        burst = dao.get_burst_by_id(burst_id)
        burst.prepare_after_load()
        burst.reset_tabs()
        burst_workflows = dao.get_workflows_for_burst(burst.id)

        group_gid = None
        if len(burst_workflows) == 1:
            # A simple burst with no range parameters
            burst = self.__populate_tabs_from_workflow(burst, burst_workflows[0])
        elif len(burst_workflows) > 1:
            # A burst with a range of values created multiple workflows, and we need
            # to launch a parameter space exploration with the resulting group
            self.__populate_tabs_from_workflow(burst, burst_workflows[0])
            executed_steps = dao.get_workflow_steps(burst_workflows[0].id)

            operation = dao.get_operation_by_id(executed_steps[0].fk_operation)
            if operation.operation_group:
                workflow_group = dao.get_datatypegroup_by_op_group_id(operation.operation_group.id)
                group_gid = workflow_group.gid
        return burst, group_gid

    @staticmethod
    def __populate_tabs_from_workflow(burst_entity, workflow):
        """
        Given a burst and a workflow populate the tabs of the burst with the PortletConfigurations
        generated from the steps of the workflow.
        """
        visualizers = dao.get_visualization_steps(workflow.id)
        for entry in visualizers:
            ## For each visualization step, also load all of the analyze steps.
            portlet_cfg = PortletConfiguration(entry.fk_portlet)
            portlet_cfg.set_visualizer(entry)
            analyzers = dao.get_workflow_steps_for_position(entry.fk_workflow, entry.tab_index, entry.index_in_tab)
            portlet_cfg.set_analyzers(analyzers)
            burst_entity.tabs[entry.tab_index].portlets[entry.index_in_tab] = portlet_cfg
        return burst_entity

    def load_tab_configuration(self, burst_entity, op_id):
        """
        Given a burst entity and an operation id, find the workflow to which the op_id
        belongs, then load the burst_entity's tab configuration with those workflow steps.
        """
        originating_workflow = dao.get_workflow_for_operation_id(op_id)
        burst_entity = self.__populate_tabs_from_workflow(burst_entity, originating_workflow)
        return burst_entity

    @staticmethod
    def new_portlet_configuration(portlet_id, tab_nr=-1, index_in_tab=-1, portlet_name="Default"):
        """
        Return a new portlet configuration entity with default parameters.
        
        :param portlet_id: the id of the portlet for which a configuration will
            be stored
        :param tab_nr: the index of the currently selected tab
        :param index_in_tab: the index from the currently selected tab
        
        """
        portlet_entity = dao.get_portlet_by_id(portlet_id)
        if portlet_entity is None:
            raise InvalidPortletConfiguration("No portlet entity located in database with id=%s." % portlet_id)
        portlet_configurer = PortletConfigurer(portlet_entity)
        configuration = portlet_configurer.create_new_portlet_configuration(portlet_name)
        for wf_step in configuration.analyzers:
            wf_step.tab_index = tab_nr
            wf_step.index_in_tab = index_in_tab
        configuration.visualizer.tab_index = tab_nr
        configuration.visualizer.index_in_tab = index_in_tab
        return configuration

    @staticmethod
    def get_available_portlets():
        """
        :returns: a list of all the available portlet entities
        """
        return dao.get_available_portlets()

    @staticmethod
    def get_portlet_by_id(portlet_id):
        """
        :returns: the portlet entity with the given portlet_id
        """
        return dao.get_portlet_by_id(portlet_id)

    @staticmethod
    def get_portlet_by_identifier(portlet_identifier):
        """
        :returns: the portlet entity with the given algorithm identifier (portlet_identifier)
        """
        return dao.get_portlet_by_identifier(portlet_identifier)

    def launch_burst(self, burst_configuration, simulator_index, simulator_id, user_id, launch_mode=LAUNCH_NEW):
        """
        Given a burst configuration and all the necessary data do the actual launch.
        
        :param burst_configuration: BurstConfiguration   
        :param simulator_index: the position within the workflow step list that the simulator will take. This is needed
            so that the rest of the portlet workflow steps know which steps their dynamic parameters come from.
        :param simulator_id: the id of the simulator adapter as stored in the DB. It's needed to load the simulator algo
            group and category that are then passed to the launcher's prepare_operation method.
        :param user_id: the id of the user that launched this burst
        :param launch_mode: new/branch/continue
        """
        ## 1. Prepare BurstConfiguration entity
        if launch_mode == LAUNCH_NEW:
            ## Fully new entity for new simulation
            burst_config = burst_configuration.clone()
            if burst_config.name is None:
                new_id = dao.get_max_burst_id() + 1
                burst_config.name = "simulation_" + str(new_id)
        else:
            ## Branch or Continue simulation
            burst_config = burst_configuration
            simulation_state = dao.get_generic_entity(
                SIMULATION_DATATYPE_MODULE + "." + SIMULATION_DATATYPE_CLASS, burst_config.id, "fk_parent_burst"
            )
            if simulation_state is None or len(simulation_state) < 1:
                exc = BurstServiceException(
                    "Simulation State not found for %s, " "thus we are unable to branch from it!" % burst_config.name
                )
                self.logger.error(exc)
                raise exc

            simulation_state = simulation_state[0]
            burst_config.update_simulation_parameter("simulation_state", simulation_state.gid)
            burst_config = burst_configuration.clone()

            count = dao.count_bursts_with_name(burst_config.name, burst_config.fk_project)
            burst_config.name = burst_config.name + "_" + launch_mode + str(count)

        ## 2. Create Operations and do the actual launch
        if launch_mode in [LAUNCH_NEW, LAUNCH_BRANCH]:
            ## New Burst entry in the history
            burst_id = self._store_burst_config(burst_config)
            thread = threading.Thread(
                target=self._async_launch_and_prepare,
                kwargs={
                    "burst_config": burst_config,
                    "simulator_index": simulator_index,
                    "simulator_id": simulator_id,
                    "user_id": user_id,
                },
            )
            thread.start()
            return burst_id, burst_config.name
        else:
            ## Continue simulation
            ## TODO
            return burst_config.id, burst_config.name

    @transactional
    def _prepare_operations(self, burst_config, simulator_index, simulator_id, user_id):
        """
        Prepare all required operations for burst launch.
        """
        project_id = burst_config.fk_project
        burst_id = burst_config.id
        workflow_step_list = []
        starting_index = simulator_index + 1

        sim_algo = FlowService().get_algorithm_by_identifier(simulator_id)
        metadata = {DataTypeMetaData.KEY_BURST: burst_id}
        launch_data = burst_config.get_all_simulator_values()[0]
        operations, group = self.operation_service.prepare_operations(
            user_id, project_id, sim_algo, sim_algo.algo_group.group_category, metadata, **launch_data
        )
        group_launched = group is not None
        if group_launched:
            starting_index += 1

        for tab in burst_config.tabs:
            for portlet_cfg in tab.portlets:
                ### For each portlet configuration stored, update the step index ###
                ### and also change the dynamic parameters step indexes to point ###
                ### to the simulator outputs.                                     ##
                if portlet_cfg is not None:
                    analyzers = portlet_cfg.analyzers
                    visualizer = portlet_cfg.visualizer
                    for entry in analyzers:
                        entry.step_index = starting_index
                        self.workflow_service.set_dynamic_step_references(entry, simulator_index)
                        workflow_step_list.append(entry)
                        starting_index += 1
                    ### Change the dynamic parameters to point to the last adapter from this portlet execution.
                    visualizer.step_visible = False
                    if len(workflow_step_list) > 0 and isinstance(workflow_step_list[-1], model.WorkflowStep):
                        self.workflow_service.set_dynamic_step_references(visualizer, workflow_step_list[-1].step_index)
                    else:
                        self.workflow_service.set_dynamic_step_references(visualizer, simulator_index)
                    ### Only a single operation gets the visualization step; otherwise it is useless.
                    if not group_launched:
                        workflow_step_list.append(visualizer)

        if group_launched:
            ###  For a group of operations, make sure the metric for PSE view
            ### is also computed, immediately after the simulation.
            metric_algo, metric_group = FlowService().get_algorithm_by_module_and_class(
                MEASURE_METRICS_MODULE, MEASURE_METRICS_CLASS
            )
            _, metric_interface = FlowService().prepare_adapter(project_id, metric_group)
            dynamics = {}
            for entry in metric_interface:
                # We have a select that should be the dataType and a select multiple with the
                # required metric algorithms to be evaluated. Only dynamic parameter should be
                # the select type.
                if entry[ABCAdapter.KEY_TYPE] == "select":
                    dynamics[entry[ABCAdapter.KEY_NAME]] = {
                        WorkflowStepConfiguration.DATATYPE_INDEX_KEY: 0,
                        WorkflowStepConfiguration.STEP_INDEX_KEY: simulator_index,
                    }
            metric_step = model.WorkflowStep(
                algorithm_id=metric_algo.id, step_index=simulator_index + 1, static_param={}, dynamic_param=dynamics
            )
            metric_step.step_visible = False
            workflow_step_list.insert(0, metric_step)

        workflows = self.workflow_service.create_and_store_workflow(
            project_id, burst_id, simulator_index, simulator_id, operations
        )
        self.operation_service.prepare_operations_for_workflowsteps(
            workflow_step_list, workflows, user_id, burst_id, project_id, group, operations
        )
        operation_ids = [operation.id for operation in operations]
        return operation_ids

    def _async_launch_and_prepare(self, burst_config, simulator_index, simulator_id, user_id):
        """
        Prepare operations asynchronously.
        """
        try:
            operation_ids = self._prepare_operations(burst_config, simulator_index, simulator_id, user_id)
            self.logger.debug("Starting a total of %s workflows" % (len(operation_ids)))
            wf_errs = 0
            for operation_id in operation_ids:
                try:
                    OperationService().launch_operation(operation_id, True)
                except Exception as excep:
                    self.logger.error(excep)
                    wf_errs += 1
                    self.workflow_service.mark_burst_finished(burst_config, error_message=str(excep))

            self.logger.debug(
                "Finished launching workflows. "
                + str(len(operation_ids) - wf_errs)
                + " were launched successfully, "
                + str(wf_errs)
                + " had error on pre-launch steps"
            )
        except Exception as excep:
            self.logger.error(excep)
            self.workflow_service.mark_burst_finished(burst_config, error_message=str(excep))
Example #12
class BurstController(base.BaseController):
    """
    Controller class for Burst-Pages.
    """


    def __init__(self):
        base.BaseController.__init__(self)
        self.burst_service = BurstService()
        self.workflow_service = WorkflowService()
        self.context = SelectedAdapterContext()

        ## Cache simulator Tree, Algorithm and AlgorithmGroup, for performance reasons.
        algorithm, self.cached_simulator_algo_group = self.flow_service.get_algorithm_by_module_and_class(
            SIMULATOR_MODULE, SIMULATOR_CLASS)
        self.cached_simulator_algorithm_id = algorithm.id


    @property
    @context_selected()
    def cached_simulator_input_tree(self):
        """
        Cache the Simulator's input tree, for performance reasons.
        Without a restart, the introspected tree will not differ between executions.
        :returns: Simulator's Input Tree (copy from cache or just loaded)
        """
        cached_simulator_tree = base.get_from_session(base.KEY_CACHED_SIMULATOR_TREE)
        if cached_simulator_tree is None:
            cached_simulator_tree = self.flow_service.prepare_adapter(base.get_current_project().id,
                                                                      self.cached_simulator_algo_group)[1]
            base.add2session(base.KEY_CACHED_SIMULATOR_TREE, cached_simulator_tree)
        return copy.deepcopy(cached_simulator_tree)


    @cherrypy.expose
    @using_template('base_template')
    @base.settings()
    @logged()
    @context_selected()
    def index(self):
        """Get on burst main page"""
        template_specification = dict(mainContent="burst/main_burst", title="Simulation Cockpit",
                                      baseUrl=cfg.BASE_URL, includedResources='project/included_resources')
        portlets_list = self.burst_service.get_available_portlets()
        session_stored_burst = base.get_from_session(base.KEY_BURST_CONFIG)
        if session_stored_burst is None or session_stored_burst.id is None:
            if session_stored_burst is None:
                session_stored_burst = self.burst_service.new_burst_configuration(base.get_current_project().id)
                base.add2session(base.KEY_BURST_CONFIG, session_stored_burst)

            adapter_interface = self.cached_simulator_input_tree
            if session_stored_burst is not None:
                current_data = session_stored_burst.get_all_simulator_values()[0]
                adapter_interface = ABCAdapter.fill_defaults(adapter_interface, current_data, True)
                ### Add simulator tree to session to be available in filters
                self.context.add_adapter_to_session(self.cached_simulator_algo_group, adapter_interface, current_data)
            template_specification['inputList'] = adapter_interface

        selected_portlets = session_stored_burst.update_selected_portlets()
        template_specification['burst_list'] = self.burst_service.get_available_bursts(base.get_current_project().id)
        template_specification['portletList'] = portlets_list
        template_specification['selectedPortlets'] = json.dumps(selected_portlets)
        template_specification['draw_hidden_ranges'] = True
        template_specification['burstConfig'] = session_stored_burst

        ### Prepare PSE available metrics
        ### We put here all available algorithms, because the metrics select area is a generic one, 
        ### and not loaded with every Burst Group change in history.
        algo_group = self.flow_service.get_algorithm_by_module_and_class(MEASURE_METRICS_MODULE,
                                                                         MEASURE_METRICS_CLASS)[1]
        adapter_instance = ABCAdapter.build_adapter(algo_group)
        if adapter_instance is not None and hasattr(adapter_instance, 'available_algorithms'):
            template_specification['available_metrics'] = [metric_name for metric_name
                                                           in adapter_instance.available_algorithms.keys()]
        else:
            template_specification['available_metrics'] = []

        template_specification[base.KEY_PARAMETERS_CONFIG] = False
        template_specification[base.KEY_SECTION] = 'burst'
        return self.fill_default_attributes(template_specification)


    @cherrypy.expose
    @using_template('burst/burst_history')
    def load_burst_history(self):
        """
        Load the available bursts that are stored in the database at this time.
        This is one workaround for the browser back-button ('chrome-back') problem.
        """
        session_burst = base.get_from_session(base.KEY_BURST_CONFIG)
        return {'burst_list': self.burst_service.get_available_bursts(base.get_current_project().id),
                'selectedBurst': session_burst.id}


    @cherrypy.expose
    @ajax_call(False)
    def get_selected_burst(self):
        """
        Return the burst that is currently stored in session.
        This is one workaround for the browser back-button ('chrome-back') problem.
        """
        session_burst = base.get_from_session(base.KEY_BURST_CONFIG)
        if session_burst.id:
            return str(session_burst.id)
        else:
            return 'None'


    @cherrypy.expose
    @using_template('burst/portlet_configure_parameters')
    def get_portlet_configurable_interface(self, index_in_tab):
        """
        From the position given by the tab index and the index from that tab, 
        get the portlet configuration and build the configurable interface
        for that portlet.
        """
        burst_config = base.get_from_session(base.KEY_BURST_CONFIG)
        tab_index = burst_config.selected_tab
        portlet_config = burst_config.tabs[tab_index].portlets[int(index_in_tab)]
        portlet_interface = self.burst_service.build_portlet_interface(portlet_config, base.get_current_project().id)

        full_portlet_input_tree = []
        for entry in portlet_interface:
            full_portlet_input_tree.extend(entry.interface)
        self.context.add_portlet_to_session(full_portlet_input_tree)

        portlet_interface = {"adapters_list": portlet_interface,
                             base.KEY_PARAMETERS_CONFIG: False,
                             base.KEY_SESSION_TREE: self.context.KEY_PORTLET_CONFIGURATION}
        return self.fill_default_attributes(portlet_interface)


    @cherrypy.expose
    @using_template('burst/portlets_preview')
    def portlet_tab_display(self, **data):
        """
        When saving a new configuration of tabs, check if any of the old
        portlets are still present, and if that is the case use their
        parameter configurations.
        
        For all the new portlets add entries in the burst configuration. 
        Also remove old portlets that are no longer saved.
        """
        tab_portlets_list = json.loads(data['tab_portlets_list'])
        burst_config = base.get_from_session(base.KEY_BURST_CONFIG)
        selected_tab_idx = burst_config.selected_tab
        for tab_idx in xrange(len(tab_portlets_list)):
            current_tab = burst_config.tabs[tab_idx]
            ### When configuration already exists, and new portlets          #####
            ### are selected, first check if any configuration was saved for #####
            ### each portlet and if that is the case, use it. If none is present #
            ### create a new one.                                              ###
            for idx_in_tab in xrange(len(tab_portlets_list[tab_idx])):
                portlet_id = tab_portlets_list[tab_idx][idx_in_tab][0]
                portlet_name = tab_portlets_list[tab_idx][idx_in_tab][1]
                if portlet_id >= 0:
                    saved_config = current_tab.portlets[idx_in_tab]
                    if saved_config is None or saved_config.portlet_id != portlet_id:
                        current_tab.portlets[idx_in_tab] = self.burst_service.new_portlet_configuration(portlet_id,
                                                                                    tab_idx, idx_in_tab, portlet_name)
                    else:
                        saved_config.visualizer.ui_name = portlet_name
                else:
                    current_tab.portlets[idx_in_tab] = None
            # For generating the HTML, get the corresponding portlet for each id.
        selected_tab_portlets = []
        saved_selected_tab = burst_config.tabs[selected_tab_idx]
        for portlet in saved_selected_tab.portlets:
            if portlet:
                portlet_id = int(portlet.portlet_id)
                portlet_entity = self.burst_service.get_portlet_by_id(portlet_id)
                portlet_entity.name = portlet.name
                selected_tab_portlets.append(portlet_entity)

        return {'portlet_tab_list': selected_tab_portlets}


    @cherrypy.expose
    @using_template('burst/portlets_preview')
    def get_configured_portlets(self):
        """
        Return the portlets for one given tab. This is used when changing
        from tab to tab and selecting which portlets will be displayed.
        """
        burst_config = base.get_from_session(base.KEY_BURST_CONFIG)
        if burst_config is None:
            return {'portlet_tab_list': []}

        tab_idx = burst_config.selected_tab
        tab_portlet_list = []
        for portlet_cfg in burst_config.tabs[int(tab_idx)].portlets:
            if portlet_cfg is not None:
                portlet_entity = self.burst_service.get_portlet_by_id(portlet_cfg.portlet_id)
                portlet_entity.name = portlet_cfg.name
                tab_portlet_list.append(portlet_entity)
        return {'portlet_tab_list': tab_portlet_list}


    @cherrypy.expose
    @ajax_call()
    def change_selected_tab(self, tab_nr):
        """
        Set :param tab_nr: as the currently selected tab in the stored burst
        configuration. 
        """
        base.get_from_session(base.KEY_BURST_CONFIG).selected_tab = int(tab_nr)


    @cherrypy.expose
    @ajax_call()
    def get_portlet_session_configuration(self):
        """
        Get the current configuration of portlets stored in session for this burst,
        as a json.
        """
        burst_entity = base.get_from_session(base.KEY_BURST_CONFIG)
        returned_configuration = burst_entity.update_selected_portlets()
        return returned_configuration


    @cherrypy.expose
    @ajax_call(False)
    def save_parameters(self, index_in_tab, **data):
        """
        Save the portlet parameters.

        :param index_in_tab: the index of the configured portlet in the selected tab
        :param data: the {name: value} dictionary configuration of the current portlet

        Having these inputs, update the configuration of the portlet in the
        corresponding tab position from the burst configuration. The tab index
        is read from the session-stored burst configuration.
        """
        burst_config = base.get_from_session(base.KEY_BURST_CONFIG)
        tab_nr = burst_config.selected_tab
        old_portlet_config = burst_config.tabs[int(tab_nr)].portlets[int(index_in_tab)]

        # Replace all empty entries with None
        for entry in data:
            if data[entry] == '':
                data[entry] = None

        need_relaunch = self.burst_service.update_portlet_configuration(old_portlet_config, data)
        if need_relaunch:
            #### Reset Burst Configuration into an entity not persisted (id = None for all)
            base.add2session(base.KEY_BURST_CONFIG, burst_config.clone())
            return "relaunchView"
        else:
            self.workflow_service.store_workflow_step(old_portlet_config.visualizer)
            return "noRelaunch"


    @cherrypy.expose
    @ajax_call()
    def rename_burst(self, burst_id, burst_name):
        """
        Rename the burst given by burst_id, setting it's new name to
        burst_name.
        """
        self._validate_burst_name(burst_name)
        self.burst_service.rename_burst(burst_id, burst_name)


    @cherrypy.expose
    @ajax_call()
    def launch_burst(self, launch_mode, burst_name, **data):
        """
        Do the actual burst launch, using the configuration saved in current session.
        :param launch_mode: new/branch/continue
        :param burst_name: user-given burst name. It can be empty (in which case we will fill it in as simulation_x)
        :param data: kwargs for simulation input parameters.
        """
        burst_config = base.get_from_session(base.KEY_BURST_CONFIG)

        ## Validate new burst-name
        if burst_name != 'none_undefined':
            self._validate_burst_name(burst_name)
            burst_config.name = burst_name

        ## Fill all parameters 
        user_id = base.get_logged_user().id
        data[base.KEY_ADAPTER] = self.cached_simulator_algorithm_id
        burst_config.update_simulator_configuration(data)
        burst_config.fk_project = base.get_current_project().id

        ## Do the asynchronous launch
        burst_id, burst_name = self.burst_service.launch_burst(burst_config, 0, self.cached_simulator_algorithm_id,
                                                               user_id, launch_mode)
        return [burst_id, burst_name]


    @cherrypy.expose
    @ajax_call()
    def load_burst(self, burst_id):
        """
        Given a clicked burst from the history and the selected tab, load all 
        the required data from that burst. Return a value specifying if it was a result
        of a range launch (OperationGroup) or not.
        """
        try:
            old_burst = base.get_from_session(base.KEY_BURST_CONFIG)
            burst, group_gid = self.burst_service.load_burst(burst_id)
            burst.selected_tab = old_burst.selected_tab
            base.add2session(base.KEY_BURST_CONFIG, burst)
            return {'status': burst.status, 'group_gid': group_gid, 'selected_tab': burst.selected_tab}
        except Exception as excep:
            ### Most probably the burst was removed. Delete it from session, so that the client
            ### has a good chance of getting a good response on refresh.
            self.logger.error(excep)
            base.remove_from_session(base.KEY_BURST_CONFIG)
            raise excep
Example #13
class BurstService():
    """
    Service layer for Burst related entities.
    """
    def __init__(self):
        self.operation_service = OperationService()
        self.workflow_service = WorkflowService()
        self.logger = get_logger(self.__class__.__module__)

    def build_portlet_interface(self, portlet_configuration, project_id):
        """
        From a portlet_id and a project_id, first build the portlet
        entity, then get its configurable interface.

        :param portlet_configuration: a portlet configuration entity. It holds at
            least the portlet_id, and in case any default parameters were saved
            they can be rebuilt from the analyzers / visualizer parameters
        :param project_id: the id of the current project
            
        :returns: the portlet interface will be of the following form::
            [{'interface': adapter_interface, 
            'prefix': prefix_for_parameter_names, 
            'subalg': {algorithm_field_name: default_algorithm_value},
            'algo_group': algorithm_group,
            'alg_ui_name': displayname},
            ......]
            A list of dictionaries for each adapter that makes up the portlet.
            
        """
        portlet_entity = dao.get_portlet_by_id(
            portlet_configuration.portlet_id)
        if portlet_entity is None:
            raise InvalidPortletConfiguration(
                "No portlet entity located in database with id=%s. "
                "Portlet configuration %s is not valid." %
                (portlet_configuration.portlet_id, portlet_configuration))
        portlet_configurer = PortletConfigurer(portlet_entity)
        portlet_interface = portlet_configurer.get_configurable_interface()
        self.logger.debug("Created interface for portlet " +
                          str([portlet_entity]))

        for adapter_conf in portlet_interface:
            interface = adapter_conf.interface
            interface = FlowService().prepare_parameters(
                interface, project_id, adapter_conf.group.fk_category)
            interface = ABCAdapter.prepare_param_names(interface,
                                                       adapter_conf.prefix)
            adapter_conf.interface = interface

        portlet_configurer.update_default_values(portlet_interface,
                                                 portlet_configuration)
        portlet_configurer.prefix_adapters_parameters(portlet_interface)

        return portlet_interface

    @staticmethod
    def update_portlet_configuration(portlet_configuration,
                                     submited_parameters):
        """
        :param portlet_configuration: the portlet configuration that needs to be updated
        :param submited_parameters: the parameters as submitted from the UI. This
            is a dictionary of the form:
            {'dynamic' : {name: value pairs}, 'static' : {name: value pairs}}
            
        All names are prefixed with adapter specific generated prefix.
        """
        portlet_entity = dao.get_portlet_by_id(
            portlet_configuration.portlet_id)
        portlet_configurer = PortletConfigurer(portlet_entity)
        return portlet_configurer.update_portlet_configuration(
            portlet_configuration, submited_parameters)

    @staticmethod
    def new_burst_configuration(project_id):
        """
        Return a new burst configuration entity with all the default values.
        """
        burst_configuration = model.BurstConfiguration(project_id)
        burst_configuration.selected_tab = 0
        BurstService.set_default_portlets(burst_configuration)
        return burst_configuration

    @staticmethod
    def set_default_portlets(burst_configuration):
        """
        Sets the default portlets for the specified burst configuration.
        The default portlets are specified in the __init__.py script from tvb root.
        """
        for tab_idx, value in DEFAULT_PORTLETS.items():
            for sel_idx, portlet_identifier in value.items():
                portlet = BurstService.get_portlet_by_identifier(
                    portlet_identifier)
                if portlet is not None:
                    portlet_configuration = BurstService.new_portlet_configuration(
                        portlet.id, tab_idx, sel_idx,
                        portlet.algorithm_identifier)
                    burst_configuration.set_portlet(tab_idx, sel_idx,
                                                    portlet_configuration)

    @staticmethod
    def _store_burst_config(burst_config):
        """
        Store a burst configuration entity.
        """
        burst_config.prepare_before_save()
        saved_entity = dao.store_entity(burst_config)
        return saved_entity.id

    @staticmethod
    def get_available_bursts(project_id):
        """
        Return all the bursts for the current project.
        """
        bursts = dao.get_bursts_for_project(
            project_id, page_size=MAX_BURSTS_DISPLAYED) or []
        for burst in bursts:
            burst.prepare_after_load()
        return bursts

    @staticmethod
    def populate_burst_disk_usage(bursts):
        """
        Adds a disk_usage field to each burst object.
        The disk usage is computed as the total size of the datatypes generated by that burst.
        """
        sizes = dao.compute_bursts_disk_size([b.id for b in bursts])
        for b in bursts:
            b.disk_size = format_bytes_human(sizes[b.id])

    @staticmethod
    def rename_burst(burst_id, new_name):
        """
        Rename the burst given by burst_id, setting its name to
        new_name.
        """
        burst = dao.get_burst_by_id(burst_id)
        burst.name = new_name
        dao.store_entity(burst)

    def load_burst(self, burst_id):
        """
        :param burst_id: the id of the burst that should be loaded
        
        Having this input the method should:
        
            - load the entity from the DB
            - get all the workflow steps for the saved burst id
            - go through the visualization workflow steps to create the tab
                configuration of the burst using the tab_index and index_in_tab 
                fields saved on each workflow_step
                
        """
        burst = dao.get_burst_by_id(burst_id)
        burst.prepare_after_load()
        burst.reset_tabs()
        burst_workflows = dao.get_workflows_for_burst(burst.id)

        group_gid = None
        if len(burst_workflows) == 1:
            # A simple burst with no range parameters
            burst = self.__populate_tabs_from_workflow(burst,
                                                       burst_workflows[0])
        elif len(burst_workflows) > 1:
            # A burst with a range of values created multiple workflows, and we need
            # to launch a parameter space exploration with the resulting group
            self.__populate_tabs_from_workflow(burst, burst_workflows[0])
            executed_steps = dao.get_workflow_steps(burst_workflows[0].id)

            operation = dao.get_operation_by_id(executed_steps[0].fk_operation)
            if operation.operation_group:
                workflow_group = dao.get_datatypegroup_by_op_group_id(
                    operation.operation_group.id)
                group_gid = workflow_group.gid
        return burst, group_gid

    @staticmethod
    def __populate_tabs_from_workflow(burst_entity, workflow):
        """
        Given a burst and a workflow populate the tabs of the burst with the PortletConfigurations
        generated from the steps of the workflow.
        """
        visualizers = dao.get_visualization_steps(workflow.id)
        for entry in visualizers:
            ## For each visualization step, also load all of the analyze steps.
            portlet_cfg = PortletConfiguration(entry.fk_portlet)
            portlet_cfg.set_visualizer(entry)
            analyzers = dao.get_workflow_steps_for_position(
                entry.fk_workflow, entry.tab_index, entry.index_in_tab)
            portlet_cfg.set_analyzers(analyzers)
            burst_entity.tabs[entry.tab_index].portlets[
                entry.index_in_tab] = portlet_cfg
        return burst_entity

    def load_tab_configuration(self, burst_entity, op_id):
        """
        Given a burst entity and an operation id, find the workflow to which the op_id
        belongs, then load the burst_entity's tab configuration with those workflow steps.
        """
        originating_workflow = dao.get_workflow_for_operation_id(op_id)
        burst_entity = self.__populate_tabs_from_workflow(
            burst_entity, originating_workflow)
        return burst_entity

    @staticmethod
    def new_portlet_configuration(portlet_id,
                                  tab_nr=-1,
                                  index_in_tab=-1,
                                  portlet_name='Default'):
        """
        Return a new portlet configuration entity with default parameters.
        
        :param portlet_id: the id of the portlet for which a configuration will
            be stored
        :param tab_nr: the index of the currently selected tab
        :param index_in_tab: the index from the currently selected tab
        
        """
        portlet_entity = dao.get_portlet_by_id(portlet_id)
        if portlet_entity is None:
            raise InvalidPortletConfiguration(
                "No portlet entity located in database with id=%s." %
                portlet_id)
        portlet_configurer = PortletConfigurer(portlet_entity)
        configuration = portlet_configurer.create_new_portlet_configuration(
            portlet_name)
        for wf_step in configuration.analyzers:
            wf_step.tab_index = tab_nr
            wf_step.index_in_tab = index_in_tab
        configuration.visualizer.tab_index = tab_nr
        configuration.visualizer.index_in_tab = index_in_tab
        return configuration

    @staticmethod
    def get_available_portlets():
        """
        :returns: a list of all the available portlet entities
        """
        return dao.get_available_portlets()

    @staticmethod
    def get_portlet_by_id(portlet_id):
        """
        :returns: the portlet entity with the given portlet_id
        """
        return dao.get_portlet_by_id(portlet_id)

    @staticmethod
    def get_portlet_by_identifier(portlet_identifier):
        """
        :returns: the portlet entity with the given algorithm identifier (portlet_identifier)
        """
        return dao.get_portlet_by_identifier(portlet_identifier)

    def launch_burst(self,
                     burst_configuration,
                     simulator_index,
                     simulator_id,
                     user_id,
                     launch_mode=LAUNCH_NEW):
        """
        Given a burst configuration and all the necessary data do the actual launch.
        
        :param burst_configuration: BurstConfiguration   
        :param simulator_index: the position within the workflow step list that the simulator will take. This is needed
            so that the rest of the portlet workflow steps know which steps their dynamic parameters come from.
        :param simulator_id: the id of the simulator adapter as stored in the DB. It's needed to load the simulator algo
            group and category that are then passed to the launcher's prepare_operation method.
        :param user_id: the id of the user that launched this burst
        :param launch_mode: new/branch/continue
        """
        ## 1. Prepare BurstConfiguration entity
        if launch_mode == LAUNCH_NEW:
            ## Fully new entity for new simulation
            burst_config = burst_configuration.clone()
            if burst_config.name is None:
                new_id = dao.get_max_burst_id() + 1
                burst_config.name = 'simulation_' + str(new_id)
        else:
            ## Branch or Continue simulation
            burst_config = burst_configuration
            simulation_state = dao.get_generic_entity(
                SIMULATION_DATATYPE_MODULE + "." + SIMULATION_DATATYPE_CLASS,
                burst_config.id, "fk_parent_burst")
            if simulation_state is None or len(simulation_state) < 1:
                exc = BurstServiceException(
                    "Simulation State not found for %s, "
                    "thus we are unable to branch from it!" %
                    burst_config.name)
                self.logger.error(exc)
                raise exc

            simulation_state = simulation_state[0]
            burst_config.update_simulation_parameter("simulation_state",
                                                     simulation_state.gid)
            burst_config = burst_configuration.clone()

            count = dao.count_bursts_with_name(burst_config.name,
                                               burst_config.fk_project)
            burst_config.name = burst_config.name + "_" + launch_mode + str(
                count)

        ## 2. Create Operations and do the actual launch
        if launch_mode in [LAUNCH_NEW, LAUNCH_BRANCH]:
            ## New Burst entry in the history
            burst_id = self._store_burst_config(burst_config)
            thread = threading.Thread(target=self._async_launch_and_prepare,
                                      kwargs={
                                          'burst_config': burst_config,
                                          'simulator_index': simulator_index,
                                          'simulator_id': simulator_id,
                                          'user_id': user_id
                                      })
            thread.start()
            return burst_id, burst_config.name
        else:
            ## Continue simulation
            ## TODO
            return burst_config.id, burst_config.name
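
    # Usage sketch (assumed handles held by the caller): both calls return
    # (burst_id, burst_name) immediately; operations are prepared on a background thread.
    #
    #     burst_id, name = service.launch_burst(cfg, sim_idx, sim_id, user_id)            # LAUNCH_NEW
    #     burst_id, name = service.launch_burst(cfg, sim_idx, sim_id, user_id, "branch")  # branch from state
    #
    # The "continue" mode is still a TODO above and currently just returns the existing ids.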

    @transactional
    def _prepare_operations(self, burst_config, simulator_index, simulator_id,
                            user_id):
        """
        Prepare all required operations for burst launch.
        """
        project_id = burst_config.fk_project
        burst_id = burst_config.id
        workflow_step_list = []
        starting_index = simulator_index + 1

        sim_algo = FlowService().get_algorithm_by_identifier(simulator_id)
        metadata = {DataTypeMetaData.KEY_BURST: burst_id}
        launch_data = burst_config.get_all_simulator_values()[0]
        operations, group = self.operation_service.prepare_operations(
            user_id, project_id, sim_algo, sim_algo.algo_group.group_category,
            metadata, **launch_data)
        group_launched = group is not None
        if group_launched:
            starting_index += 1

        for tab in burst_config.tabs:
            for portlet_cfg in tab.portlets:
                ### For each portlet configuration stored, update the step index ###
                ### and also change the dynamic parameters step indexes to point ###
                ### to the simulator outputs.                                    ###
                if portlet_cfg is not None:
                    analyzers = portlet_cfg.analyzers
                    visualizer = portlet_cfg.visualizer
                    for entry in analyzers:
                        entry.step_index = starting_index
                        self.workflow_service.set_dynamic_step_references(
                            entry, simulator_index)
                        workflow_step_list.append(entry)
                        starting_index += 1
                    ### Change the dynamic parameters to point to the last adapter from this portlet execution.
                    visualizer.step_visible = False
                    if len(workflow_step_list) > 0 and isinstance(
                            workflow_step_list[-1], model.WorkflowStep):
                        self.workflow_service.set_dynamic_step_references(
                            visualizer, workflow_step_list[-1].step_index)
                    else:
                        self.workflow_service.set_dynamic_step_references(
                            visualizer, simulator_index)
                    ### Only a single (non-group) operation gets the visualization step; for groups it would be useless.
                    if not group_launched:
                        workflow_step_list.append(visualizer)

        if group_launched:
            ###  For a group of operations, make sure the metric for PSE view
            ### is also computed, immediately after the simulation.
            metric_algo, metric_group = FlowService(
            ).get_algorithm_by_module_and_class(MEASURE_METRICS_MODULE,
                                                MEASURE_METRICS_CLASS)
            _, metric_interface = FlowService().prepare_adapter(
                project_id, metric_group)
            dynamics = {}
            for entry in metric_interface:
                # The interface holds a 'select' entry for the dataType and a 'select multiple'
                # entry with the metric algorithms to be evaluated. Only the 'select' entry
                # should become a dynamic parameter.
                if entry[ABCAdapter.KEY_TYPE] == 'select':
                    dynamics[entry[ABCAdapter.KEY_NAME]] = {
                        WorkflowStepConfiguration.DATATYPE_INDEX_KEY: 0,
                        WorkflowStepConfiguration.STEP_INDEX_KEY:
                        simulator_index
                    }
            metric_step = model.WorkflowStep(algorithm_id=metric_algo.id,
                                             step_index=simulator_index + 1,
                                             static_param={},
                                             dynamic_param=dynamics)
            metric_step.step_visible = False
            workflow_step_list.insert(0, metric_step)

        workflows = self.workflow_service.create_and_store_workflow(
            project_id, burst_id, simulator_index, simulator_id, operations)
        self.operation_service.prepare_operations_for_workflowsteps(
            workflow_step_list, workflows, user_id, burst_id, project_id,
            group, operations)
        operation_ids = [operation.id for operation in operations]
        return operation_ids
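
    # Resulting step-index layout (sketch): with simulator_index == 0, portlet
    # analyzers start at step index 1 for a single launch; for a group launch the
    # hidden metric step takes index 1, analyzers start at index 2, and the
    # visualizer steps are dropped entirely.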

    def _async_launch_and_prepare(self, burst_config, simulator_index,
                                  simulator_id, user_id):
        """
        Prepare operations asynchronously.
        """
        try:
            operation_ids = self._prepare_operations(burst_config,
                                                     simulator_index,
                                                     simulator_id, user_id)
            self.logger.debug("Starting a total of %s workflows" %
                              (len(operation_ids, )))
            wf_errs = 0
            for operation_id in operation_ids:
                try:
                    OperationService().launch_operation(operation_id, True)
                except Exception as excep:
                    self.logger.error(excep)
                    wf_errs += 1
                    self.workflow_service.mark_burst_finished(
                        burst_config, error_message=str(excep))

            self.logger.debug("Finished launching workflows. " +
                              str(len(operation_ids) - wf_errs) +
                              " were launched successfully, " + str(wf_errs) +
                              " had error on pre-launch steps")
        except Exception as excep:
            self.logger.error(excep)
            self.workflow_service.mark_burst_finished(burst_config,
                                                      error_message=str(excep))
Example #14
class BurstServiceTest(BaseTestCase):
    """
    Test the service layer for the BURST PAGE. We can't make this transactional since
    we launch operations in different threads, and the transactional operator only rolls
    back sessions bound to the current thread's transaction.
    """
    PORTLET_ID = "TA1TA2"
    ## This should not be present in portlets.xml
    INVALID_PORTLET_ID = "this_is_not_a_non_existent_test_portlet_ID"

    burst_service = BurstService()
    flow_service = FlowService()
    operation_service = OperationService()
    workflow_service = WorkflowService()
    sim_algorithm = flow_service.get_algorithm_by_module_and_class(
        SIMULATOR_MODULE, SIMULATOR_CLASS)
    local_simulation_params = copy.deepcopy(SIMULATOR_PARAMETERS)

    def setUp(self):
        """
        Sets up the environment for running the tests;
        cleans the database before testing and saves config file;
        creates a test user, a test project;
        creates burst, flow, operation and workflow services

        """
        self.clean_database()
        self.test_user = TestFactory.create_user()
        self.test_project = TestFactory.create_project(self.test_user)

    def tearDown(self):
        """
        Remove project folders and clean up database.
        """
        FilesHelper().remove_project_structure(self.test_project.name)
        self.clean_database()

    def test_new_portlet_configuration(self):
        """
        Test that the correct portlet configuration is generated for the test portlet.
        """
        # Passing an invalid portlet ID should fail and raise an InvalidPortletConfiguration exception.
        self.assertRaises(InvalidPortletConfiguration,
                          self.burst_service.new_portlet_configuration, -1)

        # Now the happy flow
        test_portlet = dao.get_portlet_by_identifier(self.PORTLET_ID)
        portlet_configuration = self.burst_service.new_portlet_configuration(
            test_portlet.id)
        analyzers = portlet_configuration.analyzers
        self.assertEqual(
            len(analyzers), 1, "Portlet configuration not built properly."
            " Portlet's analyzers list has an unexpected number of elements.")
        self.assertEqual(
            analyzers[0].dynamic_param, {
                u'test_dt_input': {
                    wf_cfg.DATATYPE_INDEX_KEY: 0,
                    wf_cfg.STEP_INDEX_KEY: 0
                }
            }, "Dynamic parameters not loaded properly")
        visualizer = portlet_configuration.visualizer
        self.assertEqual(visualizer.dynamic_param, {},
                         "Dynamic parameters not loaded properly")
        self.assertEqual(visualizer.static_param, {u'test2': u'0'},
                         'Static parameters not loaded properly')

    def test_build_portlet_interface(self):
        """
        Test that the portlet interface is built properly, split into steps and prefixed.
        """
        test_portlet = dao.get_portlet_by_identifier(self.PORTLET_ID)
        portlet_configuration = self.burst_service.new_portlet_configuration(
            test_portlet.id)
        actual_interface = self.burst_service.build_portlet_interface(
            portlet_configuration, self.test_project.id)
        # The expected portlet steps and interface, corresponding to the XML declaration
        # from tvb.tests.framework/core/portlets/test_portlet.xml
        expected_steps = [{
            'ui_name': 'TestAdapterDatatypeInput'
        }, {
            'ui_name': 'TestAdapter2'
        }]
        expected_interface = [{
            ABCAdapter.KEY_DEFAULT:
            'step_0[0]',
            ABCAdapter.KEY_DISABLED:
            True,
            KEY_DYNAMIC:
            True,
            ABCAdapter.KEY_NAME:
            ADAPTER_PREFIX_ROOT + '0test_dt_input'
        }, {
            ABCAdapter.KEY_DEFAULT:
            '0',
            ABCAdapter.KEY_DISABLED:
            False,
            KEY_DYNAMIC:
            False,
            ABCAdapter.KEY_NAME:
            ADAPTER_PREFIX_ROOT + '1test2'
        }]
        for idx, entry in enumerate(expected_steps):
            step = actual_interface[idx]
            for key in entry:
                self.assertEqual(entry.get(key), getattr(step, key))
            for key in expected_interface[idx]:
                self.assertEqual(expected_interface[idx].get(key, False),
                                 step.interface[0].get(key, False))

    def test_build_portlet_interface_invalid(self):
        """
        Test that a proper exception is raised in case an invalid portlet configuration is provided.
        """
        test_portlet = dao.get_portlet_by_identifier(self.PORTLET_ID)
        portlet_configuration = self.burst_service.new_portlet_configuration(
            test_portlet.id)
        portlet_configuration.portlet_id = "this-is-invalid"
        self.assertRaises(InvalidPortletConfiguration,
                          self.burst_service.build_portlet_interface,
                          portlet_configuration, self.test_project.id)

    def test_update_portlet_config(self):
        """
        Test that a portlet configuration's parameters are updated according to a set
        of overwrites that would normally come from the UI. A restart should be needed
        only if analyzer parameters change.
        """
        def __update_params(declared_overwrites, expected_result):
            """
            Do the update and check that we indeed get the expected_result.
            :param declared_overwrites: an input dictionary in the form {'$$name$$': '$$value$$'}. Make
                sure $$name$$ has the prefix that is added for portlet parameters,
                namely ADAPTER_PREFIX_ROOT + step_index + actual_name
            :param expected_result: a boolean telling whether a restart is needed (True when an
                analyzer parameter changed, False for a visualizer-only change)
            """
            result = self.burst_service.update_portlet_configuration(
                portlet_configuration, declared_overwrites)
            self.assertEqual(
                expected_result, result,
                "After update expected %s as 'need_restart' but got %s." %
                (expected_result, result))

        test_portlet = dao.get_portlet_by_identifier(self.PORTLET_ID)
        portlet_configuration = self.burst_service.new_portlet_configuration(
            test_portlet.id)
        previous_entry = portlet_configuration.analyzers[0].static_param[
            'test_non_dt_input']
        declared_overwrites = {
            ADAPTER_PREFIX_ROOT + '0test_non_dt_input': previous_entry
        }
        __update_params(declared_overwrites, False)
        declared_overwrites = {ADAPTER_PREFIX_ROOT + '1test2': 'new_value'}
        __update_params(declared_overwrites, False)
        declared_overwrites = {ADAPTER_PREFIX_ROOT + '0test_non_dt_input': '1'}
        __update_params(declared_overwrites, True)

    def test_update_portlet_config_invalid_data(self):
        """
        Trying an update on a portlet configuration with invalid data
        should not change the configuration instance in any way.
        """
        test_portlet = dao.get_portlet_by_identifier(self.PORTLET_ID)
        portlet_configuration = self.burst_service.new_portlet_configuration(
            test_portlet.id)

        invalid_overwrites = {
            'this_is_not_a_valid_key': 'for_test_portlet_update'
        }
        before_update = copy.deepcopy(portlet_configuration)
        self.burst_service.update_portlet_configuration(
            portlet_configuration, invalid_overwrites)
        self.assertEqual(set(dir(before_update)),
                         set(dir(portlet_configuration)))
        # An update with invalid input data should have no effect on the configuration;
        # check that no data attribute changed.
        for key in portlet_configuration.__dict__.keys():
            if not hasattr(getattr(portlet_configuration, key), '__call__'):
                self.assertEqual(
                    getattr(before_update, key),
                    getattr(portlet_configuration, key),
                    "The value of attribute %s was changed by an update with invalid data "
                    "when it shouldn't have been." % key)

    def test_clone_burst_configuration(self):
        """
        Test that all the major attributes are the same after cloning a burst, except
        that the id of the clone is None.
        """
        first_burst = TestFactory.store_burst(self.test_project.id)
        cloned_burst = first_burst.clone()
        self._compare_bursts(first_burst, cloned_burst)
        self.assertEqual(first_burst.selected_tab, cloned_burst.selected_tab,
                         "Selected tabs not equal for bursts.")
        self.assertEqual(len(first_burst.tabs), len(cloned_burst.tabs),
                         "Tabs not equal for bursts.")
        self.assertTrue(cloned_burst.id is None,
                        'id should be none for cloned entry.')

    def test_store_burst_config(self):
        """
        Test that a burst entity is properly stored in db.
        """
        burst_config = TestFactory.store_burst(self.test_project.id)
        self.assertTrue(burst_config.id is not None,
                        'Burst was not stored properly.')
        stored_entity = dao.get_burst_by_id(burst_config.id)
        self.assertTrue(stored_entity is not None,
                        'Burst was not stored properly.')
        self._compare_bursts(burst_config, stored_entity)

    def _compare_bursts(self, first_burst, second_burst):
        """
        Check that all important attributes (name, project id and status) are the same between two bursts.
        """
        self.assertEqual(first_burst.name, second_burst.name,
                         "Names not equal for bursts.")
        self.assertEqual(first_burst.fk_project, second_burst.fk_project,
                         "Projects not equal for bursts.")
        self.assertEqual(first_burst.status, second_burst.status,
                         "Statuses not equal for bursts.")

    def test_getavailablebursts_none(self):
        """
        Test that an empty list is returned if no data is available in db.
        """
        bursts = self.burst_service.get_available_bursts(self.test_project.id)
        self.assertEqual(bursts, [],
                         "Unexpected result returned : %s" % (bursts, ))

    def test_get_available_bursts_happy(self):
        """
        Test that all the correct bursts are returned for the given project.
        """
        project = model.Project("second_test_proj", self.test_user.id,
                                "description")
        second_project = dao.store_entity(project)
        test_project_bursts = [
            TestFactory.store_burst(self.test_project.id).id for _ in xrange(4)
        ]
        second_project_bursts = [
            TestFactory.store_burst(second_project.id).id for _ in xrange(3)
        ]
        returned_test_project_bursts = [
            burst.id for burst in self.burst_service.get_available_bursts(
                self.test_project.id)
        ]
        returned_second_project_bursts = [
            burst.id for burst in self.burst_service.get_available_bursts(
                second_project.id)
        ]
        self.assertEqual(
            len(test_project_bursts), len(returned_test_project_bursts),
            "Incorrect bursts retrieved for project %s." % self.test_project)
        self.assertEqual(
            len(second_project_bursts), len(returned_second_project_bursts),
            "Incorrect bursts retrieved for project %s." % second_project)
        self.assertEqual(
            set(second_project_bursts), set(returned_second_project_bursts),
            "Incorrect bursts retrieved for project %s." % second_project)
        self.assertEqual(
            set(test_project_bursts), set(returned_test_project_bursts),
            "Incorrect bursts retrieved for project %s." % self.test_project)

    def test_select_simulator_inputs(self):
        """
        Test that, given a dictionary of selected inputs as it would arrive from the UI, only
        the selected simulator inputs are kept.
        """
        simulator_input_tree = self.flow_service.prepare_adapter(
            self.test_project.id, self.sim_algorithm)
        child_parameter = ''
        checked_parameters = {
            simulator_input_tree[0][ABCAdapter.KEY_NAME]: {
                model.KEY_PARAMETER_CHECKED: True,
                model.KEY_SAVED_VALUE: 'new_value'
            },
            simulator_input_tree[1][ABCAdapter.KEY_NAME]: {
                model.KEY_PARAMETER_CHECKED: True,
                model.KEY_SAVED_VALUE: 'new_value'
            }
        }
        # Look for an entry from a subtree to add to the selected simulator inputs
        for idx, entry in enumerate(simulator_input_tree):
            found_it = False
            if idx not in (0, 1) and entry.get(ABCAdapter.KEY_OPTIONS, False):
                for option in entry[ABCAdapter.KEY_OPTIONS]:
                    if option[ABCAdapter.KEY_VALUE] == entry[
                            ABCAdapter.KEY_DEFAULT]:
                        if option[ABCAdapter.KEY_ATTRIBUTES]:
                            child_parameter = option[
                                ABCAdapter.KEY_ATTRIBUTES][0][
                                    ABCAdapter.KEY_NAME]
                            checked_parameters[entry[ABCAdapter.KEY_NAME]] = {
                                model.KEY_PARAMETER_CHECKED:
                                False,
                                model.KEY_SAVED_VALUE:
                                entry[ABCAdapter.KEY_DEFAULT]
                            }
                            checked_parameters[child_parameter] = {
                                model.KEY_PARAMETER_CHECKED: True,
                                model.KEY_SAVED_VALUE: 'new_value'
                            }
                            found_it = True
                            break
            if found_it:
                break
        self.assertTrue(
            child_parameter != '',
            "Could not find any sub-tree entry in simulator interface.")
        subtree = InputTreeManager.select_simulator_inputs(
            simulator_input_tree, checked_parameters)
        # After the select method we expect only the checked parameter entries to remain,
        # with the new values updated accordingly.
        expected_outputs = [{
            ABCAdapter.KEY_NAME:
            simulator_input_tree[0][ABCAdapter.KEY_NAME],
            ABCAdapter.KEY_DEFAULT:
            'new_value'
        }, {
            ABCAdapter.KEY_NAME:
            simulator_input_tree[1][ABCAdapter.KEY_NAME],
            ABCAdapter.KEY_DEFAULT:
            'new_value'
        }, {
            ABCAdapter.KEY_NAME: child_parameter,
            ABCAdapter.KEY_DEFAULT: 'new_value'
        }]
        self.assertEqual(
            len(expected_outputs), len(subtree),
            "Some entries that should not have been displayed still are.")
        for idx, entry in enumerate(expected_outputs):
            self.assertEqual(expected_outputs[idx][ABCAdapter.KEY_NAME],
                             subtree[idx][ABCAdapter.KEY_NAME])
            self.assertEqual(expected_outputs[idx][ABCAdapter.KEY_DEFAULT],
                             subtree[idx][ABCAdapter.KEY_DEFAULT],
                             'Default value not updated properly.')

    def test_rename_burst(self):
        """
        Test that renaming of a burst functions properly.
        """
        burst_config = TestFactory.store_burst(self.test_project.id)
        self.burst_service.rename_burst(burst_config.id, "new_burst_name")
        loaded_burst = dao.get_burst_by_id(burst_config.id)
        self.assertEqual(loaded_burst.name, "new_burst_name",
                         "Burst was not renamed properly.")

    def test_load_burst(self):
        """ 
        Test that load burst works properly. NOTE: this method is also tested
        in the actual burst launch tests. This is just a basic test to verify that
        the simulator interface is loaded properly.
        """
        burst_config = TestFactory.store_burst(self.test_project.id)
        loaded_burst = self.burst_service.load_burst(burst_config.id)[0]
        self.assertEqual(loaded_burst.simulator_configuration, {},
                         "No simulator configuration should have been loaded")
        self.assertEqual(burst_config.fk_project, loaded_burst.fk_project,
                         "Loaded burst different from original one.")
        burst_config = TestFactory.store_burst(
            self.test_project.id, simulator_config={"test": "test"})
        loaded_burst, _ = self.burst_service.load_burst(burst_config.id)
        self.assertEqual(loaded_burst.simulator_configuration,
                         {"test": "test"}, "different burst loaded")
        self.assertEqual(burst_config.fk_project, loaded_burst.fk_project,
                         "Loaded burst different from original one.")

    def test_remove_burst(self):
        """
        Test the remove burst method added to burst_service.
        """
        loaded_burst, _ = self._prepare_and_launch_sync_burst()
        self.burst_service.cancel_or_remove_burst(loaded_burst.id)
        self._check_burst_removed()

    def test_branch_burst(self):
        """
        Test the branching of an existing burst.
        """
        burst_config = self._prepare_and_launch_async_burst(wait_to_finish=60)
        burst_config.prepare_after_load()

        launch_params = self._prepare_simulation_params(4)
        burst_config.update_simulator_configuration(launch_params)

        burst_id, _ = self.burst_service.launch_burst(burst_config, 0,
                                                      self.sim_algorithm.id,
                                                      self.test_user.id,
                                                      "branch")
        burst_config = dao.get_burst_by_id(burst_id)
        self._wait_for_burst(burst_config)

        ts_regions = self.count_all_entities(TimeSeriesRegion)
        sim_states = self.count_all_entities(SimulationState)
        self.assertEqual(
            2, ts_regions,
            "A TimeSeriesRegion should exist for both the original and the branched burst.")
        self.assertEqual(
            2, sim_states,
            "A SimulationState should exist for both the original and the branched burst.")

    def test_remove_group_burst(self):
        """
        Same as the remove-burst test, but for a burst that contains a group of workflows, launched
        as it would be from a Parameter Space Exploration. Check that the workflows are also
        deleted with the burst.
        """
        burst_config = self._prepare_and_launch_async_burst(length=1,
                                                            is_range=True,
                                                            nr_ops=4,
                                                            wait_to_finish=60)

        launched_workflows = dao.get_workflows_for_burst(burst_config.id,
                                                         is_count=True)
        self.assertEqual(
            4, launched_workflows,
            "4 workflows should have been launched due to group parameter.")

        got_deleted = self.burst_service.cancel_or_remove_burst(
            burst_config.id)
        self.assertTrue(got_deleted, "Burst should be deleted")

        launched_workflows = dao.get_workflows_for_burst(burst_config.id,
                                                         is_count=True)
        self.assertEqual(0, launched_workflows,
                         "No workflows should remain after delete.")

        burst_config = dao.get_burst_by_id(burst_config.id)
        self.assertTrue(burst_config is None,
                        "Removing a canceled burst should delete it from db.")

    def test_remove_started_burst(self):
        """
        Try removing a started burst, which should result in it getting canceled.
        """
        burst_entity = self._prepare_and_launch_async_burst(length=20000)
        self.assertEqual(
            BurstConfiguration.BURST_RUNNING, burst_entity.status,
            'A simulation of length 20000 should still be running immediately after launch.'
        )
        got_deleted = self.burst_service.cancel_or_remove_burst(
            burst_entity.id)
        self.assertFalse(got_deleted,
                         "Burst should be cancelled before deleted.")
        burst_entity = dao.get_burst_by_id(burst_entity.id)
        self.assertEqual(
            BurstConfiguration.BURST_CANCELED, burst_entity.status,
            'Deleting a running burst should just cancel it first.')
        got_deleted = self.burst_service.cancel_or_remove_burst(
            burst_entity.id)
        self.assertTrue(got_deleted,
                        "Burst should be deleted if status is cancelled.")
        burst_entity = dao.get_burst_by_id(burst_entity.id)
        self.assertTrue(burst_entity is None,
                        "Removing a canceled burst should delete it from db.")

    def test_burst_delete_with_project(self):
        """
        Test that on removal of a project all burst related data is cleared.
        """
        self._prepare_and_launch_sync_burst()
        ProjectService().remove_project(self.test_project.id)
        self._check_burst_removed()

    def test_sync_burst_launch(self):
        """
        A full test for launching a burst.
        First create the workflow steps and launch the burst.
        Then check that the only operation created is for the first adapter from the portlet; the
        second should be viewed as a visualizer.
        After that load the burst and check that the visualizer and analyzer are loaded in the
        corresponding tab and that all the parameters are still the same. Finally check that the
        burst status updates according to the final operation status.
        """
        loaded_burst, workflow_step_list = self._prepare_and_launch_sync_burst(
        )
        finished, started, error, _, _ = dao.get_operation_numbers(
            self.test_project.id)
        self.assertEqual(
            finished, 1,
            "Only one operation should have been generated for this burst.")
        self.assertEqual(
            started, 0,
            "No operations should remain started since the workflow was launched synchronously."
        )
        self.assertEqual(error, 0, "No operations should return error status.")
        self.assertTrue(loaded_burst.tabs[0].portlets[0] is not None,
                        "Portlet not loaded from config!")
        portlet_config = loaded_burst.tabs[0].portlets[0]
        analyzers = portlet_config.analyzers
        self.assertEqual(
            len(analyzers), 0,
            "Only have 'simulator' and a visualizer. No analyzers should be loaded."
        )
        visualizer = portlet_config.visualizer
        self.assertTrue(visualizer is not None,
                        "Visualizer should not be none.")
        self.assertEqual(visualizer.fk_algorithm,
                         workflow_step_list[0].fk_algorithm,
                         "Different ids after burst load for visualizer.")
        self.assertEqual(
            visualizer.static_param, workflow_step_list[0].static_param,
            "Different static params after burst load for visualizer.")
        self.assertEqual(
            visualizer.dynamic_param, workflow_step_list[0].dynamic_param,
            "Different static params after burst load for visualizer.")

    def test_launch_burst(self):
        """
        Test the launch burst method from burst service.
        """
        first_step_algo = self.flow_service.get_algorithm_by_module_and_class(
            'tvb.tests.framework.adapters.testadapter1', 'TestAdapter1')
        adapter_interface = self.flow_service.prepare_adapter(
            self.test_project.id, first_step_algo)
        ui_submited_simulator_iface_replica = {}
        kwargs_replica = {}
        for entry in adapter_interface:
            ui_submited_simulator_iface_replica[entry[ABCAdapter.KEY_NAME]] = {
                model.KEY_PARAMETER_CHECKED: True,
                model.KEY_SAVED_VALUE: entry[ABCAdapter.KEY_DEFAULT]
            }
            kwargs_replica[entry[ABCAdapter.KEY_NAME]] = entry[
                ABCAdapter.KEY_DEFAULT]
        burst_config = self.burst_service.new_burst_configuration(
            self.test_project.id)
        burst_config.simulator_configuration = ui_submited_simulator_iface_replica
        test_portlet = dao.get_portlet_by_identifier(self.PORTLET_ID)
        tab_config = {test_portlet.id: [(0, 0), (0, 1), (1, 0)]}
        self._add_portlets_to_burst(burst_config, tab_config)
        burst_config.update_simulator_configuration(kwargs_replica)
        burst_id, _ = self.burst_service.launch_burst(burst_config, 0,
                                                      first_step_algo.id,
                                                      self.test_user.id)
        burst_config = dao.get_burst_by_id(burst_id)
        self.assertTrue(
            burst_config.status in (BurstConfiguration.BURST_FINISHED,
                                    BurstConfiguration.BURST_RUNNING),
            "Burst not launched successfully!")
        # Wait at most the default timeout for the burst to finish
        self._wait_for_burst(burst_config)

    def test_load_group_burst(self):
        """
        Launch a group adapter and load it afterwards and check that a group_id is properly loaded.
        """
        launch_params = self._prepare_simulation_params(1, True, 3)

        burst_config = self.burst_service.new_burst_configuration(
            self.test_project.id)
        burst_config.update_simulator_configuration(launch_params)
        burst_id, _ = self.burst_service.launch_burst(burst_config, 0,
                                                      self.sim_algorithm.id,
                                                      self.test_user.id)
        burst_config = dao.get_burst_by_id(burst_id)
        # Wait at most the default timeout for the burst to finish
        self._wait_for_burst(burst_config)

        launched_workflows = dao.get_workflows_for_burst(burst_id,
                                                         is_count=True)
        self.assertEqual(
            3, launched_workflows,
            "3 workflows should have been launched due to group parameter.")

        group_id = self.burst_service.load_burst(burst_id)[1]
        self.assertTrue(group_id >= 0, "Should be part of group.")
        datatype_measures = self.count_all_entities(DatatypeMeasure)
        self.assertEqual(3, datatype_measures)

    def test_launch_burst_invalid_simulator_parameters(self):
        """
        Test that burst is marked as error if invalid data is passed to the first step.
        """
        algo_id = self.flow_service.get_algorithm_by_module_and_class(
            'tvb.tests.framework.adapters.testadapter1', 'TestAdapter1').id
        #Passing invalid kwargs to the 'simulator' component
        burst_config = self.burst_service.new_burst_configuration(
            self.test_project.id)
        kwargs_replica = {'test1_val1_invalid': '0', 'test1_val2': '0'}
        burst_config.update_simulator_configuration(kwargs_replica)
        burst_id, _ = self.burst_service.launch_burst(burst_config, 0, algo_id,
                                                      self.test_user.id)
        burst_config = dao.get_burst_by_id(burst_id)
        # Wait at most the default timeout for the burst to finish
        self._wait_for_burst(burst_config, error_expected=True)

    def test_launch_burst_invalid_simulator_data(self):
        """
        Test that burst is marked as error if invalid data is passed to the first step.
        """
        algo_id = self.flow_service.get_algorithm_by_module_and_class(
            'tvb.tests.framework.adapters.testadapter1', 'TestAdapter1').id
        #Adapter tries to do an int(test1_val1) so this should fail
        burst_config = self.burst_service.new_burst_configuration(
            self.test_project.id)
        kwargs_replica = {'test1_val1': 'asa', 'test1_val2': '0'}
        burst_config.update_simulator_configuration(kwargs_replica)
        burst_id, _ = self.burst_service.launch_burst(burst_config, 0, algo_id,
                                                      self.test_user.id)
        burst_config = dao.get_burst_by_id(burst_id)
        # Wait at most the default timeout for the burst to finish
        self._wait_for_burst(burst_config, error_expected=True)

    def test_launch_burst_invalid_portlet_analyzer_data(self):
        """
        Test that burst is marked as error if invalid data is passed to the first step.
        """
        algo_id = self.flow_service.get_algorithm_by_module_and_class(
            'tvb.tests.framework.adapters.testadapter1', 'TestAdapter1').id
        #Adapter tries to do an int(test1_val1) and int(test1_val2) so this should be valid
        burst_config = self.burst_service.new_burst_configuration(
            self.test_project.id)
        kwargs_replica = {'test1_val1': '1', 'test1_val2': '0'}
        burst_config.update_simulator_configuration(kwargs_replica)

        test_portlet = dao.get_portlet_by_identifier(self.PORTLET_ID)
        portlet_configuration = self.burst_service.new_portlet_configuration(
            test_portlet.id)
        #Portlet analyzer tries to do int(input) which should fail
        declared_overwrites = {
            ADAPTER_PREFIX_ROOT + '0test_non_dt_input': 'asa'
        }
        self.burst_service.update_portlet_configuration(
            portlet_configuration, declared_overwrites)
        burst_config.tabs[0].portlets[0] = portlet_configuration

        burst_id, _ = self.burst_service.launch_burst(burst_config, 0, algo_id,
                                                      self.test_user.id)
        burst_config = dao.get_burst_by_id(burst_id)
        # Wait at most the default timeout for the burst to finish
        burst_config = self._wait_for_burst(burst_config, error_expected=True)

        burst_wf = dao.get_workflows_for_burst(burst_config.id)[0]
        wf_steps = dao.get_workflow_steps(burst_wf.id)
        self.assertTrue(
            len(wf_steps) == 2,
            "Should have exactly 2 wf steps. One for 'simulation' one for portlet analyze operation."
        )
        simulator_op = dao.get_operation_by_id(wf_steps[0].fk_operation)
        self.assertEqual(
            model.STATUS_FINISHED, simulator_op.status,
            "First operation should be simulator which should have 'finished' status."
        )
        portlet_analyze_op = dao.get_operation_by_id(wf_steps[1].fk_operation)
        self.assertEqual(
            portlet_analyze_op.status, model.STATUS_ERROR,
            "Second operation should be portlet analyze step which should have 'error' status."
        )

    def test_launch_group_burst_happy_flow(self):
        """
        Happy flow of launching a burst with a range parameter. Expect to get both an operation
        group and a DataType group for the results of the simulations and for the metric steps.
        """
        burst_config = self._prepare_and_launch_async_burst(length=1,
                                                            is_range=True,
                                                            nr_ops=4,
                                                            wait_to_finish=120)
        if burst_config.status != BurstConfiguration.BURST_FINISHED:
            self.burst_service.stop_burst(burst_config)
            self.fail("Burst should have finished successfully.")

        op_groups = self.count_all_entities(model.OperationGroup)
        dt_groups = self.get_all_entities(model.DataTypeGroup)
        self.assertEqual(
            2, op_groups,
            "An operation group should have been created for each step.")
        self.assertEqual(
            len(dt_groups), 2,
            "An dataType group should have been created for each step.")
        for datatype in dt_groups:
            self.assertEqual(4, datatype.count_results,
                             "Should have 4 datatypes in group")

    def test_launch_group_burst_no_metric(self):
        """
        Test the launch burst method from burst service. Try to launch a burst with a test adapter
        which has no metrics associated. This should fail.
        """
        burst_config = self.burst_service.new_burst_configuration(
            self.test_project.id)

        algo_id = self.flow_service.get_algorithm_by_module_and_class(
            'tvb.tests.framework.adapters.testadapter1', 'TestAdapter1').id
        kwargs_replica = {
            'test1_val1': '[0, 1, 2]',
            'test1_val2': '0',
            model.RANGE_PARAMETER_1: 'test1_val1'
        }
        test_portlet = dao.get_portlet_by_identifier(self.PORTLET_ID)
        tab_config = {test_portlet.id: [(0, 0), (0, 1), (1, 0)]}
        self._add_portlets_to_burst(burst_config, tab_config)
        burst_config.update_simulator_configuration(kwargs_replica)
        burst_id, _ = self.burst_service.launch_burst(burst_config, 0, algo_id,
                                                      self.test_user.id)
        burst_config = dao.get_burst_by_id(burst_id)
        # Wait at most the default timeout for the burst to finish
        self._wait_for_burst(burst_config, error_expected=True)

        launched_workflows = dao.get_workflows_for_burst(burst_id,
                                                         is_count=True)
        self.assertEqual(
            3, launched_workflows,
            "3 workflows should have been launched due to group parameter.")

        op_groups = self.count_all_entities(model.OperationGroup)
        dt_groups = self.count_all_entities(model.DataTypeGroup)
        self.assertEqual(
            5, op_groups,
            "An operation group should have been created for each step.")
        self.assertEqual(
            5, dt_groups,
            "An dataType group should have been created for each step.")

    def test_load_tab_configuration(self):
        """
        Create a burst with some predefined portlets in some known positions. Check that the
        load_tab_configuration method does what is expected, and we get the portlets in the
        corresponding tab positions.
        """
        burst_config = self.burst_service.new_burst_configuration(
            self.test_project.id)
        SIMULATOR_MODULE = 'tvb.tests.framework.adapters.testadapter1'
        SIMULATOR_CLASS = 'TestAdapter1'
        algo_id = self.flow_service.get_algorithm_by_module_and_class(
            SIMULATOR_MODULE, SIMULATOR_CLASS).id
        kwargs_replica = {'test1_val1': '0', 'test1_val2': '0'}
        test_portlet = dao.get_portlet_by_identifier(self.PORTLET_ID)
        # Add test_portlet to positions (0,0), (0,1) and (1,0)
        tab_config = {test_portlet.id: [(0, 0), (0, 1), (1, 0)]}
        self._add_portlets_to_burst(burst_config, tab_config)
        burst_config.update_simulator_configuration(kwargs_replica)
        burst_id, _ = self.burst_service.launch_burst(burst_config, 0, algo_id,
                                                      self.test_user.id)
        burst_config = dao.get_burst_by_id(burst_id)
        burst_config = self._wait_for_burst(burst_config)
        burst_wf = dao.get_workflows_for_burst(burst_config.id)[0]
        wf_step = dao.get_workflow_steps(burst_wf.id)[0]
        burst_config.prepare_after_load()
        for tab in burst_config.tabs:
            for portlet in tab.portlets:
                self.assertTrue(
                    portlet is None,
                    "Before loading the tab configuration all portlets should be none."
                )
        burst_config = self.burst_service.load_tab_configuration(
            burst_config, wf_step.fk_operation)
        for tab_idx, tab in enumerate(burst_config.tabs):
            for portlet_idx, portlet in enumerate(tab.portlets):
                if (tab_idx == 0
                        and portlet_idx in [0, 1]) or (tab_idx == 1
                                                       and portlet_idx == 0):
                    self.assertTrue(portlet is not None,
                                    "portlet gonfiguration not set")
                    self.assertEqual(test_portlet.id, portlet.portlet_id,
                                     "Unexpected portlet entity loaded.")
                else:
                    self.assertTrue(
                        portlet is None,
                        "Before loading the tab configuration all portlets should be none"
                    )

    def _wait_for_burst(self, burst_config, error_expected=False, timeout=40):
        """
        Method that just waits until a burst configuration is finished or a maximum timeout is reached.

        :param burst_config: the burst configuration that should be waited on
        :param error_expected: whether the burst is expected to end with an error status
        :param timeout: the maximum number of seconds to wait for the burst to finish
        """
        waited = 0
        while burst_config.status == BurstConfiguration.BURST_RUNNING and waited <= timeout:
            sleep(0.5)
            waited += 0.5
            burst_config = dao.get_burst_by_id(burst_config.id)

        if waited > timeout:
            self.burst_service.stop_burst(burst_config)
            self.fail(
                "Timed out waiting for simulations to finish. We will cancel it"
            )

        if error_expected and burst_config.status != BurstConfiguration.BURST_ERROR:
            self.burst_service.stop_burst(burst_config)
            self.fail("Burst should have failed due to invalid input data.")

        if (not error_expected
            ) and burst_config.status != BurstConfiguration.BURST_FINISHED:
            msg = "Burst status should have been FINISH. Instead got %s %s" % (
                burst_config.status, burst_config.error_message)
            self.burst_service.stop_burst(burst_config)
            self.fail(msg)

        return burst_config
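
    # Usage sketch: poll a freshly launched burst, allowing a larger budget for
    # group launches (values are illustrative):
    #
    #     burst_config = self._wait_for_burst(burst_config, timeout=120)
    #     burst_config = self._wait_for_burst(burst_config, error_expected=True)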

    def _prepare_and_launch_async_burst(self,
                                        length=4,
                                        is_range=False,
                                        nr_ops=0,
                                        wait_to_finish=0):
        """
        Launch an asynchronous burst with a simulation having all the default parameters; only the length is
        received as a parameter. This is launched with the actual simulator, not with a dummy test adapter.
        :param length: the length of the simulation in milliseconds. This is also used in case we need
            a group burst, in which case we will have `nr_ops` simulations with lengths starting from
            `length` to `length + nr_ops` milliseconds
        :param is_range: a boolean which switches between a group burst and a non-group burst.
            !! even if `is_range` is `True` you still need a non-zero positive `nr_ops` to have an actual group burst
        :param nr_ops: the number of operations in the group burst
        :param wait_to_finish: if non-zero, wait at most this many seconds for the burst to finish
        """
        launch_params = self._prepare_simulation_params(
            length, is_range, nr_ops)

        burst_config = self.burst_service.new_burst_configuration(
            self.test_project.id)
        burst_config.update_simulator_configuration(launch_params)
        burst_id = self.burst_service.launch_burst(burst_config, 0,
                                                   self.sim_algorithm.id,
                                                   self.test_user.id)[0]
        burst_config = dao.get_burst_by_id(burst_id)

        __timeout = 15
        __waited = 0
        # Wait a maximum of 15 seconds for the burst launch to be performed
        while dao.get_workflows_for_burst(
                burst_config.id, is_count=True) == 0 and __waited < __timeout:
            sleep(0.5)
            __waited += 0.5

        if wait_to_finish:
            burst_config = self._wait_for_burst(burst_config,
                                                timeout=wait_to_finish)
        return burst_config

    def _prepare_and_launch_sync_burst(self):
        """
        Private method to launch a dummy burst. Return the burst loaded after the launch finished
        as well as the workflow steps that initially formed the burst.
        NOTE: the burst launched by this method is a `dummy` one, meaning we do not use an actual
        simulation, but instead test adapters.
        """
        burst_config = TestFactory.store_burst(self.test_project.id)

        workflow_step_list = []
        test_portlet = dao.get_portlet_by_identifier(self.PORTLET_ID)

        stored_dt = datatypes_factory.DatatypesFactory()._store_datatype(
            Datatype1())
        first_step_algorithm = self.flow_service.get_algorithm_by_module_and_class(
            "tvb.tests.framework.adapters.testadapter1",
            "TestAdapterDatatypeInput")
        metadata = {DataTypeMetaData.KEY_BURST: burst_config.id}
        kwargs = {"test_dt_input": stored_dt.gid, 'test_non_dt_input': '0'}
        operations, group = self.operation_service.prepare_operations(
            self.test_user.id, self.test_project.id, first_step_algorithm,
            first_step_algorithm.algorithm_category, metadata, **kwargs)
        view_step = TestFactory.create_workflow_step(
            "tvb.tests.framework.adapters.testadapter2",
            "TestAdapter2", {"test2": 2}, {},
            0,
            0,
            0,
            0,
            is_view_step=True)
        view_step.fk_portlet = test_portlet.id
        workflow_step_list.append(view_step)

        workflows = self.workflow_service.create_and_store_workflow(
            self.test_project.id, burst_config.id, 0, first_step_algorithm.id,
            operations)
        self.operation_service.prepare_operations_for_workflowsteps(
            workflow_step_list, workflows, self.test_user.id, burst_config.id,
            self.test_project.id, group, operations)
        ### Now fire the workflow and also update and store the burst configuration ###
        self.operation_service.launch_operation(operations[0].id, False)
        loaded_burst, _ = self.burst_service.load_burst(burst_config.id)
        import_operation = dao.get_operation_by_id(stored_dt.fk_from_operation)
        dao.remove_entity(import_operation.__class__, import_operation.id)
        dao.remove_datatype(stored_dt.gid)
        return loaded_burst, workflow_step_list

    def _check_burst_removed(self):
        """
        Test that a burst was properly removed. This means checking that the burst entity,
        any workflow steps and any datatypes resulting from the burst are also removed.
        """
        remaining_bursts = dao.get_bursts_for_project(self.test_project.id)
        self.assertEqual(0, len(remaining_bursts), "Burst was not deleted")
        ops_number = dao.get_operation_numbers(self.test_project.id)[0]
        self.assertEqual(0, ops_number, "Operations were not deleted.")
        datatypes = dao.get_datatypes_in_project(self.test_project.id)
        self.assertEqual(0, len(datatypes))

        wf_steps = self.count_all_entities(model.WorkflowStep)
        datatype1_stored = self.count_all_entities(Datatype1)
        datatype2_stored = self.count_all_entities(Datatype2)
        self.assertEqual(0, wf_steps, "Workflow steps were not deleted.")
        self.assertEqual(
            0, datatype1_stored,
            "Specific datatype entries for Datatype1 were not deleted.")
        self.assertEqual(
            0, datatype2_stored,
            "Specific datatype entries for Datatype2 were not deleted.")

    def _add_portlets_to_burst(self, burst_config, portlet_dict):
        """
        Adds portlets to a burst config at certain tab positions, as received
        from a properly structured dictionary (see the shape sketch after this method).
        :param burst_config: the burst configuration to which the portlets will be added
        :param portlet_dict: a dictionary in the form
                { portlet_id : [(tab_idx, idx_in_tab), (tab_idx1, idx_in_tab2), ...], ... }
        NOTE: This will overwrite any portlets already present in the burst at any of the positions
        received in parameter `portlet_dict`
        """
        for prt_id in portlet_dict:
            positions = portlet_dict[prt_id]
            for pos in positions:
                burst_config.tabs[pos[0]].portlets[
                    pos[1]] = self.burst_service.new_portlet_configuration(
                        prt_id, pos[0], pos[1])
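
    # Shape sketch for `portlet_dict` (hypothetical portlet id 7): place the same
    # portlet at (tab 0, slot 0) and (tab 1, slot 2):
    #
    #     self._add_portlets_to_burst(burst_config, {7: [(0, 0), (1, 2)]})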

    def _prepare_simulation_params(self, length, is_range=False, no_ops=0):

        connectivity = self._burst_create_connectivity()

        launch_params = self.local_simulation_params
        launch_params['connectivity'] = connectivity.gid
        if is_range:
            launch_params['simulation_length'] = str(
                range(length, length + no_ops))
            launch_params[model.RANGE_PARAMETER_1] = 'simulation_length'
        else:
            launch_params['simulation_length'] = str(length)
            launch_params[model.RANGE_PARAMETER_1] = None

        return launch_params
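
    # Illustrative shapes this helper produces: a plain launch yields
    # simulation_length='4', while a range launch yields simulation_length='[4, 5, 6]'
    # plus RANGE_PARAMETER_1 pointing at 'simulation_length', which is what makes
    # the launcher expand the burst into an operation group.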

    def _burst_create_connectivity(self):
        """
        Create a connectivity that will be used in "non-dummy" burst launches (with the actual simulator).
        """
        meta = {
            DataTypeMetaData.KEY_SUBJECT: "John Doe",
            DataTypeMetaData.KEY_STATE: "RAW_DATA"
        }

        self.operation = model.Operation(self.test_user.id,
                                         self.test_project.id,
                                         self.sim_algorithm.id,
                                         json.dumps(''),
                                         meta=json.dumps(meta),
                                         status=model.STATUS_STARTED)
        self.operation = dao.store_entity(self.operation)
        storage_path = FilesHelper().get_project_folder(
            self.test_project, str(self.operation.id))
        connectivity = Connectivity(storage_path=storage_path)
        connectivity.weights = numpy.ones((74, 74))
        connectivity.centres = numpy.ones((74, 3))
        adapter_instance = StoreAdapter([connectivity])
        self.operation_service.initiate_prelaunch(self.operation,
                                                  adapter_instance, {})
        return connectivity
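
    # Note: the 74x74 weights and 74x3 centres above are dummy all-ones arrays,
    # just enough structure for the simulator adapter to accept the connectivity.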
Example #15
class BurstController(BurstBaseController):
    """
    Controller class for Burst-Pages.
    """
    def __init__(self):
        BurstBaseController.__init__(self)
        self.burst_service = BurstService()
        self.workflow_service = WorkflowService()
        self.context = SelectedAdapterContext()

        ## Cache simulator Tree and Algorithm for performance reasons.
        self.cached_simulator_algorithm = self.flow_service.get_algorithm_by_module_and_class(
            SIMULATOR_MODULE, SIMULATOR_CLASS)

    @property
    @context_selected
    def cached_simulator_input_tree(self):
        """
        Cache the Simulator's input tree, for performance reasons.
        Without a restart, the introspected tree will not differ between executions.
        :returns: Simulator's Input Tree (copy from cache or just loaded)
        """
        cached_simulator_tree = common.get_from_session(
            common.KEY_CACHED_SIMULATOR_TREE)
        if cached_simulator_tree is None:
            cached_simulator_tree = self.flow_service.prepare_adapter(
                common.get_current_project().id,
                self.cached_simulator_algorithm)
            common.add2session(common.KEY_CACHED_SIMULATOR_TREE,
                               cached_simulator_tree)
        return copy.deepcopy(cached_simulator_tree)
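
    # The caching pattern above, as a generic sketch (assumed `session` store and
    # expensive `build_tree()`): compute once, store, hand out deep copies so
    # callers can mutate their tree without corrupting the cache.
    #
    #     tree = session.get(KEY)
    #     if tree is None:
    #         tree = build_tree()      # expensive introspection, done once
    #         session[KEY] = tree
    #     return copy.deepcopy(tree)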

    @expose_page
    @settings
    @context_selected
    def index(self):
        """Get on burst main page"""
        # todo : reuse load_burst here for consistency.
        template_specification = dict(
            mainContent="burst/main_burst",
            title="Simulation Cockpit",
            baseUrl=TvbProfile.current.web.BASE_URL,
            includedResources='project/included_resources')
        portlets_list = self.burst_service.get_available_portlets()
        session_stored_burst = common.get_from_session(common.KEY_BURST_CONFIG)
        if session_stored_burst is None or session_stored_burst.id is None:
            if session_stored_burst is None:
                session_stored_burst = self.burst_service.new_burst_configuration(
                    common.get_current_project().id)
                common.add2session(common.KEY_BURST_CONFIG,
                                   session_stored_burst)

            adapter_interface = self.cached_simulator_input_tree
            if session_stored_burst is not None:
                current_data = session_stored_burst.get_all_simulator_values(
                )[0]
                adapter_interface = InputTreeManager.fill_defaults(
                    adapter_interface, current_data, True)
                ### Add simulator tree to session to be available in filters
                self.context.add_adapter_to_session(
                    self.cached_simulator_algorithm, adapter_interface,
                    current_data)
            template_specification['inputList'] = adapter_interface

        selected_portlets = session_stored_burst.update_selected_portlets()
        template_specification['burst_list'] = self.burst_service.get_available_bursts(
            common.get_current_project().id)
        template_specification['portletList'] = portlets_list
        template_specification['selectedPortlets'] = json.dumps(
            selected_portlets)
        template_specification['draw_hidden_ranges'] = True
        template_specification['burstConfig'] = session_stored_burst

        ### Prepare PSE available metrics
        ### We put here all available algorithms, because the metrics select area is a generic one,
        ### and not loaded with every Burst Group change in history.
        algorithm = self.flow_service.get_algorithm_by_module_and_class(
            MEASURE_METRICS_MODULE, MEASURE_METRICS_CLASS)
        adapter_instance = ABCAdapter.build_adapter(algorithm)
        if adapter_instance is not None and hasattr(adapter_instance,
                                                    'available_algorithms'):
            template_specification['available_metrics'] = [
                metric_name for metric_name in
                adapter_instance.available_algorithms.keys()
            ]
        else:
            template_specification['available_metrics'] = []

        template_specification[common.KEY_PARAMETERS_CONFIG] = False
        template_specification[common.KEY_SECTION] = 'burst'
        return self.fill_default_attributes(template_specification)

    @expose_fragment('burst/burst_history')
    def load_burst_history(self):
        """
        Load the available bursts that are stored in the database at this time.
        This is one workaround for the 'chrome back-button' problem.
        """
        session_burst = common.get_from_session(common.KEY_BURST_CONFIG)
        bursts = self.burst_service.get_available_bursts(
            common.get_current_project().id)
        self.burst_service.populate_burst_disk_usage(bursts)
        return {'burst_list': bursts, 'selectedBurst': session_burst.id}

    @cherrypy.expose
    @handle_error(redirect=False)
    def get_selected_burst(self):
        """
        Return the burst that is currently stored in session.
        This is one workaround for the 'chrome back-button' problem.
        """
        session_burst = common.get_from_session(common.KEY_BURST_CONFIG)
        if session_burst.id:
            return str(session_burst.id)
        else:
            return 'None'

    @expose_fragment('burst/portlet_configure_parameters')
    def get_portlet_configurable_interface(self, index_in_tab):
        """
        From the position given by the tab index and the index from that tab, 
        get the portlet configuration and build the configurable interface
        for that portlet.
        """
        burst_config = common.get_from_session(common.KEY_BURST_CONFIG)
        tab_index = burst_config.selected_tab
        portlet_config = burst_config.tabs[tab_index].portlets[int(index_in_tab)]
        portlet_interface = self.burst_service.build_portlet_interface(
            portlet_config,
            common.get_current_project().id)

        full_portlet_input_tree = []
        for entry in portlet_interface:
            full_portlet_input_tree.extend(entry.interface)
        self.context.add_portlet_to_session(full_portlet_input_tree)

        portlet_interface = {
            "adapters_list": portlet_interface,
            common.KEY_PARAMETERS_CONFIG: False,
            common.KEY_SESSION_TREE: self.context.KEY_PORTLET_CONFIGURATION
        }
        return self.fill_default_attributes(portlet_interface)

    @expose_fragment('burst/portlets_preview')
    def portlet_tab_display(self, **data):
        """
        When saving a new configuration of tabs, check if any of the old 
        portlets are still present, and if that is the case use their 
        parameters configuration. 
        
        For all the new portlets add entries in the burst configuration. 
        Also remove old portlets that are no longer saved.
        """
        tab_portlets_list = json.loads(data['tab_portlets_list'])
        burst_config = common.get_from_session(common.KEY_BURST_CONFIG)
        selected_tab_idx = burst_config.selected_tab
        for tab_idx in range(len(tab_portlets_list)):
            current_tab = burst_config.tabs[tab_idx]
            ### When a configuration already exists and new portlets are selected,
            ### first check whether a configuration was saved for each portlet and,
            ### if so, use it. If none is present, create a new one.
            for idx_in_tab in range(len(tab_portlets_list[tab_idx])):
                portlet_id = tab_portlets_list[tab_idx][idx_in_tab][0]
                portlet_name = tab_portlets_list[tab_idx][idx_in_tab][1]
                if portlet_id >= 0:
                    saved_config = current_tab.portlets[idx_in_tab]
                    if saved_config is None or saved_config.portlet_id != portlet_id:
                        current_tab.portlets[idx_in_tab] = self.burst_service.new_portlet_configuration(
                            portlet_id, tab_idx, idx_in_tab, portlet_name)
                    else:
                        saved_config.visualizer.ui_name = portlet_name
                else:
                    current_tab.portlets[idx_in_tab] = None
            # To generate the HTML, fetch the corresponding portlet entity for each id
        selected_tab_portlets = []
        saved_selected_tab = burst_config.tabs[selected_tab_idx]
        for portlet in saved_selected_tab.portlets:
            if portlet:
                portlet_id = int(portlet.portlet_id)
                portlet_entity = self.burst_service.get_portlet_by_id(
                    portlet_id)
                portlet_entity.name = portlet.name
                selected_tab_portlets.append(portlet_entity)

        return {'portlet_tab_list': selected_tab_portlets}

    @expose_fragment('burst/portlets_preview')
    def get_configured_portlets(self):
        """
        Return the portlets for one given tab. This is used when changing
        from tab to tab and selecting which portlets will be displayed.
        """
        burst_config = common.get_from_session(common.KEY_BURST_CONFIG)
        if burst_config is None:
            return {'portlet_tab_list': []}

        tab_idx = burst_config.selected_tab
        tab_portlet_list = []
        for portlet_cfg in burst_config.tabs[int(tab_idx)].portlets:
            if portlet_cfg is not None:
                portlet_entity = self.burst_service.get_portlet_by_id(
                    portlet_cfg.portlet_id)
                portlet_entity.name = portlet_cfg.name
                tab_portlet_list.append(portlet_entity)
        return {'portlet_tab_list': tab_portlet_list}

    @expose_json
    def change_selected_tab(self, tab_nr):
        """
        Set :param tab_nr: as the currently selected tab in the stored burst
        configuration. 
        """
        common.get_from_session(
            common.KEY_BURST_CONFIG).selected_tab = int(tab_nr)

    @expose_json
    def get_portlet_session_configuration(self):
        """
        Get the current configuration of portlets stored in session for this burst,
        as a json.
        """
        burst_entity = common.get_from_session(common.KEY_BURST_CONFIG)
        returned_configuration = burst_entity.update_selected_portlets()
        return returned_configuration

    @cherrypy.expose
    @handle_error(redirect=False)
    def save_parameters(self, index_in_tab, **data):
        """
        Save the parameters of a configured portlet.

        :param index_in_tab: the index of the configured portlet in the selected tab
        :param data: the {"portlet_parameters": json_string} dict, where json_string is a JSON-encoded
            dictionary {"name": value} representing the configuration of the current portlet

        The selected tab index is read from the burst configuration stored in session. With these
        inputs, this method updates the configuration of the portlet at the corresponding tab
        position in the session burst configuration.
        """
        burst_config = common.get_from_session(common.KEY_BURST_CONFIG)
        tab_nr = burst_config.selected_tab
        old_portlet_config = burst_config.tabs[int(tab_nr)].portlets[int(index_in_tab)]
        data = json.loads(data['portlet_parameters'])

        # Replace all void entries with 'None'
        for entry in data:
            if data[entry] == '':
                data[entry] = None

        need_relaunch = self.burst_service.update_portlet_configuration(
            old_portlet_config, data)
        if need_relaunch:
            #### Reset Burst Configuration into an entity not persisted (id = None for all)
            common.add2session(common.KEY_BURST_CONFIG, burst_config.clone())
            return "relaunchView"
        else:
            self.workflow_service.store_workflow_step(
                old_portlet_config.visualizer)
            return "noRelaunch"

    @expose_json
    def rename_burst(self, burst_id, burst_name):
        """
        Rename the burst given by burst_id, setting its new name to
        burst_name.
        """
        validation_result = self._is_burst_name_ok(burst_name)
        if validation_result is True:
            self.burst_service.rename_burst(burst_id, burst_name)
            return {'success': "Simulation successfully renamed!"}
        else:
            return {'error': validation_result}

    @expose_json
    def launch_burst(self, launch_mode, burst_name, **data):
        """
        Do the actual burst launch, using the configuration saved in current session.
        :param launch_mode: new/branch/continue
        :param burst_name: user-given burst name. It can be empty (in which case we will fill it in as simulation_x)
        :param data: kwargs for simulation input parameters.
        """
        data = json.loads(data['simulator_parameters'])
        burst_config = common.get_from_session(common.KEY_BURST_CONFIG)

        ## Validate new burst-name
        if launch_mode == LAUNCH_NEW and burst_name != 'none_undefined':
            validation_result = self._is_burst_name_ok(burst_name)
            if validation_result is True:
                burst_config.name = burst_name
            else:
                return {'error': validation_result}

        ## Fill all parameters
        user_id = common.get_logged_user().id
        data[common.KEY_ADAPTER] = self.cached_simulator_algorithm.id
        burst_config.update_simulator_configuration(data)
        burst_config.fk_project = common.get_current_project().id

        ## Do the asynchronous launch
        try:
            burst_id, burst_name = self.burst_service.launch_burst(
                burst_config, 0, self.cached_simulator_algorithm.id, user_id,
                launch_mode)
            return {'id': burst_id, 'name': burst_name}
        except BurstServiceException as e:
            self.logger.exception("Could not launch burst!")
            return {'error': e.message}

    @expose_json
    def load_burst(self, burst_id):
        """
        Given a burst id, return its running status, whether it was an operation group, and the selected tab.
        This is called when a burst is selected in the history,
        when returning from a burst config page (model param or noise)
        and when the status of running simulations is polled.
        Besides returning these values it updates the session stored burst.

        A burst configuration has 2 meanings.
        It is a staging configuration for a new burst (stored in transients in the session).
        It is the configuration used to launch a simulation and its running status (stored in the db).
        This method has to merge the two meanings.
        If the requested burst_id is different from the one held in the session,
        then the burst config is loaded from the db, discarding any session stored config.
        If the id is the same then the session config is kept.
        """
        try:
            burst_id = int(burst_id)
            old_burst = common.get_from_session(common.KEY_BURST_CONFIG)
            burst, group_gid = self.burst_service.load_burst(burst_id)

            if old_burst and old_burst.id == burst_id:
                # This function was called to reload the current burst.
                # Merge session config into the db config. Overwrite all transient fields
                burst.simulator_configuration = old_burst.simulator_configuration
                burst.dynamic_ids = old_burst.dynamic_ids

            if old_burst is not None:
                # Guard for a fresh session, where no burst config was stored yet
                burst.selected_tab = old_burst.selected_tab
            common.add2session(common.KEY_BURST_CONFIG, burst)
            return {
                'status': burst.status,
                'group_gid': group_gid,
                'selected_tab': burst.selected_tab
            }
        except Exception:
            ### Most probably Burst was removed. Delete it from session, so that client
            ### has a good chance to get a good response on refresh
            self.logger.exception("Error loading burst")
            common.remove_from_session(common.KEY_BURST_CONFIG)
            raise

    @expose_json
    def get_history_status(self, **data):
        """
        For each burst id received, get the status and return it.
        """
        return self.burst_service.update_history_status(
            json.loads(data['burst_ids']))

    @cherrypy.expose
    @handle_error(redirect=False)
    @check_user
    def cancel_or_remove_burst(self, burst_id):
        """
        Cancel or Remove the burst entity given by burst_id.
        :returns 'reset-new': When currently selected burst was removed. JS will need to reset selection to a new entry
        :returns 'canceled': When current burst was still running and was just stopped.
        :returns 'done': When no action is required on the client.
        """
        burst_id = int(burst_id)
        session_burst = common.get_from_session(common.KEY_BURST_CONFIG)
        removed = self.burst_service.cancel_or_remove_burst(burst_id)
        if removed:
            if session_burst.id == burst_id:
                return "reset-new"
            return 'done'
        else:
            # Burst was stopped since it was running
            return 'canceled'

    @expose_json
    def get_selected_portlets(self):
        """
        Get the selected portlets for the loaded burst.
        """
        burst = common.get_from_session(common.KEY_BURST_CONFIG)
        return burst.update_selected_portlets()

    @cherrypy.expose
    @handle_error(redirect=False)
    def get_visualizers_for_operation_id(self, op_id, width, height):
        """
        Method called from parameters exploration page in case a burst with a range of parameters
        for the simulator was launched. 
        :param op_id: the selected operation id from the parameter space exploration.
        :param width: the width of the right side display
        :param height: the height of the right side display
        
        Given these parameters first get the workflow to which op_id belongs, then load the portlets
        from that workflow as the current burst configuration. Width and height are used to get the
        proper sizes for the visualization iFrames. 
        """
        burst_config = common.get_from_session(common.KEY_BURST_CONFIG)
        burst_config = self.burst_service.load_tab_configuration(
            burst_config, op_id)
        common.add2session(common.KEY_BURST_CONFIG, burst_config)
        return self.load_configured_visualizers(width, height)

    @expose_fragment("burst/portlets_view")
    def load_configured_visualizers(self, width='800', height='600'):
        """
        Load all the visualization steps for this tab. Width and height represent
        the dimensions of the right side Div, so that we can compute for each iFrame
        the maximum size its visualizer can take.
        """
        burst = common.get_from_session(common.KEY_BURST_CONFIG)
        selected_tab = burst.selected_tab
        tab_portlet_list = []
        for portlet_cfg in burst.tabs[int(selected_tab)].portlets:
            if portlet_cfg is not None:
                tab_portlet_list.append(
                    self.__portlet_config2portlet_entity(portlet_cfg))
        return {
            'status': burst.status,
            'portlet_tab_list': tab_portlet_list,
            'max_width': int(width),
            'max_height': int(height),
            'model': tvb.core.entities.model
        }

    @expose_fragment("burst/portlet_visualization_template")
    def check_status_for_visualizer(self,
                                    selected_tab,
                                    index_in_tab,
                                    width='800',
                                    height='600'):
        """
        This call is used to check on a regular basis if the data for a certain portlet is 
        available for visualization. Should return the status and the HTML to be displayed.
        """
        burst = common.get_from_session(common.KEY_BURST_CONFIG)
        target_portlet = burst.tabs[int(selected_tab)].portlets[int(index_in_tab)]
        target_portlet = self.__portlet_config2portlet_entity(target_portlet)
        template_dict = {
            'portlet_entity': target_portlet,
            'model': tvb.core.entities.model,
            'width': int(width),
            'height': int(height)
        }
        return template_dict

    @expose_json
    def reset_burst(self):
        """
        Called when a click on the "New Burst" entry happens in the UI.
        This will generate an empty new Burst Configuration.
        """
        common.remove_from_session(common.KEY_CACHED_SIMULATOR_TREE)
        new_burst = self.burst_service.new_burst_configuration(
            common.get_current_project().id)
        common.add2session(common.KEY_BURST_CONFIG, new_burst)

    @cherrypy.expose
    @handle_error(redirect=False)
    def copy_burst(self, burst_id):
        """
        When currently selected entry is a valid Burst, create a clone of that Burst.
        """
        common.remove_from_session(common.KEY_CACHED_SIMULATOR_TREE)
        base_burst = self.burst_service.load_burst(burst_id)[0]
        if (base_burst is None) or (base_burst.id is None):
            return self.reset_burst()
        common.add2session(common.KEY_BURST_CONFIG, base_burst.clone())
        return base_burst.name

    @expose_fragment("burst/base_portlets_iframe")
    def launch_visualization(self, index_in_tab, frame_width, frame_height):
        """
        Launch the visualization for this tab and index in tab. The width and height represent
        the maximum size of the inner visualization canvas, so that it can fit in the iFrame.
        """
        result = {}
        try:
            burst = common.get_from_session(common.KEY_BURST_CONFIG)
            visualizer = burst.tabs[burst.selected_tab].portlets[int(index_in_tab)].visualizer
            result = self.burst_service.launch_visualization(
                visualizer, float(frame_width), float(frame_height), True)[0]
            result['launch_success'] = True
        except Exception as ex:
            result['launch_success'] = False
            result['error_msg'] = str(ex)
            self.logger.exception("Could not launch Portlet Visualizer...")

        return self.fill_default_attributes(result)

    @expose_fragment("flow/genericAdapterFormFields")
    def configure_simulator_parameters(self):
        """
        Return the required input tree to generate the simulator interface for
        the burst page in 'configuration mode', i.e. with a checkbox next to each
        input, checked or not depending on whether the user previously selected
        it, and with the user-filled defaults.
        """
        burst_config = common.get_from_session(common.KEY_BURST_CONFIG)
        default_values, any_checked = burst_config.get_all_simulator_values()
        simulator_input_tree = self.cached_simulator_input_tree
        simulator_input_tree = InputTreeManager.fill_defaults(
            simulator_input_tree, default_values)
        ### Add simulator tree to session to be available in filters
        self.context.add_adapter_to_session(self.cached_simulator_algorithm,
                                            simulator_input_tree,
                                            default_values)

        template_vars = {}
        self.fill_default_attributes(template_vars)
        template_vars.update({
            "inputList": simulator_input_tree,
            common.KEY_PARAMETERS_CONFIG: True,
            'none_checked': not any_checked,
            'selectedParametersDictionary': burst_config.simulator_configuration
        })
        return template_vars

    @expose_fragment("flow/genericAdapterFormFields")
    def get_reduced_simulator_interface(self):
        """
        Get a simulator interface that only contains the inputs that are marked
        as KEY_PARAMETER_CHECKED in the current session.
        """
        burst_config = common.get_from_session(common.KEY_BURST_CONFIG)
        simulator_config = burst_config.simulator_configuration
        ## Fill with stored defaults, and see if any parameter was checked by user ##
        default_values, any_checked = burst_config.get_all_simulator_values()
        simulator_input_tree = self.cached_simulator_input_tree
        simulator_input_tree = InputTreeManager.fill_defaults(
            simulator_input_tree, default_values)
        ## In case no values were checked just skip tree-cut part and show entire simulator tree ##
        if any_checked:
            simulator_input_tree = InputTreeManager.select_simulator_inputs(
                simulator_input_tree, simulator_config)

        ### Add simulator tree to session to be available in filters
        self.context.add_adapter_to_session(self.cached_simulator_algorithm,
                                            simulator_input_tree,
                                            default_values)

        template_specification = {
            "inputList": simulator_input_tree,
            common.KEY_PARAMETERS_CONFIG: False,
            'draw_hidden_ranges': True
        }
        return self.fill_default_attributes(template_specification)

    @expose_json
    def get_previous_selected_rangers(self):
        """
        Retrieve Rangers, if any previously selected in Burst.
        """
        burst_config = common.get_from_session(common.KEY_BURST_CONFIG)
        first_range, second_range = '0', '0'
        if burst_config is not None:
            first_range = burst_config.get_simulation_parameter_value(
                RANGE_PARAMETER_1) or '0'
            second_range = burst_config.get_simulation_parameter_value(
                RANGE_PARAMETER_2) or '0'
        return [first_range, second_range]

    @expose_json
    def save_simulator_configuration(self, exclude_ranges, **data):
        """
        :param exclude_ranges: should be a boolean value. If it is True, then the
            ranges will be excluded from the simulation parameters.

        Data is a dictionary with pairs in one of the forms:
            { 'simulator_parameters' : { $name$ : { 'value' : $value$, 'is_disabled' : true/false } },
              'burstName': $burst_name}
        
        The name of the checkbox next to the parameter with name $name$ is always $name$_checked.
        Save this dictionary in an easy-to-process form, from which one could rebuild
        either only the selected entries, or the entire simulator tree with the given
        default values.
        """
        exclude_ranges = string2bool(str(exclude_ranges))
        burst_config = common.get_from_session(common.KEY_BURST_CONFIG)
        if BURST_NAME in data:
            burst_config.name = data[BURST_NAME]
        data = json.loads(data['simulator_parameters'])
        for entry in data:
            if exclude_ranges and (entry.endswith("_checked")
                                   or entry == RANGE_PARAMETER_1
                                   or entry == RANGE_PARAMETER_2):
                continue
            burst_config.update_simulation_parameter(entry, data[entry])
            checkbox_for_entry = entry + "_checked"
            if checkbox_for_entry in data:
                burst_config.update_simulation_parameter(
                    entry, data[checkbox_for_entry], KEY_PARAMETER_CHECKED)

    @expose_page
    @context_selected
    @settings
    def launch_full_visualizer(self, index_in_tab):
        """
        Launch the full-scale visualizer from a small preview in the burst cockpit.
        """
        burst = common.get_from_session(common.KEY_BURST_CONFIG)
        selected_tab = burst.selected_tab
        visualizer = burst.tabs[selected_tab].portlets[int(index_in_tab)].visualizer
        result, input_data = self.burst_service.launch_visualization(
            visualizer, is_preview=False)
        algorithm = self.flow_service.get_algorithm_by_identifier(
            visualizer.fk_algorithm)

        if common.KEY_TITLE not in result:
            result[common.KEY_TITLE] = algorithm.displayname

        result[common.KEY_ADAPTER] = algorithm.id
        result[common.KEY_OPERATION_ID] = None
        result[common.KEY_INCLUDE_RESOURCES] = 'flow/included_resources'
        ## Add the required field to the input dictionary and return it, so that it
        ## can be used for the top-right control.
        input_data[common.KEY_ADAPTER] = algorithm.id

        if common.KEY_PARENT_DIV not in result:
            result[common.KEY_PARENT_DIV] = ''
        self.context.add_adapter_to_session(algorithm, None,
                                            copy.deepcopy(input_data))

        self._populate_section(algorithm, result, True)
        result[common.KEY_DISPLAY_MENU] = True
        result[common.KEY_BACK_PAGE] = "/burst"
        result[common.KEY_SUBMIT_LINK] = self.get_url_adapter(
            algorithm.fk_category, algorithm.id, 'burst')
        if KEY_CONTROLLS not in result:
            result[KEY_CONTROLLS] = ''
        return self.fill_default_attributes(result)

    def __portlet_config2portlet_entity(self, portlet_cfg):
        """
        From a portlet configuration as it is stored in session, update status and add the index in 
        tab so we can properly display it in the burst page.
        """
        portlet_entity = self.burst_service.get_portlet_by_id(
            portlet_cfg.portlet_id)
        portlet_status, error_msg = self.burst_service.get_portlet_status(
            portlet_cfg)
        portlet_entity.error_msg = error_msg
        portlet_entity.status = portlet_status
        portlet_entity.name = portlet_cfg.name
        portlet_entity.index_in_tab = portlet_cfg.index_in_tab
        portlet_entity.td_gid = generate_guid()
        return portlet_entity

    def _is_burst_name_ok(self, burst_name):
        """
        Validate a new burst name, to have only plain text.
        :returns: True, when validation succeeds, and an error message otherwise.
        """
        try:
            form = BurstNameForm()
            form.to_python({'burst_name': burst_name})
            return True
        except formencode.Invalid:
            validation_error = "Invalid simulation name %s. Please use only letters, numbers, or _ " % str(
                burst_name)
            self.logger.exception(validation_error)
            return validation_error

    @cherrypy.expose
    @handle_error(redirect=False)
    @check_user
    def export(self, burst_id):
        export_manager = ExportManager()
        export_json = export_manager.export_burst(burst_id)

        result_name = "tvb_simulation_" + str(burst_id) + ".json"
        return serve_fileobj(export_json, "application/x-download",
                             "attachment", result_name)

    @expose_fragment("overlay")
    def get_upload_overlay(self):
        template_specification = self.fill_overlay_attributes(
            None, "Upload", "Simulation JSON", "burst/upload_burst_overlay",
            "dialog-upload")
        return self.fill_default_attributes(template_specification)

    @cherrypy.expose
    @handle_error(redirect=True)
    @check_user
    @settings
    def load_burst_from_json(self, **data):
        """Upload Burst from previously exported JSON file"""
        self.logger.debug("Uploading ..." + str(data))

        try:
            upload_param = "uploadedfile"
            if upload_param in data and data[upload_param]:

                upload_param = data[upload_param]
                if isinstance(upload_param, FieldStorage) or isinstance(
                        upload_param, Part):
                    if not upload_param.file:
                        raise BurstServiceException(
                            "Please select a valid JSON file.")
                    upload_param = upload_param.file.read()

                upload_param = json.loads(upload_param)
                prj_id = common.get_current_project().id
                importer = ImportService()
                burst_entity = importer.load_burst_entity(upload_param, prj_id)
                common.add2session(common.KEY_BURST_CONFIG, burst_entity)

        except Exception as excep:
            self.logger.warning(excep.message)
            common.set_error_message(excep.message)

        raise cherrypy.HTTPRedirect('/burst/')
Example #17
class OperationService:
    """
    Class responsible for preparing an operation launch. 
    It will prepare parameters and decide whether the operation is to be executed
    immediately, or sent to the cluster.
    """
    ATT_UID = "uid"


    def __init__(self):
        self.logger = get_logger(self.__class__.__module__)
        self.workflow_service = WorkflowService()
        self.file_helper = FilesHelper()


    ##########################################################################################
    ######## Methods related to launching operations start here ##############################
    ##########################################################################################

    def initiate_operation(self, current_user, project_id, adapter_instance,
                           temporary_storage, visible=True, **kwargs):
        """
        Gets the parameters of the computation from the previous inputs form,
        and launches a computation (on the cluster or locally).
        
        Invoke the custom method on an Adapter instance. Make sure that when the
        operation has finished, the correct results are stored into the DB.
        """
        if not isinstance(adapter_instance, ABCAdapter):
            self.logger.warning("Inconsistent Adapter Class:" + str(adapter_instance.__class__))
            raise LaunchException("Developer Exception!!")

        # Prepare Files parameters
        files = {}
        kw2 = copy(kwargs)
        for i, j in six.iteritems(kwargs):
            if isinstance(j, FieldStorage) or isinstance(j, Part):
                files[i] = j
                del kw2[i]

        temp_files = {}
        try:
            for i, j in six.iteritems(files):
                if j.file is None:
                    kw2[i] = None
                    continue
                uq_name = utils.date2string(datetime.now(), True) + '_' + str(i)
                # We have to add original file name to end, in case file processing
                # involves file extension reading
                file_name = TEMPORARY_PREFIX + uq_name + '_' + j.filename
                file_name = os.path.join(temporary_storage, file_name)
                kw2[i] = file_name
                temp_files[i] = file_name
                with open(file_name, 'wb') as file_obj:
                    file_obj.write(j.file.read())
                self.logger.debug("Will store file:" + file_name)
            kwargs = kw2
        except Exception as excep:
            self._handle_exception(excep, temp_files, "Could not launch operation: invalid input files!")

        ### Store Operation entity. 
        algo = adapter_instance.stored_adapter
        algo_category = dao.get_category_by_id(algo.fk_category)

        operations = self.prepare_operations(current_user.id, project_id, algo, algo_category,
                                             {}, visible, **kwargs)[0]

        if isinstance(adapter_instance, ABCSynchronous):
            if len(operations) > 1:
                raise LaunchException("Synchronous operations are not supporting ranges!")
            if len(operations) < 1:
                self.logger.warning("No operation was defined")
                raise LaunchException("Invalid empty Operation!!!")
            return self.initiate_prelaunch(operations[0], adapter_instance, temp_files, **kwargs)
        else:
            return self._send_to_cluster(operations, adapter_instance, current_user.username)


    @staticmethod
    def _prepare_metadata(initial_metadata, algo_category, operation_group, submit_data):
        """
        Gather metadata from the submitted fields and from the algorithm about to be executed.
        Will populate STATE and GROUP in the metadata.
        """
        metadata = copy(initial_metadata)

        user_group = None
        if DataTypeMetaData.KEY_OPERATION_TAG in submit_data:
            user_group = submit_data[DataTypeMetaData.KEY_OPERATION_TAG]

        if operation_group is not None:
            metadata[DataTypeMetaData.KEY_OPERATION_TAG] = operation_group.name

        if DataTypeMetaData.KEY_TAG_1 in submit_data:
            metadata[DataTypeMetaData.KEY_TAG_1] = submit_data[DataTypeMetaData.KEY_TAG_1]

        metadata[DataTypeMetaData.KEY_STATE] = algo_category.defaultdatastate

        return metadata, user_group


    @staticmethod
    def _read_set(values):
        """ Parse a committed UI possible list of values, into a set converted into string."""
        if isinstance(values, list):
            set_values = []
            values_str = ""
            for val in values:
                if val not in set_values:
                    set_values.append(val)
                    values_str = values_str + " " + str(val)
            values = values_str
        return str(values).strip()


    def group_operation_launch(self, user_id, project_id, algorithm_id, category_id, existing_dt_group=None, **kwargs):
        """
        Create and prepare the launch of a group of operations.
        """
        category = dao.get_category_by_id(category_id)
        algorithm = dao.get_algorithm_by_id(algorithm_id)
        ops, _ = self.prepare_operations(user_id, project_id, algorithm, category, {},
                                         existing_dt_group=existing_dt_group, **kwargs)
        for operation in ops:
            self.launch_operation(operation.id, True)


    def prepare_operations(self, user_id, project_id, algorithm, category, metadata,
                           visible=True, existing_dt_group=None, **kwargs):
        """
        Do all the necessary preparations for storing an operation. When a range of
        values is given, create an operation group and one operation for each possible
        instance from the range.
        :param metadata: Initial MetaData with potential Burst identification inside.
        """
        operations = []

        available_args, group = self._prepare_group(project_id, existing_dt_group, kwargs)
        if len(available_args) > TvbProfile.current.MAX_RANGE_NUMBER:
            raise LaunchException("Too big range specified. You should limit the"
                                  " resulting operations to %d" % TvbProfile.current.MAX_RANGE_NUMBER)
        else:
            self.logger.debug("Launching a range with %d operations..." % len(available_args))
        group_id = None
        if group is not None:
            group_id = group.id
        metadata, user_group = self._prepare_metadata(metadata, category, group, kwargs)

        self.logger.debug("Saving Operation(userId=" + str(user_id) + ",projectId=" + str(project_id) + "," +
                          str(metadata) + ",algorithmId=" + str(algorithm.id) + ", ops_group= " + str(group_id) + ")")

        visible_operation = visible and category.display is False
        meta_str = json.dumps(metadata)
        for (one_set_of_args, range_vals) in available_args:
            range_values = json.dumps(range_vals) if range_vals else None
            operation = model.Operation(user_id, project_id, algorithm.id,
                                        json.dumps(one_set_of_args, cls=MapAsJson.MapAsJsonEncoder), meta_str,
                                        op_group_id=group_id, user_group=user_group, range_values=range_values)
            operation.visible = visible_operation
            operations.append(operation)
        operations = dao.store_entities(operations)

        if group is not None:
            burst_id = None
            if DataTypeMetaData.KEY_BURST in metadata:
                burst_id = metadata[DataTypeMetaData.KEY_BURST]
            if existing_dt_group is None:
                datatype_group = model.DataTypeGroup(group, operation_id=operations[0].id, fk_parent_burst=burst_id,
                                                     state=metadata[DataTypeMetaData.KEY_STATE])
                dao.store_entity(datatype_group)
            else:
                # Reset count
                existing_dt_group.count_results = None
                dao.store_entity(existing_dt_group)

        return operations, group


    def prepare_operations_for_workflowsteps(self, workflow_step_list, workflows, user_id, burst_id,
                                             project_id, group, sim_operations):
        """
        Create and store Operation entities from a list of Workflow Steps.
        len(workflows) x len(workflow_step_list) Operations will be generated.
        For every step in workflow_step_list, one OperationGroup and one DataTypeGroup
        will be created (in the case of a PSE).
        """

        for step in workflow_step_list:
            operation_group = None
            if (group is not None) and not isinstance(step, model.WorkflowStepView):
                operation_group = model.OperationGroup(project_id=project_id, ranges=group.range_references)
                operation_group = dao.store_entity(operation_group)

            operation = None
            metadata = {DataTypeMetaData.KEY_BURST: burst_id}
            algo_category = dao.get_algorithm_by_id(step.fk_algorithm)
            if algo_category is not None:
                algo_category = algo_category.algorithm_category

            for wf_idx, workflow in enumerate(workflows):
                cloned_w_step = step.clone()
                cloned_w_step.fk_workflow = workflow.id
                dynamic_params = cloned_w_step.dynamic_param
                op_params = cloned_w_step.static_param
                op_params.update(dynamic_params)
                range_values = None
                group_id = None
                if operation_group is not None:
                    group_id = operation_group.id
                    range_values = sim_operations[wf_idx].range_values

                if not isinstance(step, model.WorkflowStepView):
                    ## For visualization steps, do not create operations, as those are not really needed.
                    metadata, user_group = self._prepare_metadata(metadata, algo_category, operation_group, op_params)
                    operation = model.Operation(user_id, project_id, step.fk_algorithm,
                                                json.dumps(op_params, cls=MapAsJson.MapAsJsonEncoder),
                                                meta=json.dumps(metadata),
                                                op_group_id=group_id, range_values=range_values, user_group=user_group)
                    operation.visible = step.step_visible
                    operation = dao.store_entity(operation)
                    cloned_w_step.fk_operation = operation.id

                dao.store_entity(cloned_w_step)

            if operation_group is not None and operation is not None:
                datatype_group = model.DataTypeGroup(operation_group, operation_id=operation.id,
                                                     fk_parent_burst=burst_id,
                                                     state=metadata[DataTypeMetaData.KEY_STATE])
                dao.store_entity(datatype_group)


    def initiate_prelaunch(self, operation, adapter_instance, temp_files, **kwargs):
        """
        Public method.
        This should be the common entry point for calling an adapter method.
        """
        result_msg = ""
        try:
            unique_id = None
            if self.ATT_UID in kwargs:
                unique_id = kwargs[self.ATT_UID]
            filtered_kwargs = adapter_instance.prepare_ui_inputs(kwargs)
            self.logger.debug("Launching operation " + str(operation.id) + " with " + str(filtered_kwargs))
            operation = dao.get_operation_by_id(operation.id)   # Load Lazy fields

            params = dict()
            for k, value_ in filtered_kwargs.items():
                params[str(k)] = value_

            disk_space_per_user = TvbProfile.current.MAX_DISK_SPACE
            pending_op_disk_space = dao.compute_disk_size_for_started_ops(operation.fk_launched_by)
            user_disk_space = dao.compute_user_generated_disk_size(operation.fk_launched_by)    # From kB to Bytes
            available_space = disk_space_per_user - pending_op_disk_space - user_disk_space

            result_msg, nr_datatypes = adapter_instance._prelaunch(operation, unique_id, available_space, **params)
            operation = dao.get_operation_by_id(operation.id)
            ## Update DB stored kwargs for search purposes, to contain only valuable params (no unselected options)
            operation.parameters = json.dumps(kwargs)
            operation.mark_complete(model.STATUS_FINISHED)
            if nr_datatypes > 0:
                #### Write operation meta-XML only if some results are returned
                self.file_helper.write_operation_metadata(operation)
            dao.store_entity(operation)
            self._remove_files(temp_files)

        except zipfile.BadZipfile as excep:
            msg = "The uploaded file is not a valid ZIP!"
            self._handle_exception(excep, temp_files, msg, operation)
        except TVBException as excep:
            self._handle_exception(excep, temp_files, excep.message, operation)
        except MemoryError:
            msg = ("Could not execute operation because there is not enough free memory." +
                   " Please adjust operation parameters and re-launch it.")
            self._handle_exception(Exception(msg), temp_files, msg, operation)
        except Exception as excep1:
            msg = "Could not launch Operation with the given input data!"
            self._handle_exception(excep1, temp_files, msg, operation)

        ### Try to find next workflow Step. It might throw WorkflowException
        next_op_id = self.workflow_service.prepare_next_step(operation.id)
        self.launch_operation(next_op_id)
        return result_msg


    def _send_to_cluster(self, operations, adapter_instance, current_username="******"):
        """ Initiate operation on cluster"""
        for operation in operations:
            try:
                BACKEND_CLIENT.execute(str(operation.id), current_username, adapter_instance)
            except Exception as excep:
                self._handle_exception(excep, {}, "Could not start operation!", operation)

        return operations


    def launch_operation(self, operation_id, send_to_cluster=False, adapter_instance=None):
        """
        Method exposed for Burst-Workflow related calls.
        It is used for cascading operations in the same workflow.
        """
        if operation_id is not None:
            operation = dao.get_operation_by_id(operation_id)
            if adapter_instance is None:
                algorithm = operation.algorithm
                adapter_instance = ABCAdapter.build_adapter(algorithm)
            parsed_params = utils.parse_json_parameters(operation.parameters)

            if send_to_cluster:
                self._send_to_cluster([operation], adapter_instance, operation.user.username)
            else:
                self.initiate_prelaunch(operation, adapter_instance, {}, **parsed_params)


    def _handle_exception(self, exception, temp_files, message, operation=None):
        """
        Common way to treat exceptions:
            - remove temporary files, if any
            - set status ERROR on current operation (if any)
            - log exception
        """
        self.logger.exception(message)
        if operation is not None:
            self.workflow_service.persist_operation_state(operation, model.STATUS_ERROR, unicode(exception))
            self.workflow_service.update_executed_workflow_state(operation)
        self._remove_files(temp_files)
        exception.message = message
        raise exception, None, sys.exc_info()[2]  # Python 2 re-raise syntax, required to preserve the original stack trace


    def _remove_files(self, file_dictionary):
        """
        Remove any files that exist in the file_dictionary. 
        Currently used to delete temporary files created during an operation.
        """
        for pth in file_dictionary.itervalues():
            pth = str(pth)
            try:
                if os.path.exists(pth) and os.path.isfile(pth):
                    os.remove(pth)
                    self.logger.debug("We no longer need file:" + pth + " => deleted")
                else:
                    self.logger.warning("Trying to remove not existent file:" + pth)
            except OSError:
                self.logger.exception("Could not cleanup file!")


    @staticmethod
    def _range_name(range_no):
        return model.PARAM_RANGE_PREFIX + str(range_no)


    def _prepare_group(self, project_id, existing_dt_group, kwargs):
        """
        Create and store OperationGroup entity, or return None
        """
        # Standard ranges as accepted from UI
        range1_values = self.get_range_values(kwargs, self._range_name(1))
        range2_values = self.get_range_values(kwargs, self._range_name(2))
        available_args = self.__expand_arguments([(kwargs, None)], range1_values, self._range_name(1))
        available_args = self.__expand_arguments(available_args, range2_values, self._range_name(2))
        is_group = False
        ranges = []
        if self._range_name(1) in kwargs and range1_values is not None:
            is_group = True
            ranges.append(json.dumps((kwargs[self._range_name(1)], range1_values)))
        if self._range_name(2) in kwargs and range2_values is not None:
            is_group = True
            ranges.append(json.dumps((kwargs[self._range_name(2)], range2_values)))
        # Now for additional ranges which might be the case for the 'model exploration'
        last_range_idx = 3
        ranger_name = self._range_name(last_range_idx)
        while ranger_name in kwargs:
            values_for_range = self.get_range_values(kwargs, ranger_name)
            available_args = self.__expand_arguments(available_args, values_for_range, ranger_name)
            last_range_idx += 1
            ranger_name = self._range_name(last_range_idx)
        if last_range_idx > 3:
            ranges = []  # Since we only have 3 fields in db for this just hide it
        if not is_group:
            group = None
        elif existing_dt_group is None:
            group = model.OperationGroup(project_id=project_id, ranges=ranges)
            group = dao.store_entity(group)
        else:
            group = existing_dt_group.parent_operation_group

        return available_args, group


    def get_range_values(self, kwargs, ranger_name):
        """
        For the ranger given by ranger_name look in kwargs and return
        the array with all the possible values.
        """
        if ranger_name not in kwargs:
            return None
        if str(kwargs[ranger_name]) not in kwargs:
            return None

        range_values = []
        try:
            range_data = json.loads(str(kwargs[str(kwargs[ranger_name])]))
        except Exception:
            try:
                range_data = [x.strip() for x in str(kwargs[str(kwargs[ranger_name])]).split(',') if len(x.strip()) > 0]
                return range_data
            except Exception:
                self.logger.exception("Could not launch operation !")
                raise LaunchException("Could not launch with no data from:" + str(ranger_name))
        if type(range_data) in (list, tuple):
            return range_data

        if (constants.ATT_MINVALUE in range_data) and (constants.ATT_MAXVALUE in range_data):
            lo_val = float(range_data[constants.ATT_MINVALUE])
            hi_val = float(range_data[constants.ATT_MAXVALUE])
            step = float(range_data[constants.ATT_STEP])
            range_values = list(Range(lo=lo_val, hi=hi_val, step=step, mode=Range.MODE_INCLUDE_BOTH))

        else:
            for possible_value in range_data:
                if range_data[possible_value]:
                    range_values.append(possible_value)
        return range_values


    @staticmethod
    def __expand_arguments(arguments_list, range_values, range_title):
        """
        Parse the arguments submitted from the UI (flattened form).
        If any ranger is found, return a list of argument sets, one for each possible operation.
        """
        if range_values is None:
            return arguments_list
        result = []
        for value in range_values:
            for args, range_ in arguments_list:
                kw_new = copy(args)
                range_new = copy(range_)
                kw_new[kw_new[range_title]] = value
                if range_new is None:
                    range_new = {}
                range_new[kw_new[range_title]] = value
                del kw_new[range_title]
                result.append((kw_new, range_new))
        return result


    ##########################################################################################
    ######## Methods related to stopping and restarting operations start here ################
    ##########################################################################################

    def stop_operation(self, operation_id):
        """
        Stop the operation given by the operation id.
        """
        return BACKEND_CLIENT.stop_operation(int(operation_id))
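
A minimal usage sketch for the service above (hypothetical identifiers; assumes a configured TVB profile, a populated database, and the dao / ABCAdapter names already used in this listing):

# Hypothetical ids and entities -- not taken from the example above.
service = OperationService()
current_user = dao.get_user_by_id(1)                    # assumed dao accessor / user id
algorithm = dao.get_algorithm_by_id(1)                  # assumed algorithm id
adapter_instance = ABCAdapter.build_adapter(algorithm)  # as done in launch_operation
service.initiate_operation(current_user, project_id=1,
                           adapter_instance=adapter_instance,
                           temporary_storage='/tmp',    # scratch dir for uploaded files
                           visible=True)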
Example #18
    def __init__(self):
        self.operation_service = OperationService()
        self.workflow_service = WorkflowService()
        self.logger = get_logger(self.__class__.__module__)
class WorkflowTest(TransactionalTestCase):
    """
    Test that workflow conversion methods are valid.
    """
    def setUp(self):
        """
        Sets up the testing environment;
        saves config file;
        creates a test user, a test project;
        creates burst, operation, flow and workflow services
        """
        self.test_user = TestFactory.create_user()
        self.test_project = TestFactory.create_project(self.test_user)
        self.workflow_service = WorkflowService()
        self.burst_service = BurstService()
        self.operation_service = OperationService()
        self.flow_service = FlowService()

    def tearDown(self):
        """
        Remove project folders and clean up database.
        """
        FilesHelper().remove_project_structure(self.test_project.name)
        self.delete_project_folders()

    def __create_complex_workflow(self, workflow_step_list):
        """
        Creates a burst with a complex workflow with a given list of workflow steps.
        :param workflow_step_list: a list of workflow steps that will be used in the
            creation of a new workflow for a new burst
        """
        burst_config = TestFactory.store_burst(self.test_project.id)

        stored_dt = datatypes_factory.DatatypesFactory()._store_datatype(
            Datatype1())

        first_step_algorithm = self.flow_service.get_algorithm_by_module_and_class(
            "tvb.tests.framework.adapters.testadapter1",
            "TestAdapterDatatypeInput")
        metadata = {DataTypeMetaData.KEY_BURST: burst_config.id}
        kwargs = {"test_dt_input": stored_dt.gid, 'test_non_dt_input': '0'}
        operations, group = self.operation_service.prepare_operations(
            self.test_user.id, self.test_project.id, first_step_algorithm,
            first_step_algorithm.algorithm_category, metadata, **kwargs)

        workflows = self.workflow_service.create_and_store_workflow(
            project_id=self.test_project.id,
            burst_id=burst_config.id,
            simulator_index=0,
            simulator_id=first_step_algorithm.id,
            operations=operations)
        self.operation_service.prepare_operations_for_workflowsteps(
            workflow_step_list, workflows, self.test_user.id, burst_config.id,
            self.test_project.id, group, operations)
        # Fire the first op
        if len(operations) > 0:
            self.operation_service.launch_operation(operations[0].id, False)
        return burst_config.id

    def test_workflow_generation(self):
        """
        A simple test checking that a workflow is created and run;
        no dynamic parameters are passed. In this case we create a two-step
        workflow: step1 - tvb.tests.framework.adapters.testadapter2.TestAdapter2
                  step2 - tvb.tests.framework.adapters.testadapter1.TestAdapter1
        The first adapter doesn't return anything and the second returns one
        tvb.datatypes.datatype1.Datatype1 instance. We check that the steps
        are actually run by checking that two operations are created and that
        one dataType is stored.
        """
        workflow_step_list = [
            TestFactory.create_workflow_step(
                "tvb.tests.framework.adapters.testadapter2",
                "TestAdapter2",
                step_index=1,
                static_kwargs={"test2": 2}),
            TestFactory.create_workflow_step(
                "tvb.tests.framework.adapters.testadapter1",
                "TestAdapter1",
                step_index=2,
                static_kwargs={
                    "test1_val1": 1,
                    "test1_val2": 1
                })
        ]
        self.__create_complex_workflow(workflow_step_list)
        stored_datatypes = dao.get_datatypes_in_project(self.test_project.id)
        self.assertTrue(
            len(stored_datatypes) == 2,
            "DataType from second step was not stored.")
        self.assertTrue(stored_datatypes[0].type == 'Datatype1',
                        "Wrong type was stored.")
        self.assertTrue(stored_datatypes[1].type == 'Datatype1',
                        "Wrong type was stored.")

        finished, started, error, _, _ = dao.get_operation_numbers(
            self.test_project.id)
        self.assertEqual(
            finished, 3,
            "Didnt start operations for both adapters in workflow.")
        self.assertEqual(started, 0,
                         "Some operations from workflow didnt finish.")
        self.assertEqual(error, 0,
                         "Some operations finished with error status.")

    def test_workflow_dynamic_params(self):
        """
        A simple test just for the fact that dynamic parameters are passed properly
        between two workflow steps: 
                  step1 - tvb.tests.framework.adapters.testadapter1.TestAdapter1
                  step2 - tvb.tests.framework.adapters.testadapter3.TestAdapter3
        The first adapter returns a tvb.datatypes.datatype1.Datatype1 instance. 
        The second adapter has this passed as a dynamic workflow parameter.
        We check that the steps are actually run by checking that two operations
        are created and that two dataTypes are stored.
        """
        workflow_step_list = [
            TestFactory.create_workflow_step(
                "tvb.tests.framework.adapters.testadapter1",
                "TestAdapter1",
                step_index=1,
                static_kwargs={
                    "test1_val1": 1,
                    "test1_val2": 1
                }),
            TestFactory.create_workflow_step(
                "tvb.tests.framework.adapters.testadapter3",
                "TestAdapter3",
                step_index=2,
                dynamic_kwargs={
                    "test": {
                        wf_cfg.DATATYPE_INDEX_KEY: 0,
                        wf_cfg.STEP_INDEX_KEY: 1
                    }
                })
        ]

        self.__create_complex_workflow(workflow_step_list)
        stored_datatypes = dao.get_datatypes_in_project(self.test_project.id)
        self.assertTrue(
            len(stored_datatypes) == 3,
            "DataType from all step were not stored.")
        for result_row in stored_datatypes:
            self.assertTrue(result_row.type in ['Datatype1', 'Datatype2'],
                            "Wrong type was stored.")

        finished, started, error, _, _ = dao.get_operation_numbers(
            self.test_project.id)
        self.assertEqual(
            finished, 3,
            "Didn't start operations for both adapters in workflow.")
        self.assertEqual(started, 0,
                         "Some operations from workflow didn't finish.")
        self.assertEqual(error, 0,
                         "Some operations finished with error status.")

    def test_configuration2workflow(self):
        """
        Test building a WorkflowStep from a WorkflowStepConfiguration. Make sure all the data is
        correctly passed. Also check that any base_wf_step is added to the dynamic parameters' step index.
        """
        workflow_step = TestFactory.create_workflow_step(
            "tvb.tests.framework.adapters.testadapter1",
            "TestAdapter1",
            static_kwargs={"static_param": "test"},
            dynamic_kwargs={
                "dynamic_param": {
                    wf_cfg.STEP_INDEX_KEY: 0,
                    wf_cfg.DATATYPE_INDEX_KEY: 0
                }
            },
            step_index=1,
            base_step=5)
        self.assertEqual(workflow_step.step_index, 1,
                         "Wrong step index in created workflow step.")
        self.assertEqual(workflow_step.static_param, {'static_param': 'test'},
                         'Different static parameters on step.')
        self.assertEqual(
            workflow_step.dynamic_param, {
                'dynamic_param': {
                    wf_cfg.STEP_INDEX_KEY: 5,
                    wf_cfg.DATATYPE_INDEX_KEY: 0
                }
            },
            "Dynamic parameters not saved properly, or base workflow index not added to step index."
        )

    def test_create_workflow(self):
        """
        Test that a workflow with all the associated workflow steps is actually created.
        """
        workflow_step_list = [
            TestFactory.create_workflow_step(
                "tvb.tests.framework.adapters.testadapter2",
                "TestAdapter2",
                step_index=1,
                static_kwargs={"test2": 2}),
            TestFactory.create_workflow_step(
                "tvb.tests.framework.adapters.testadapter1",
                "TestAdapter1",
                step_index=2,
                static_kwargs={
                    "test1_val1": 1,
                    "test1_val2": 1
                })
        ]
        burst_id = self.__create_complex_workflow(workflow_step_list)
        workflow_entities = dao.get_workflows_for_burst(burst_id)
        self.assertTrue(
            len(workflow_entities) == 1,
            "For some reason workflow was not stored in database.")
        workflow_steps = dao.get_workflow_steps(workflow_entities[0].id)
        self.assertEqual(len(workflow_steps),
                         len(workflow_step_list) + 1,
                         "Wrong number of workflow steps created.")
Example #20
class BurstController(BurstBaseController):
    """
    Controller class for Burst-Pages.
    """


    def __init__(self):
        BurstBaseController.__init__(self)
        self.burst_service = BurstService()
        self.workflow_service = WorkflowService()
        self.context = SelectedAdapterContext()

        ## Cache simulator Tree, Algorithm and AlgorithmGroup, for performance reasons.
        algorithm, self.cached_simulator_algo_group = self.flow_service.get_algorithm_by_module_and_class(
            SIMULATOR_MODULE, SIMULATOR_CLASS)
        self.cached_simulator_algorithm_id = algorithm.id


    @property
    @context_selected
    def cached_simulator_input_tree(self):
        """
        Cache Simulator's input tree, for performance reasons.
        Without a restart, the introspected tree will not change across executions.
        :returns: Simulator's Input Tree (copy from cache or just loaded)
        """
        cached_simulator_tree = common.get_from_session(common.KEY_CACHED_SIMULATOR_TREE)
        if cached_simulator_tree is None:
            cached_simulator_tree = self.flow_service.prepare_adapter(common.get_current_project().id,
                                                                      self.cached_simulator_algo_group)[1]
            common.add2session(common.KEY_CACHED_SIMULATOR_TREE, cached_simulator_tree)
        return copy.deepcopy(cached_simulator_tree)


    @expose_page
    @settings
    @context_selected
    def index(self):
        """Get on burst main page"""
        # TODO: reuse load_burst here for consistency.
        template_specification = dict(mainContent="burst/main_burst", title="Simulation Cockpit",
                                      baseUrl=TvbProfile.current.web.BASE_URL,
                                      includedResources='project/included_resources')
        portlets_list = self.burst_service.get_available_portlets()
        session_stored_burst = common.get_from_session(common.KEY_BURST_CONFIG)
        if session_stored_burst is None or session_stored_burst.id is None:
            if session_stored_burst is None:
                session_stored_burst = self.burst_service.new_burst_configuration(common.get_current_project().id)
                common.add2session(common.KEY_BURST_CONFIG, session_stored_burst)

            adapter_interface = self.cached_simulator_input_tree
            if session_stored_burst is not None:
                current_data = session_stored_burst.get_all_simulator_values()[0]
                adapter_interface = ABCAdapter.fill_defaults(adapter_interface, current_data, True)
                ### Add simulator tree to session to be available in filters
                self.context.add_adapter_to_session(self.cached_simulator_algo_group, adapter_interface, current_data)
            template_specification['inputList'] = adapter_interface

        selected_portlets = session_stored_burst.update_selected_portlets()
        template_specification['burst_list'] = self.burst_service.get_available_bursts(common.get_current_project().id)
        template_specification['portletList'] = portlets_list
        template_specification['selectedPortlets'] = json.dumps(selected_portlets)
        template_specification['draw_hidden_ranges'] = True
        template_specification['burstConfig'] = session_stored_burst

        ### Prepare PSE available metrics
        ### We put here all available algorithms, because the metrics select area is a generic one, 
        ### and not loaded with every Burst Group change in history.
        algo_group = self.flow_service.get_algorithm_by_module_and_class(MEASURE_METRICS_MODULE,
                                                                         MEASURE_METRICS_CLASS)[1]
        adapter_instance = ABCAdapter.build_adapter(algo_group)
        if adapter_instance is not None and hasattr(adapter_instance, 'available_algorithms'):
            template_specification['available_metrics'] = [metric_name for metric_name
                                                           in adapter_instance.available_algorithms.keys()]
        else:
            template_specification['available_metrics'] = []

        template_specification[common.KEY_PARAMETERS_CONFIG] = False
        template_specification[common.KEY_SECTION] = 'burst'
        return self.fill_default_attributes(template_specification)


    @expose_fragment('burst/burst_history')
    def load_burst_history(self):
        """
        Load the available burst that are stored in the database at this time.
        This is one workaround for the 'chrome-back' problem.
        """
        session_burst = common.get_from_session(common.KEY_BURST_CONFIG)
        return {'burst_list': self.burst_service.get_available_bursts(common.get_current_project().id),
                'selectedBurst': session_burst.id}


    @cherrypy.expose
    @handle_error(redirect=False)
    def get_selected_burst(self):
        """
        Return the burst that is currently stored in session.
        This is one workaround for the 'chrome-back' problem.
        """
        session_burst = common.get_from_session(common.KEY_BURST_CONFIG)
        if session_burst.id:
            return str(session_burst.id)
        else:
            return 'None'


    @expose_fragment('burst/portlet_configure_parameters')
    def get_portlet_configurable_interface(self, index_in_tab):
        """
        From the position given by the tab index and the index from that tab, 
        get the portlet configuration and build the configurable interface
        for that portlet.
        """
        burst_config = common.get_from_session(common.KEY_BURST_CONFIG)
        tab_index = burst_config.selected_tab
        portlet_config = burst_config.tabs[tab_index].portlets[int(index_in_tab)]
        portlet_interface = self.burst_service.build_portlet_interface(portlet_config, common.get_current_project().id)

        full_portlet_input_tree = []
        for entry in portlet_interface:
            full_portlet_input_tree.extend(entry.interface)
        self.context.add_portlet_to_session(full_portlet_input_tree)

        portlet_interface = {"adapters_list": portlet_interface,
                             common.KEY_PARAMETERS_CONFIG: False,
                             common.KEY_SESSION_TREE: self.context.KEY_PORTLET_CONFIGURATION}
        return self.fill_default_attributes(portlet_interface)


    @expose_fragment('burst/portlets_preview')
    def portlet_tab_display(self, **data):
        """
        When saving a new configuration of tabs, check if any of the old 
        portlets are still present, and if that is the case use their 
        parameters configuration. 
        
        For all the new portlets add entries in the burst configuration. 
        Also remove old portlets that are no longer saved.
        """
        tab_portlets_list = json.loads(data['tab_portlets_list'])
        burst_config = common.get_from_session(common.KEY_BURST_CONFIG)
        selected_tab_idx = burst_config.selected_tab
        for tab_idx in xrange(len(tab_portlets_list)):
            current_tab = burst_config.tabs[tab_idx]
            ### When a configuration already exists and new portlets are selected,
            ### first check whether any configuration was saved for each portlet
            ### and, if so, use it. If none is present, create a new one.
            for idx_in_tab in xrange(len(tab_portlets_list[tab_idx])):
                portlet_id = tab_portlets_list[tab_idx][idx_in_tab][0]
                portlet_name = tab_portlets_list[tab_idx][idx_in_tab][1]
                if portlet_id >= 0:
                    saved_config = current_tab.portlets[idx_in_tab]
                    if saved_config is None or saved_config.portlet_id != portlet_id:
                        current_tab.portlets[idx_in_tab] = self.burst_service.new_portlet_configuration(portlet_id,
                                                                                    tab_idx, idx_in_tab, portlet_name)
                    else:
                        saved_config.visualizer.ui_name = portlet_name
                else:
                    current_tab.portlets[idx_in_tab] = None
            # For generating the HTML, get the corresponding portlet for each id.
        selected_tab_portlets = []
        saved_selected_tab = burst_config.tabs[selected_tab_idx]
        for portlet in saved_selected_tab.portlets:
            if portlet:
                portlet_id = int(portlet.portlet_id)
                portlet_entity = self.burst_service.get_portlet_by_id(portlet_id)
                portlet_entity.name = portlet.name
                selected_tab_portlets.append(portlet_entity)

        return {'portlet_tab_list': selected_tab_portlets}
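
    # Hedged illustration (not in the original source): 'tab_portlets_list' above is
    # expected to be a JSON-encoded matrix of [portlet_id, portlet_name] pairs per tab,
    # e.g. [[[0, "TimeSeries"], [-1, "None"]], [[2, "Pse"], [-1, "None"]]],
    # where a negative portlet_id marks an empty slot in that tab.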


    @expose_fragment('burst/portlets_preview')
    def get_configured_portlets(self):
        """
        Return the portlets for one given tab. This is used when changing
        from tab to tab and selecting which portlets will be displayed.
        """
        burst_config = common.get_from_session(common.KEY_BURST_CONFIG)
        if burst_config is None:
            return {'portlet_tab_list': []}

        tab_idx = burst_config.selected_tab
        tab_portlet_list = []
        for portlet_cfg in burst_config.tabs[int(tab_idx)].portlets:
            if portlet_cfg is not None:
                portlet_entity = self.burst_service.get_portlet_by_id(portlet_cfg.portlet_id)
                portlet_entity.name = portlet_cfg.name
                tab_portlet_list.append(portlet_entity)
        return {'portlet_tab_list': tab_portlet_list}


    @expose_json
    def change_selected_tab(self, tab_nr):
        """
        Set :param tab_nr: as the currently selected tab in the stored burst
        configuration. 
        """
        common.get_from_session(common.KEY_BURST_CONFIG).selected_tab = int(tab_nr)


    @expose_json
    def get_portlet_session_configuration(self):
        """
        Get the current configuration of portlets stored in session for this burst,
        as a json.
        """
        burst_entity = common.get_from_session(common.KEY_BURST_CONFIG)
        returned_configuration = burst_entity.update_selected_portlets()
        return returned_configuration


    @cherrypy.expose
    @handle_error(redirect=False)
    def save_parameters(self, index_in_tab, **data):
        """
        Save parameters
        
        :param index_in_tab: the index of the configured portlet in the selected tab (the tab
            index itself is read from the burst configuration stored in session)
        :param data: a {"portlet_parameters": json_string} dict, where json_string is a JSON-encoded
            dictionary {"name": value} representing the configuration of the current portlet

        Having these inputs, the current method updates the configuration of the portlet at the
        corresponding tab position from the burst configuration in session.
        """
        burst_config = common.get_from_session(common.KEY_BURST_CONFIG)
        tab_nr = burst_config.selected_tab
        old_portlet_config = burst_config.tabs[int(tab_nr)].portlets[int(index_in_tab)]
        data = json.loads(data['portlet_parameters'])

        # Replace all void entries with 'None'
        for entry in data:
            if data[entry] == '':
                data[entry] = None

        need_relaunch = self.burst_service.update_portlet_configuration(old_portlet_config, data)
        if need_relaunch:
            #### Reset Burst Configuration into an entity not persisted (id = None for all)
            common.add2session(common.KEY_BURST_CONFIG, burst_config.clone())
            return "relaunchView"
        else:
            self.workflow_service.store_workflow_step(old_portlet_config.visualizer)
            return "noRelaunch"


    @expose_json
    def rename_burst(self, burst_id, burst_name):
        """
        Rename the burst given by burst_id, setting its new name to
        burst_name.
        """
        validation_result = self._is_burst_name_ok(burst_name)
        if validation_result is True:
            self.burst_service.rename_burst(burst_id, burst_name)
            return {'success': "Simulation successfully renamed!"}
        else:
            return {'error': validation_result}


    @expose_json
    def launch_burst(self, launch_mode, burst_name, **data):
        """
        Do the actual burst launch, using the configuration saved in current session.
        :param launch_mode: new/branch/continue
        :param burst_name: user-given burst name. It can be empty (in which case we will fill it in as simulation_x)
        :param data: kwargs for simulation input parameters.
        """
        data = json.loads(data['simulator_parameters'])
        burst_config = common.get_from_session(common.KEY_BURST_CONFIG)

        ## Validate new burst-name
        if launch_mode == LAUNCH_NEW and burst_name != 'none_undefined':
            validation_result = self._is_burst_name_ok(burst_name)
            if validation_result is True:
                burst_config.name = burst_name
            else:
                return {'error': validation_result}

        ## Fill all parameters 
        user_id = common.get_logged_user().id
        data[common.KEY_ADAPTER] = self.cached_simulator_algorithm_id
        burst_config.update_simulator_configuration(data)
        burst_config.fk_project = common.get_current_project().id

        ## Do the asynchronous launch
        burst_id, burst_name = self.burst_service.launch_burst(burst_config, 0, self.cached_simulator_algorithm_id,
                                                               user_id, launch_mode)
        return {'id': burst_id, 'name': burst_name}
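
    # Hedged illustration, not part of the original controller: a sketch of the kwargs
    # 'launch_burst' above expects. The simulator parameter names inside the JSON string
    # are hypothetical.
    @staticmethod
    def _example_launch_burst_payload():
        """Build hypothetical kwargs, as the UI would POST them."""
        simulator_params = {"simulation_length": "1000.0", "model": "Generic2dOscillator"}
        return {"simulator_parameters": json.dumps(simulator_params)}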


    @expose_json
    def load_burst(self, burst_id):
        """
        Given a burst id, return its running status, whether it was an operation group, and the selected tab.
        This is called when a burst is selected in the history,
        when returning from a burst config page (model param or noise)
        and when the status of running simulations is polled.
        Besides returning these values it updates the session stored burst.

        A burst configuration has 2 meanings.
        It is a staging configuration for a new burst (stored in transients in the session).
        It is the configuration used to launch a simulation, plus its running status (stored in the db).
        This method has to merge the two meanings.
        If the requested burst_id is different from the one held in the session,
        then the burst config is loaded from the db, discarding any session stored config.
        If the id is the same then the session config is kept.
        """
        try:
            burst_id = int(burst_id)
            old_burst = common.get_from_session(common.KEY_BURST_CONFIG)
            burst, group_gid = self.burst_service.load_burst(burst_id)

            if old_burst.id == burst_id:
                # This function was called to reload the current burst.
                # Merge session config into the db config. Overwrite all transient fields
                burst.simulator_configuration = old_burst.simulator_configuration
                burst.dynamic_ids = old_burst.dynamic_ids

            burst.selected_tab = old_burst.selected_tab
            common.add2session(common.KEY_BURST_CONFIG, burst)
            return {'status': burst.status, 'group_gid': group_gid, 'selected_tab': burst.selected_tab}
        except Exception:
            ### Most probably the Burst was removed. Delete it from session, so that the
            ### client gets a proper response on refresh.
            self.logger.exception("Error loading burst")
            common.remove_from_session(common.KEY_BURST_CONFIG)
            raise


    @expose_json
    def get_history_status(self, **data):
        """
        For each burst id received, get the status and return it.
        """
        return self.burst_service.update_history_status(json.loads(data['burst_ids']))


    @cherrypy.expose
    @handle_error(redirect=False)
    @check_user
    def cancel_or_remove_burst(self, burst_id):
        """
        Cancel or Remove the burst entity given by burst_id.
        :returns 'reset-new': When currently selected burst was removed. JS will need to reset selection to a new entry
        :returns 'canceled': When current burst was still running and was just stopped.
        :returns 'done': When no action is required on the client.
        """
        burst_id = int(burst_id)
        session_burst = common.get_from_session(common.KEY_BURST_CONFIG)
        removed = self.burst_service.cancel_or_remove_burst(burst_id)
        if removed:
            if session_burst.id == burst_id:
                return "reset-new"
            return 'done'
        else:
            # Burst was stopped since it was running
            return 'canceled'


    @expose_json
    def get_selected_portlets(self):
        """
        Get the selected portlets for the loaded burst.
        """
        burst = common.get_from_session(common.KEY_BURST_CONFIG)
        return burst.update_selected_portlets()


    @cherrypy.expose
    @handle_error(redirect=False)
    def get_visualizers_for_operation_id(self, op_id, width, height):
        """
        Method called from parameters exploration page in case a burst with a range of parameters
        for the simulator was launched. 
        :param op_id: the selected operation id from the parameter space exploration.
        :param width: the width of the right side display
        :param height: the height of the right side display
        
        Given these parameters first get the workflow to which op_id belongs, then load the portlets
        from that workflow as the current burst configuration. Width and height are used to get the
        proper sizes for the visualization iFrames. 
        """
        burst_config = common.get_from_session(common.KEY_BURST_CONFIG)
        burst_config = self.burst_service.load_tab_configuration(burst_config, op_id)
        common.add2session(common.KEY_BURST_CONFIG, burst_config)
        return self.load_configured_visualizers(width, height)


    @expose_fragment("burst/portlets_view")
    def load_configured_visualizers(self, width='800', height='600'):
        """
        Load all the visualization steps for this tab. Width and height represent
        the dimensions of the right side Div, so that we can compute for each iFrame
        the maximum size its visualizer can take.
        """
        burst = common.get_from_session(common.KEY_BURST_CONFIG)
        selected_tab = burst.selected_tab
        tab_portlet_list = []
        for portlet_cfg in burst.tabs[int(selected_tab)].portlets:
            if portlet_cfg is not None:
                tab_portlet_list.append(self.__portlet_config2portlet_entity(portlet_cfg))
        return {'status': burst.status, 'portlet_tab_list': tab_portlet_list,
                'max_width': int(width), 'max_height': int(height), 'model': tvb.core.entities.model}


    @expose_fragment("burst/portlet_visualization_template")
    def check_status_for_visualizer(self, selected_tab, index_in_tab, width='800', height='600'):
        """
        This call is used to check on a regular basis if the data for a certain portlet is 
        available for visualization. Should return the status and the HTML to be displayed.
        """
        burst = common.get_from_session(common.KEY_BURST_CONFIG)
        target_portlet = burst.tabs[int(selected_tab)].portlets[int(index_in_tab)]
        target_portlet = self.__portlet_config2portlet_entity(target_portlet)
        template_dict = {'portlet_entity': target_portlet, 'model': tvb.core.entities.model,
                         'width': int(width), 'height': int(height)}
        return template_dict


    @expose_json
    def reset_burst(self):
        """
        Called when a click on the "New Burst" entry happens in the UI.
        This will generate an empty new Burst Configuration.
        """
        common.remove_from_session(common.KEY_CACHED_SIMULATOR_TREE)
        new_burst = self.burst_service.new_burst_configuration(common.get_current_project().id)
        common.add2session(common.KEY_BURST_CONFIG, new_burst)


    @cherrypy.expose
    @handle_error(redirect=False)
    def copy_burst(self, burst_id):
        """
        When currently selected entry is a valid Burst, create a clone of that Burst.
        """
        common.remove_from_session(common.KEY_CACHED_SIMULATOR_TREE)
        base_burst = self.burst_service.load_burst(burst_id)[0]
        if (base_burst is None) or (base_burst.id is None):
            return self.reset_burst()
        common.add2session(common.KEY_BURST_CONFIG, base_burst.clone())
        return base_burst.name


    @expose_fragment("burst/base_portlets_iframe")
    def launch_visualization(self, index_in_tab, frame_width, frame_height, method_name="generate_preview"):
        """
        Launch the visualization for this tab and index in tab. The width and height represent the maximum size of the inner
        visualization canvas so that it can fit in the iFrame.
        """
        result = {}
        try:
            burst = common.get_from_session(common.KEY_BURST_CONFIG)
            visualizer = burst.tabs[burst.selected_tab].portlets[int(index_in_tab)].visualizer
            result = self.burst_service.launch_visualization(visualizer, float(frame_width),
                                                             float(frame_height), method_name)[0]
            result['launch_success'] = True
        except Exception, ex:
            result['launch_success'] = False
            result['error_msg'] = str(ex)
            self.logger.exception("Could not launch Portlet Visualizer...")

        return self.fill_default_attributes(result)
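
A hedged, self-contained sketch of the tabs-by-portlets grid the controller above manipulates; the classes below are simplified stand-ins for the real BurstConfiguration model, and the grid sizes are assumptions.

class TabSketch(object):
    """One tab holding a fixed number of portlet slots (None marks an empty slot)."""

    def __init__(self, portlets_per_tab=4):
        self.portlets = [None] * portlets_per_tab


class BurstConfigSketch(object):
    """Minimal stand-in: a list of tabs plus the currently selected tab index."""

    def __init__(self, number_of_tabs=4):
        self.tabs = [TabSketch() for _ in range(number_of_tabs)]
        self.selected_tab = 0

    def set_portlet(self, tab_idx, index_in_tab, portlet_cfg):
        self.tabs[tab_idx].portlets[index_in_tab] = portlet_cfg

# cfg = BurstConfigSketch()
# cfg.set_portlet(0, 1, "portlet-config")
# cfg.tabs[cfg.selected_tab].portlets is now [None, 'portlet-config', None, None]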
Example #21
class BurstService(object):
    """
    Service layer for Burst related entities.
    """

    def __init__(self):
        self.operation_service = OperationService()
        self.workflow_service = WorkflowService()
        self.logger = get_logger(self.__class__.__module__)
        self.cache_portlet_configurators = {}


    def build_portlet_interface(self, portlet_configuration, project_id):
        """
        From a portlet_id and a project_id, first build the portlet
        entity, then get its configurable interface.
        
        :param portlet_configuration: a portlet configuration entity. It holds at
            least the portlet_id; in case any default parameters were saved,
            they can be rebuilt from the analyzer / visualizer parameters
        :param project_id: the id of the current project   
            
        :returns: the portlet interface will be of the following form::
            [{'interface': adapter_interface, 
            'prefix': prefix_for_parameter_names, 
            'subalg': {algorithm_field_name: default_algorithm_value},
            'algo_group': algorithm_group,
            'alg_ui_name': displayname},
            ......]
            A list of dictionaries for each adapter that makes up the portlet.
            
        """
        portlet_configurer = self._get_portlet_configurer(portlet_configuration.portlet_id)
        portlet_interface = portlet_configurer.get_configurable_interface()

        for adapter_conf in portlet_interface:
            interface = adapter_conf.interface
            itree_mngr = InputTreeManager()
            interface = itree_mngr.fill_input_tree_with_options(interface, project_id,
                                                                adapter_conf.stored_adapter.fk_category)
            adapter_conf.interface = itree_mngr.prepare_param_names(interface)

        portlet_configurer.update_default_values(portlet_interface, portlet_configuration)
        portlet_configurer.prefix_adapters_parameters(portlet_interface)

        return portlet_interface


    def _get_portlet_configurer(self, portlet_id):

        if portlet_id not in self.cache_portlet_configurators:

            portlet_entity = dao.get_portlet_by_id(portlet_id)
            if portlet_entity is None:
                raise InvalidPortletConfiguration("No portlet entity located in database with id=%s. " % portlet_id)

            self.cache_portlet_configurators[portlet_id] = PortletConfigurer(portlet_entity)
            self.logger.debug("Recently parsed portlet XML:" + str([portlet_entity]))

        return self.cache_portlet_configurators[portlet_id]


    def update_portlet_configuration(self, portlet_configuration, submited_parameters):
        """
        :param portlet_configuration: the portlet configuration that needs to be updated
        :param submited_parameters: the parameters as submitted from the UI; a
            dictionary in the form:
            {'dynamic' : {name:value pairs}, 'static' : {name:value pairs}}
            
        All names are prefixed with adapter specific generated prefix.
        """
        portlet_configurer = self._get_portlet_configurer(portlet_configuration.portlet_id)
        return portlet_configurer.update_portlet_configuration(portlet_configuration, submited_parameters)


    def new_burst_configuration(self, project_id):
        """
        Return a new burst configuration entity with all the default values.
        """
        burst_configuration = model.BurstConfiguration(project_id)
        burst_configuration.selected_tab = 0

        # Now set the default portlets for the specified burst configuration.
        # The default portlets are specified in the __init__.py script from tvb root.
        for tab_idx, value in DEFAULT_PORTLETS.items():
            for sel_idx, portlet_identifier in value.items():
                portlet = BurstService.get_portlet_by_identifier(portlet_identifier)
                if portlet is not None:
                    portlet_configuration = self.new_portlet_configuration(portlet.id, tab_idx, sel_idx,
                                                                           portlet.algorithm_identifier)
                    burst_configuration.set_portlet(tab_idx, sel_idx, portlet_configuration)

        return burst_configuration
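
    # Hedged illustration (an assumption, not from the original source): DEFAULT_PORTLETS
    # is expected to map a tab index to {index_in_tab: portlet_identifier}, e.g.
    # {0: {0: "TimeSeries"}} would place the TimeSeries portlet in the first slot
    # of the first tab.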


    @staticmethod
    def _store_burst_config(burst_config):
        """
        Store a burst configuration entity.
        """
        burst_config.prepare_before_save()
        saved_entity = dao.store_entity(burst_config)
        return saved_entity.id


    @staticmethod
    def get_available_bursts(project_id):
        """
        Return all the bursts for the current project.
        """
        bursts = dao.get_bursts_for_project(project_id, page_size=MAX_BURSTS_DISPLAYED) or []
        for burst in bursts:
            burst.prepare_after_load()
        return bursts


    @staticmethod
    def populate_burst_disk_usage(bursts):
        """
        Adds a disk_usage field to each burst object.
        The disk usage is computed as the sum of the sizes of the datatypes generated by that burst.
        """
        sizes = dao.compute_bursts_disk_size([b.id for b in bursts])
        for b in bursts:
            b.disk_size = format_bytes_human(sizes[b.id])


    @staticmethod
    def rename_burst(burst_id, new_name):
        """
        Rename the burst given by burst_id, setting its new name to
        new_name.
        """
        burst = dao.get_burst_by_id(burst_id)
        burst.name = new_name
        dao.store_entity(burst)


    def load_burst(self, burst_id):
        """
        :param burst_id: the id of the burst that should be loaded
        
        Having this input the method should:
        
            - load the entity from the DB
            - get all the workflow steps for the saved burst id
            - go through the visualization workflow steps to create the tab
                configuration of the burst using the tab_index and index_in_tab 
                fields saved on each workflow_step
                
        """
        burst = dao.get_burst_by_id(burst_id)
        burst.prepare_after_load()
        burst.reset_tabs()
        burst_workflows = dao.get_workflows_for_burst(burst.id)

        group_gid = None
        if len(burst_workflows) == 1:
            # A simple burst with no range parameters
            burst = self.__populate_tabs_from_workflow(burst, burst_workflows[0])
        elif len(burst_workflows) > 1:
            # A burst with a range of values created multiple workflows, and we need
            # to launch a parameter space exploration with the resulting group.
            self.__populate_tabs_from_workflow(burst, burst_workflows[0])
            executed_steps = dao.get_workflow_steps(burst_workflows[0].id)

            operation = dao.get_operation_by_id(executed_steps[0].fk_operation)
            if operation.operation_group:
                workflow_group = dao.get_datatypegroup_by_op_group_id(operation.operation_group.id)
                group_gid = workflow_group.gid
        return burst, group_gid

    @staticmethod
    def __populate_tabs_from_workflow(burst_entity, workflow):
        """
        Given a burst and a workflow populate the tabs of the burst with the PortletConfigurations
        generated from the steps of the workflow.
        """
        visualizers = dao.get_visualization_steps(workflow.id)
        for entry in visualizers:
            ## For each visualize step, also load all of the analyze steps.
            portlet_cfg = PortletConfiguration(entry.fk_portlet)
            portlet_cfg.set_visualizer(entry)
            analyzers = dao.get_workflow_steps_for_position(entry.fk_workflow, entry.tab_index, entry.index_in_tab)
            portlet_cfg.set_analyzers(analyzers)
            burst_entity.tabs[entry.tab_index].portlets[entry.index_in_tab] = portlet_cfg
        return burst_entity

    def load_tab_configuration(self, burst_entity, op_id):
        """
        Given a burst entity and an operation id, find the workflow to which the op_id
        belongs, and then load the burst_entity's tab configuration with those workflow steps.
        """
        originating_workflow = dao.get_workflow_for_operation_id(op_id)
        burst_entity = self.__populate_tabs_from_workflow(burst_entity, originating_workflow)
        return burst_entity


    def new_portlet_configuration(self, portlet_id, tab_nr=-1, index_in_tab=-1, portlet_name='Default'):
        """
        Return a new portlet configuration entity with default parameters.
        
        :param portlet_id: the id of the portlet for which a configuration will be stored
        :param tab_nr: the index of the currently selected tab
        :param index_in_tab: the index from the currently selected tab
        """
        portlet_configurer = self._get_portlet_configurer(portlet_id)
        configuration = portlet_configurer.create_new_portlet_configuration(portlet_name)
        for wf_step in configuration.analyzers:
            wf_step.tab_index = tab_nr
            wf_step.index_in_tab = index_in_tab
        configuration.visualizer.tab_index = tab_nr
        configuration.visualizer.index_in_tab = index_in_tab
        return configuration


    @staticmethod
    def get_available_portlets():
        """
        :returns: a list of all the available portlet entities
        """
        return dao.get_available_portlets()

    @staticmethod
    def get_portlet_by_id(portlet_id):
        """
        :returns: the portlet entity with id equal to portlet_id
        """
        return dao.get_portlet_by_id(portlet_id)

    @staticmethod
    def get_portlet_by_identifier(portlet_identifier):
        """
        :returns: the portlet entity with the algorithm identifier equal to portlet_identifier
        """
        return dao.get_portlet_by_identifier(portlet_identifier)


    def launch_burst(self, burst_configuration, simulator_index, simulator_id, user_id, launch_mode=LAUNCH_NEW):
        """
        Given a burst configuration and all the necessary data do the actual launch.
        
        :param burst_configuration: BurstConfiguration   
        :param simulator_index: the position the simulator will take within the workflow step list. This is needed
            so that the rest of the portlet workflow steps know which steps their dynamic parameters come from.
        :param simulator_id: the id of the simulator adapter as stored in the DB. It's needed to load the simulator algo
            group and category that are then passed to the launcher's prepare_operation method.
        :param user_id: the id of the user that launched this burst
        :param launch_mode: new/branch/continue
        """
        ## 1. Prepare BurstConfiguration entity
        if launch_mode == LAUNCH_NEW:
            ## Fully new entity for new simulation
            burst_config = burst_configuration.clone()
            if burst_config.name is None:
                new_id = dao.get_max_burst_id() + 1
                burst_config.name = 'simulation_' + str(new_id)
        else:
            ## Branch or Continue simulation
            burst_config = burst_configuration
            simulation_state = dao.get_generic_entity(SIMULATION_DATATYPE_MODULE + "." + SIMULATION_DATATYPE_CLASS,
                                                      burst_config.id, "fk_parent_burst")
            if simulation_state is None or len(simulation_state) < 1:
                exc = BurstServiceException("Simulation State not found for %s, "
                                            "thus we are unable to branch from it!" % burst_config.name)
                self.logger.error(exc)
                raise exc

            simulation_state = simulation_state[0]
            burst_config.update_simulation_parameter("simulation_state", simulation_state.gid)
            burst_config = burst_configuration.clone()

            count = dao.count_bursts_with_name(burst_config.name, burst_config.fk_project)
            burst_config.name = burst_config.name + "_" + launch_mode + str(count)

        ## 2. Create Operations and do the actual launch  
        if launch_mode in [LAUNCH_NEW, LAUNCH_BRANCH]:
            ## New Burst entry in the history
            burst_id = self._store_burst_config(burst_config)
            thread = threading.Thread(target=self._async_launch_and_prepare,
                                      kwargs={'burst_config': burst_config,
                                              'simulator_index': simulator_index,
                                              'simulator_id': simulator_id,
                                              'user_id': user_id})
            thread.start()
            return burst_id, burst_config.name
        else:
            ## Continue simulation
            ## TODO
            return burst_config.id, burst_config.name
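
    # Hedged note, inferred from the code above: in branch/continue mode the cloned
    # burst is renamed '<old name>_<launch_mode><count>'; assuming launch_mode renders
    # as 'branch' and one burst named 'simulation_3' already exists, the clone would
    # become 'simulation_3_branch1'.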


    @transactional
    def _prepare_operations(self, burst_config, simulator_index, simulator_id, user_id):
        """
        Prepare all required operations for burst launch.
        """
        project_id = burst_config.fk_project
        burst_id = burst_config.id
        workflow_step_list = []
        starting_index = simulator_index + 1

        sim_algo = FlowService().get_algorithm_by_identifier(simulator_id)
        metadata = {DataTypeMetaData.KEY_BURST: burst_id}
        launch_data = burst_config.get_all_simulator_values()[0]
        operations, group = self.operation_service.prepare_operations(user_id, project_id, sim_algo,
                                                                      sim_algo.algorithm_category, metadata,
                                                                      **launch_data)
        group_launched = group is not None
        if group_launched:
            starting_index += 1

        for tab in burst_config.tabs:
            for portlet_cfg in tab.portlets:
                ### For each portlet configuration stored, update the step index,
                ### and also change the dynamic parameters' step indexes to point
                ### to the simulator outputs.
                if portlet_cfg is not None:
                    analyzers = portlet_cfg.analyzers
                    visualizer = portlet_cfg.visualizer
                    for entry in analyzers:
                        entry.step_index = starting_index
                        self.workflow_service.set_dynamic_step_references(entry, simulator_index)
                        workflow_step_list.append(entry)
                        starting_index += 1
                    ### Change the dynamic parameters to point to the last adapter from this portlet execution.
                    visualizer.step_visible = False
                    if len(workflow_step_list) > 0 and isinstance(workflow_step_list[-1], model.WorkflowStep):
                        self.workflow_service.set_dynamic_step_references(visualizer, workflow_step_list[-1].step_index)
                    else:
                        self.workflow_service.set_dynamic_step_references(visualizer, simulator_index)
                    ### Only a single (non-group) launch gets the visualization step; otherwise it is useless.
                    if not group_launched:
                        workflow_step_list.append(visualizer)

        if group_launched:
            ### For a group of operations, make sure the metric for the PSE view
            ### is also computed, immediately after the simulation.
            metric_algo = FlowService().get_algorithm_by_module_and_class(MEASURE_METRICS_MODULE, MEASURE_METRICS_CLASS)
            metric_interface = FlowService().prepare_adapter(project_id, metric_algo)
            dynamics = {}
            for entry in metric_interface:
                # We have a select that should be the dataType and a select multiple with the 
                # required metric algorithms to be evaluated. Only dynamic parameter should be
                # the select type.
                if entry[KEY_TYPE] == TYPE_SELECT:
                    dynamics[entry[KEY_NAME]] = {WorkflowStepConfiguration.DATATYPE_INDEX_KEY: 0,
                                                 WorkflowStepConfiguration.STEP_INDEX_KEY: simulator_index}
            metric_step = model.WorkflowStep(algorithm_id=metric_algo.id, step_index=simulator_index + 1,
                                             static_param={}, dynamic_param=dynamics)
            metric_step.step_visible = False
            workflow_step_list.insert(0, metric_step)

        workflows = self.workflow_service.create_and_store_workflow(project_id, burst_id, simulator_index,
                                                                    simulator_id, operations)
        self.operation_service.prepare_operations_for_workflowsteps(workflow_step_list, workflows, user_id,
                                                                    burst_id, project_id, group, operations)
        operation_ids = [operation.id for operation in operations]
        return operation_ids
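
    # Hedged summary of the step-index layout assembled above (assuming simulator_index == 0):
    #   step 0          - simulator operation(s)
    #   step 1          - PSE metric step, present only when a group/range was launched
    #   next steps      - portlet analyzers, one index each
    #   visualizers     - step_visible=False; their dynamic references point at the last
    #                     analyzer of their portlet, or at the simulator when the portlet
    #                     has no analyzers.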


    def _async_launch_and_prepare(self, burst_config, simulator_index, simulator_id, user_id):
        """
        Prepare operations asynchronously.
        """
        try:
            operation_ids = self._prepare_operations(burst_config, simulator_index, simulator_id, user_id)
            self.logger.debug("Starting a total of %s workflows" % (len(operation_ids, )))
            wf_errs = 0
            for operation_id in operation_ids:
                try:
                    OperationService().launch_operation(operation_id, True)
                except Exception as excep:
                    self.logger.error(excep)
                    wf_errs += 1
                    self.workflow_service.mark_burst_finished(burst_config, error_message=str(excep))

            self.logger.debug("Finished launching workflows. " + str(len(operation_ids) - wf_errs) +
                              " were launched successfully, " + str(wf_errs) + " had error on pre-launch steps")
        except Exception as excep:
            self.logger.error(excep)
            self.workflow_service.mark_burst_finished(burst_config, error_message=str(excep))


    @staticmethod
    def launch_visualization(visualization, frame_width=None, frame_height=None, is_preview=True):
        """
        :param visualization: a visualization workflow step
        """
        dynamic_params = visualization.dynamic_param
        static_params = visualization.static_param
        parameters_dict = static_params
        current_project_id = 0
        # Current operation id is needed for the export mechanism. So far, just use
        # the operation of the workflow_step from which the inputs are taken.
        for param in dynamic_params:
            step_index = dynamic_params[param][WorkflowStepConfiguration.STEP_INDEX_KEY]
            datatype_index = dynamic_params[param][WorkflowStepConfiguration.DATATYPE_INDEX_KEY]
            referred_workflow_step = dao.get_workflow_step_by_step_index(visualization.fk_workflow, step_index)
            referred_operation_id = referred_workflow_step.fk_operation
            referred_operation = dao.get_operation_by_id(referred_operation_id)
            current_project_id = referred_operation.fk_launched_in
            if type(datatype_index) is IntType:
                # Entry is the output of a previous step.
                datatypes = dao.get_results_for_operation(referred_operation_id)
                parameters_dict[param] = datatypes[datatype_index].gid
            else:
                # Entry is the input of a previous step.
                parameters_dict[param] = json.loads(referred_operation.parameters)[datatype_index]
        algorithm = dao.get_algorithm_by_id(visualization.fk_algorithm)
        adapter_instance = ABCAdapter.build_adapter(algorithm)
        adapter_instance.current_project_id = current_project_id
        prepared_inputs = adapter_instance.prepare_ui_inputs(parameters_dict)
        if frame_width is not None:
            prepared_inputs[ABCDisplayer.PARAM_FIGURE_SIZE] = (frame_width, frame_height)

        if is_preview:
            result = adapter_instance.generate_preview(**prepared_inputs)
        else:
            result = adapter_instance.launch(**prepared_inputs)
        return result, parameters_dict
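
    # Hedged example, not in the original source: a dynamic entry such as
    # {'step_index': 1, 'datatype_index': 0} resolves to the gid of the first result of
    # the operation behind workflow step 1, while a non-integer datatype_index (e.g. the
    # hypothetical name 'input_surface') is read from that operation's stored input
    # parameters instead.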


    def update_history_status(self, id_list):
        """
        For each burst_id received in the id_list read new status from DB and return a list [id, new_status] pair.
        """
        result = []
        for b_id in id_list:
            burst = dao.get_burst_by_id(b_id)
            if burst is not None:
                # Only prepare the entity once we know it still exists.
                burst.prepare_after_load()
                if burst.status == burst.BURST_RUNNING:
                    running_time = datetime.now() - burst.start_time
                else:
                    running_time = burst.finish_time - burst.start_time
                running_time = format_timedelta(running_time, most_significant2=False)

                if burst.status == burst.BURST_ERROR:
                    msg = 'Check Operations page for error Message'
                else:
                    msg = ''
                result.append([burst.id, burst.status, burst.is_group, msg, running_time])
            else:
                self.logger.debug("Could not find burst with id=" + str(b_id) + ". Might have been deleted by user!!")
        return result
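
    # Hedged example of one returned row: [42, 'running', False, '', '0:05:12'] -
    # burst id, status, is_group flag, message, formatted running time (the exact
    # time format depends on format_timedelta).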


    def stop_burst(self, burst_entity):
        """
        Stop all the entities for the current burst and set the burst status to canceled.
        """
        burst_wfs = dao.get_workflows_for_burst(burst_entity.id)
        any_stopped = False
        for workflow in burst_wfs:
            wf_steps = dao.get_workflow_steps(workflow.id)
            for step in wf_steps:
                if step.fk_operation is not None:
                    self.logger.debug("We will stop operation: %d" % step.fk_operation)
                    any_stopped = self.operation_service.stop_operation(step.fk_operation) or any_stopped

        if any_stopped and burst_entity.status != burst_entity.BURST_CANCELED:
            self.workflow_service.mark_burst_finished(burst_entity, model.BurstConfiguration.BURST_CANCELED)
            return True
        return False


    @transactional
    def cancel_or_remove_burst(self, burst_id):
        """
        Cancel (if burst is still running) or Remove the burst given by burst_id.
        :returns: True when the Remove operation was performed and False when it was a Cancel.
        """
        burst_entity = dao.get_burst_by_id(burst_id)
        if burst_entity.status == burst_entity.BURST_RUNNING:
            self.stop_burst(burst_entity)
            return False

        service = ProjectService()
        ## Remove each DataType in current burst.
        ## We cannot leave it all to cascade deletes, because they won't work on SQLite for mapped dataTypes.
        datatypes = dao.get_all_datatypes_in_burst(burst_id)
        ## Get operations linked to current burst before removing the burst or else 
        ##    the burst won't be there to identify operations any more.
        remaining_ops = dao.get_operations_in_burst(burst_id)

        # Remove the burst first, to delete workflow steps that still hold foreign keys to operations.
        correct = dao.remove_entity(burst_entity.__class__, burst_id)
        if not correct:
            raise RemoveDataTypeException("Could not remove Burst entity!")

        for datatype in datatypes:
            service.remove_datatype(burst_entity.fk_project, datatype.gid, False)

        ## Remove all remaining Operations.
        correct = True
        remaining_op_groups = set()
        project = dao.get_project_by_id(burst_entity.fk_project)

        for oper in remaining_ops:
            is_remaining = dao.get_generic_entity(oper.__class__, oper.id)
            if len(is_remaining) == 0:
                ### Operation was already removed by cascade.
                continue
            if oper.fk_operation_group is not None and oper.fk_operation_group not in remaining_op_groups:
                is_remaining = dao.get_generic_entity(model.OperationGroup, oper.fk_operation_group)
                if len(is_remaining) > 0:
                    remaining_op_groups.add(oper.fk_operation_group)
                    correct = correct and dao.remove_entity(model.OperationGroup, oper.fk_operation_group)
            correct = correct and dao.remove_entity(oper.__class__, oper.id)
            service.structure_helper.remove_operation_data(project.name, oper.id)

        if not correct:
            raise RemoveDataTypeException("Could not remove Burst because a linked operation could not be dropped!!")
        return True


    @staticmethod
    def get_portlet_status(portlet_cfg):
        """ 
        Get the status of a portlet configuration. 
        """
        if portlet_cfg.analyzers:
            for analyze_step in portlet_cfg.analyzers:
                operation = dao.try_get_operation_by_id(analyze_step.fk_operation)
                if operation is None:
                    return model.STATUS_ERROR, "Operation has been removed"
                if operation.status != model.STATUS_FINISHED:
                    return operation.status, operation.additional_info or ''
        else:
            ## Simulator is the first step, so decide whether we are waiting for input or output.
            visualizer = portlet_cfg.visualizer
            wait_on_outputs = False
            for entry in visualizer.dynamic_param:
                if type(visualizer.dynamic_param[entry][WorkflowStepConfiguration.DATATYPE_INDEX_KEY]) == IntType:
                    wait_on_outputs = True
                    break
            if wait_on_outputs:
                simulator_step = dao.get_workflow_step_by_step_index(visualizer.fk_workflow, 0)
                operation = dao.try_get_operation_by_id(simulator_step.fk_operation)
                if operation is None:
                    error_msg = ("At least one simulation result was not found, it might have been removed. <br\>"
                                 "You can copy and relaunch current simulation, if you are interested in having "
                                 "your results re-computed.")
                    return model.STATUS_ERROR, error_msg
                else:
                    return operation.status, operation.additional_info or ''
        return model.STATUS_FINISHED, ''
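
A hedged, self-contained sketch of the running-time computation used by update_history_status above; format_timedelta is approximated here with str(), and the status string is an assumption.

from datetime import datetime


def running_time_for(status, start_time, finish_time, now=None):
    """Elapsed time: now - start while running, otherwise finish - start."""
    now = now or datetime.now()
    delta = (now - start_time) if status == 'running' else (finish_time - start_time)
    return str(delta)

# running_time_for('finished', datetime(2015, 1, 1, 10, 0), datetime(2015, 1, 1, 10, 5))
# returns '0:05:00'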
Example #22
class TestWorkflow(TransactionalTestCase):
    """
    Test that workflow conversion methods are valid.
    """


    def transactional_setup_method(self):
        """
        Sets up the testing environment;
        saves config file;
        creates a test user, a test project;
        creates burst, operation, flow and workflow services
        """
        self.test_user = TestFactory.create_user()
        self.test_project = TestFactory.create_project(self.test_user)
        self.workflow_service = WorkflowService()
        self.burst_service = BurstService()
        self.operation_service = OperationService()
        self.flow_service = FlowService()


    def transactional_teardown_method(self):
        """
        Remove project folders and clean up database.
        """
        FilesHelper().remove_project_structure(self.test_project.name)
        self.delete_project_folders()


    def __create_complex_workflow(self, workflow_step_list):
        """
        Creates a burst with a complex workflow with a given list of workflow steps.
        :param workflow_step_list: a list of workflow steps that will be used in the
            creation of a new workflow for a new burst
        """
        burst_config = TestFactory.store_burst(self.test_project.id)

        stored_dt = datatypes_factory.DatatypesFactory()._store_datatype(Datatype1())

        first_step_algorithm = self.flow_service.get_algorithm_by_module_and_class("tvb.tests.framework.adapters.testadapter1",
                                                                                   "TestAdapterDatatypeInput")
        metadata = {DataTypeMetaData.KEY_BURST: burst_config.id}
        kwargs = {"test_dt_input": stored_dt.gid, 'test_non_dt_input': '0'}
        operations, group = self.operation_service.prepare_operations(self.test_user.id, self.test_project.id,
                                                                      first_step_algorithm,
                                                                      first_step_algorithm.algorithm_category,
                                                                      metadata, **kwargs)

        workflows = self.workflow_service.create_and_store_workflow(project_id=self.test_project.id,
                                                                    burst_id=burst_config.id,
                                                                    simulator_index=0,
                                                                    simulator_id=first_step_algorithm.id,
                                                                    operations=operations)
        self.operation_service.prepare_operations_for_workflowsteps(workflow_step_list, workflows, self.test_user.id,
                                                                    burst_config.id, self.test_project.id, group,
                                                                    operations)
        # Fire the first operation.
        if len(operations) > 0:
            self.operation_service.launch_operation(operations[0].id, False)
        return burst_config.id


    def test_workflow_generation(self):
        """
        A simple test just for the fact that a workflow is created and run,
        no dynamic parameters are passed. In this case we create a two steps
        workflow: step1 - tvb.tests.framework.adapters.testadapter2.TestAdapter2
                  step2 - tvb.tests.framework.adapters.testadapter1.TestAdapter1
        The first adapter doesn't return anything and the second returns one
        tvb.datatypes.datatype1.Datatype1 instance. We check that the steps
        are actually run by checking that two operations are created and that
        one dataType is stored.
        """
        workflow_step_list = [TestFactory.create_workflow_step("tvb.tests.framework.adapters.testadapter2",
                                                               "TestAdapter2", step_index=1,
                                                               static_kwargs={"test2": 2}),
                              TestFactory.create_workflow_step("tvb.tests.framework.adapters.testadapter1",
                                                               "TestAdapter1", step_index=2,
                                                               static_kwargs={"test1_val1": 1, "test1_val2": 1})]
        self.__create_complex_workflow(workflow_step_list)
        stored_datatypes = dao.get_datatypes_in_project(self.test_project.id)
        assert len(stored_datatypes) == 2, "DataType from second step was not stored."
        assert stored_datatypes[0].type == 'Datatype1', "Wrong type was stored."
        assert stored_datatypes[1].type == 'Datatype1', "Wrong type was stored."

        finished, started, error, _, _ = dao.get_operation_numbers(self.test_project.id)
        assert finished == 3, "Didn't start operations for both adapters in workflow."
        assert started == 0, "Some operations from workflow didn't finish."
        assert error == 0, "Some operations finished with error status."


    def test_workflow_dynamic_params(self):
        """
        A simple test just for the fact that dynamic parameters are passed properly
        between two workflow steps: 
                  step1 - tvb.tests.framework.adapters.testadapter1.TestAdapter1
                  step2 - tvb.tests.framework.adapters.testadapter3.TestAdapter3
        The first adapter returns a tvb.datatypes.datatype1.Datatype1 instance. 
        The second adapter has this passed as a dynamic workflow parameter.
        We check that the steps are actually run by checking that two operations 
        are created and that two dataTypes are stored.
        """
        workflow_step_list = [TestFactory.create_workflow_step("tvb.tests.framework.adapters.testadapter1",
                                                               "TestAdapter1", step_index=1,
                                                               static_kwargs={"test1_val1": 1, "test1_val2": 1}),
                              TestFactory.create_workflow_step("tvb.tests.framework.adapters.testadapter3",
                                                               "TestAdapter3", step_index=2,
                                                               dynamic_kwargs={
                                                                   "test": {wf_cfg.DATATYPE_INDEX_KEY: 0,
                                                                            wf_cfg.STEP_INDEX_KEY: 1}})]

        self.__create_complex_workflow(workflow_step_list)
        stored_datatypes = dao.get_datatypes_in_project(self.test_project.id)
        assert len(stored_datatypes) == 3, "DataTypes from all steps were not stored."
        for result_row in stored_datatypes:
            assert result_row.type in ['Datatype1', 'Datatype2'], "Wrong type was stored."

        finished, started, error, _, _ = dao.get_operation_numbers(self.test_project.id)
        assert finished == 3, "Didn't start operations for both adapters in workflow."
        assert started == 0, "Some operations from workflow didn't finish."
        assert error == 0, "Some operations finished with error status."


    def test_configuration2workflow(self):
        """
        Test building a WorkflowStep from a WorkflowStepConfiguration. Make sure all the data is
        correctly passed. Also check that base_step is added to the dynamic parameters' step index.
        """
        workflow_step = TestFactory.create_workflow_step("tvb.tests.framework.adapters.testadapter1", "TestAdapter1",
                                                         static_kwargs={"static_param": "test"},
                                                         dynamic_kwargs={"dynamic_param": {wf_cfg.STEP_INDEX_KEY: 0,
                                                                                           wf_cfg.DATATYPE_INDEX_KEY: 0}},
                                                         step_index=1, base_step=5)
        assert workflow_step.step_index == 1, "Wrong step index in created workflow step."
        assert workflow_step.static_param == {'static_param': 'test'}, 'Different static parameters on step.'
        assert workflow_step.dynamic_param == {'dynamic_param': {wf_cfg.STEP_INDEX_KEY: 5,
                                                                 wf_cfg.DATATYPE_INDEX_KEY: 0}}, \
            "Dynamic parameters not saved properly, or base workflow index not added to step index."


    def test_create_workflow(self):
        """
        Test that a workflow with all the associated workflow steps is actually created.
        """
        workflow_step_list = [TestFactory.create_workflow_step("tvb.tests.framework.adapters.testadapter2",
                                                               "TestAdapter2", step_index=1,
                                                               static_kwargs={"test2": 2}),
                              TestFactory.create_workflow_step("tvb.tests.framework.adapters.testadapter1",
                                                               "TestAdapter1", step_index=2,
                                                               static_kwargs={"test1_val1": 1, "test1_val2": 1})]
        burst_id = self.__create_complex_workflow(workflow_step_list)
        workflow_entities = dao.get_workflows_for_burst(burst_id)
        assert len(workflow_entities) == 1, "For some reason workflow was not stored in database."
        workflow_steps = dao.get_workflow_steps(workflow_entities[0].id)
        assert len(workflow_steps) == len(workflow_step_list) + 1, "Wrong number of workflow steps created."
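
The base_step arithmetic verified by test_configuration2workflow above is easy to miss: when a WorkflowStep is built, the base workflow step is added to every dynamic parameter's step index. Below is a minimal sketch of that transformation, with stand-in key constants rather than the real wf_cfg values:

    # Minimal sketch, not the TVB implementation: shows the base_step offset
    # that test_configuration2workflow asserts. Key names are stand-ins for
    # wf_cfg.STEP_INDEX_KEY / wf_cfg.DATATYPE_INDEX_KEY.
    STEP_INDEX_KEY = 'step_index'
    DATATYPE_INDEX_KEY = 'datatype_index'

    def shift_dynamic_params(dynamic_kwargs, base_step):
        """Return a copy where each referenced step index is offset by base_step."""
        shifted = {}
        for name, ref in dynamic_kwargs.items():
            shifted[name] = {STEP_INDEX_KEY: ref[STEP_INDEX_KEY] + base_step,
                             DATATYPE_INDEX_KEY: ref[DATATYPE_INDEX_KEY]}
        return shifted

    # With base_step=5, {STEP_INDEX_KEY: 0, DATATYPE_INDEX_KEY: 0} becomes
    # {STEP_INDEX_KEY: 5, DATATYPE_INDEX_KEY: 0}, as the test expects.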
Example #23
0
class BurstController(BurstBaseController):
    """
    Controller class for Burst-Pages.
    """


    def __init__(self):
        BurstBaseController.__init__(self)
        self.burst_service = BurstService()
        self.workflow_service = WorkflowService()
        self.context = SelectedAdapterContext()

        ## Cache simulator Tree and Algorithm for performance reasons.
        self.cached_simulator_algorithm = self.flow_service.get_algorithm_by_module_and_class(SIMULATOR_MODULE,
                                                                                              SIMULATOR_CLASS)


    @property
    @context_selected
    def cached_simulator_input_tree(self):
        """
        Cache Simulator's input tree, for performance reasons.
        Without a restart, the introspected tree will not differ between executions.
        :returns: Simulator's Input Tree (copy from cache or just loaded)
        """
        cached_simulator_tree = common.get_from_session(common.KEY_CACHED_SIMULATOR_TREE)
        if cached_simulator_tree is None:
            cached_simulator_tree = self.flow_service.prepare_adapter(common.get_current_project().id,
                                                                      self.cached_simulator_algorithm)
            common.add2session(common.KEY_CACHED_SIMULATOR_TREE, cached_simulator_tree)
        return copy.deepcopy(cached_simulator_tree)


    @expose_page
    @settings
    @context_selected
    def index(self):
        """Get on burst main page"""
        # todo : reuse load_burst here for consistency.
        template_specification = dict(mainContent="burst/main_burst", title="Simulation Cockpit",
                                      baseUrl=TvbProfile.current.web.BASE_URL,
                                      includedResources='project/included_resources')
        portlets_list = self.burst_service.get_available_portlets()
        session_stored_burst = common.get_from_session(common.KEY_BURST_CONFIG)
        if session_stored_burst is None or session_stored_burst.id is None:
            if session_stored_burst is None:
                session_stored_burst = self.burst_service.new_burst_configuration(common.get_current_project().id)
                common.add2session(common.KEY_BURST_CONFIG, session_stored_burst)

            adapter_interface = self.cached_simulator_input_tree
            if session_stored_burst is not None:
                current_data = session_stored_burst.get_all_simulator_values()[0]
                adapter_interface = InputTreeManager.fill_defaults(adapter_interface, current_data, True)
                ### Add simulator tree to session to be available in filters
                self.context.add_adapter_to_session(self.cached_simulator_algorithm, adapter_interface, current_data)
            template_specification['inputList'] = adapter_interface

        selected_portlets = session_stored_burst.update_selected_portlets()
        template_specification['burst_list'] = self.burst_service.get_available_bursts(common.get_current_project().id)
        template_specification['portletList'] = portlets_list
        template_specification['selectedPortlets'] = json.dumps(selected_portlets)
        template_specification['draw_hidden_ranges'] = True
        template_specification['burstConfig'] = session_stored_burst

        ### Prepare PSE available metrics
        ### We put here all available algorithms, because the metrics select area is a generic one, 
        ### and not loaded with every Burst Group change in history.
        algorithm = self.flow_service.get_algorithm_by_module_and_class(MEASURE_METRICS_MODULE, MEASURE_METRICS_CLASS)
        adapter_instance = ABCAdapter.build_adapter(algorithm)
        if adapter_instance is not None and hasattr(adapter_instance, 'available_algorithms'):
            template_specification['available_metrics'] = [metric_name for metric_name
                                                           in adapter_instance.available_algorithms.keys()]
        else:
            template_specification['available_metrics'] = []

        template_specification[common.KEY_PARAMETERS_CONFIG] = False
        template_specification[common.KEY_SECTION] = 'burst'
        return self.fill_default_attributes(template_specification)


    @expose_fragment('burst/burst_history')
    def load_burst_history(self):
        """
        Load the available bursts that are stored in the database at this time.
        This is one workaround for the 'chrome-back' problem.
        """
        session_burst = common.get_from_session(common.KEY_BURST_CONFIG)
        bursts = self.burst_service.get_available_bursts(common.get_current_project().id)
        self.burst_service.populate_burst_disk_usage(bursts)
        return {'burst_list': bursts,
                'selectedBurst': session_burst.id}


    @cherrypy.expose
    @handle_error(redirect=False)
    def get_selected_burst(self):
        """
        Return the burst that is currently stored in session.
        This is one workaround for the 'chrome-back' problem.
        """
        session_burst = common.get_from_session(common.KEY_BURST_CONFIG)
        if session_burst.id:
            return str(session_burst.id)
        else:
            return 'None'


    @expose_fragment('burst/portlet_configure_parameters')
    def get_portlet_configurable_interface(self, index_in_tab):
        """
        From the position given by the tab index and the index from that tab, 
        get the portlet configuration and build the configurable interface
        for that portlet.
        """
        burst_config = common.get_from_session(common.KEY_BURST_CONFIG)
        tab_index = burst_config.selected_tab
        portlet_config = burst_config.tabs[tab_index].portlets[int(index_in_tab)]
        portlet_interface = self.burst_service.build_portlet_interface(portlet_config, common.get_current_project().id)

        full_portlet_input_tree = []
        for entry in portlet_interface:
            full_portlet_input_tree.extend(entry.interface)
        self.context.add_portlet_to_session(full_portlet_input_tree)

        portlet_interface = {"adapters_list": portlet_interface,
                             common.KEY_PARAMETERS_CONFIG: False,
                             common.KEY_SESSION_TREE: self.context.KEY_PORTLET_CONFIGURATION}
        return self.fill_default_attributes(portlet_interface)


    @expose_fragment('burst/portlets_preview')
    def portlet_tab_display(self, **data):
        """
        When saving a new configuration of tabs, check if any of the old 
        portlets are still present, and if that is the case use their 
        parameters configuration. 
        
        For all the new portlets add entries in the burst configuration. 
        Also remove old portlets that are no longer saved.
        """
        tab_portlets_list = json.loads(data['tab_portlets_list'])
        burst_config = common.get_from_session(common.KEY_BURST_CONFIG)
        selected_tab_idx = burst_config.selected_tab
        for tab_idx in range(len(tab_portlets_list)):
            current_tab = burst_config.tabs[tab_idx]
            ### When a configuration already exists and new portlets are selected,
            ### first check if a configuration was saved for each portlet and,
            ### if so, use it. If none is present, create a new one.
            for idx_in_tab in range(len(tab_portlets_list[tab_idx])):
                portlet_id = tab_portlets_list[tab_idx][idx_in_tab][0]
                portlet_name = tab_portlets_list[tab_idx][idx_in_tab][1]
                if portlet_id >= 0:
                    saved_config = current_tab.portlets[idx_in_tab]
                    if saved_config is None or saved_config.portlet_id != portlet_id:
                        current_tab.portlets[idx_in_tab] = self.burst_service.new_portlet_configuration(portlet_id,
                                                                                    tab_idx, idx_in_tab, portlet_name)
                    else:
                        saved_config.visualizer.ui_name = portlet_name
                else:
                    current_tab.portlets[idx_in_tab] = None
            # For generating the HTML, get the corresponding portlet for each id.
        selected_tab_portlets = []
        saved_selected_tab = burst_config.tabs[selected_tab_idx]
        for portlet in saved_selected_tab.portlets:
            if portlet:
                portlet_id = int(portlet.portlet_id)
                portlet_entity = self.burst_service.get_portlet_by_id(portlet_id)
                portlet_entity.name = portlet.name
                selected_tab_portlets.append(portlet_entity)

        return {'portlet_tab_list': selected_tab_portlets}


    @expose_fragment('burst/portlets_preview')
    def get_configured_portlets(self):
        """
        Return the portlets for one given tab. This is used when changing
        from tab to tab and selecting which portlets will be displayed.
        """
        burst_config = common.get_from_session(common.KEY_BURST_CONFIG)
        if burst_config is None:
            return {'portlet_tab_list': []}

        tab_idx = burst_config.selected_tab
        tab_portlet_list = []
        for portlet_cfg in burst_config.tabs[int(tab_idx)].portlets:
            if portlet_cfg is not None:
                portlet_entity = self.burst_service.get_portlet_by_id(portlet_cfg.portlet_id)
                portlet_entity.name = portlet_cfg.name
                tab_portlet_list.append(portlet_entity)
        return {'portlet_tab_list': tab_portlet_list}


    @expose_json
    def change_selected_tab(self, tab_nr):
        """
        Set :param tab_nr: as the currently selected tab in the stored burst
        configuration. 
        """
        common.get_from_session(common.KEY_BURST_CONFIG).selected_tab = int(tab_nr)


    @expose_json
    def get_portlet_session_configuration(self):
        """
        Get the current configuration of portlets stored in session for this burst,
        as a json.
        """
        burst_entity = common.get_from_session(common.KEY_BURST_CONFIG)
        returned_configuration = burst_entity.update_selected_portlets()
        return returned_configuration


    @cherrypy.expose
    @handle_error(redirect=False)
    def save_parameters(self, index_in_tab, **data):
        """
        Save parameters
        
        :param index_in_tab: the index of the configured portlet in the selected tab
        :param data: the {"portlet_parameters": json_string} where json_string is a jsonified dictionary
            {"name": value}, representing the configuration of the current portlet

        Given these inputs, this method updates the configuration of the portlet at the
        corresponding tab position in the burst configuration in session. The tab index
        is read from the currently selected tab of the session burst configuration.
        """
        burst_config = common.get_from_session(common.KEY_BURST_CONFIG)
        tab_nr = burst_config.selected_tab
        old_portlet_config = burst_config.tabs[int(tab_nr)].portlets[int(index_in_tab)]
        data = json.loads(data['portlet_parameters'])

        # Replace all void entries with 'None'
        for entry in data:
            if data[entry] == '':
                data[entry] = None

        need_relaunch = self.burst_service.update_portlet_configuration(old_portlet_config, data)
        if need_relaunch:
            #### Reset Burst Configuration into an entity not persisted (id = None for all)
            common.add2session(common.KEY_BURST_CONFIG, burst_config.clone())
            return "relaunchView"
        else:
            self.workflow_service.store_workflow_step(old_portlet_config.visualizer)
            return "noRelaunch"


    @expose_json
    def rename_burst(self, burst_id, burst_name):
        """
        Rename the burst given by burst_id, setting its new name to
        burst_name.
        """
        validation_result = self._is_burst_name_ok(burst_name)
        if validation_result is True:
            self.burst_service.rename_burst(burst_id, burst_name)
            return {'success': "Simulation successfully renamed!"}
        else:
            return {'error': validation_result}


    @expose_json
    def launch_burst(self, launch_mode, burst_name, **data):
        """
        Do the actual burst launch, using the configuration saved in current session.
        :param launch_mode: new/branch/continue
        :param burst_name: user-given burst name. It can be empty (in which case we fill it with simulation_x)
        :param data: kwargs for simulation input parameters.
        """
        data = json.loads(data['simulator_parameters'])
        simulation_length = data['simulation_length']
        try:
            simulation_length = total_ms(simulation_length)
        except ValueError as e:
            return {'error': e.message}
        data['simulation_length'] = unicode(simulation_length)
        burst_config = common.get_from_session(common.KEY_BURST_CONFIG)

        ## Validate new burst-name
        if launch_mode == LAUNCH_NEW and burst_name != 'none_undefined':
            validation_result = self._is_burst_name_ok(burst_name)
            if validation_result is True:
                burst_config.name = burst_name
            else:
                return {'error': validation_result}

        ## Fill all parameters 
        user_id = common.get_logged_user().id
        data[common.KEY_ADAPTER] = self.cached_simulator_algorithm.id
        burst_config.update_simulator_configuration(data)
        burst_config.fk_project = common.get_current_project().id

        ## Do the asynchronous launch
        try:
            burst_id, burst_name = self.burst_service.launch_burst(burst_config, 0, self.cached_simulator_algorithm.id,
                                                                   user_id, launch_mode)
            return {'id': burst_id, 'name': burst_name}
        except BurstServiceException as e:
            self.logger.exception("Could not launch burst!")
            return {'error': e.message}
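
Note that rename_burst and launch_burst above compare the name validation result with 'is True' instead of relying on truthiness: _is_burst_name_ok returns either True or an error string, and a non-empty error string is also truthy. A minimal sketch of that convention (the length rule here is an assumed example, not the real validation):

    # Sketch of the validate-or-message convention used above; the real
    # _is_burst_name_ok is defined elsewhere in TVB and its rules may differ.
    def _is_burst_name_ok(burst_name):
        if not burst_name or len(burst_name) > 50:  # assumed limit, illustration only
            return "Please use a simulation name of 1-50 characters."
        return True

    result = _is_burst_name_ok("my simulation")
    if result is True:  # 'is True' is required: error strings are truthy too
        print("name accepted")
    else:
        print("error: %s" % result)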
Example #24
0
class BurstController(BurstBaseController):
    """
    Controller class for Burst-Pages.
    """


    def __init__(self):
        BurstBaseController.__init__(self)
        self.burst_service = BurstService()
        self.workflow_service = WorkflowService()
        self.context = SelectedAdapterContext()

        ## Cache simulator Tree and Algorithm for performance reasons.
        self.cached_simulator_algorithm = self.flow_service.get_algorithm_by_module_and_class(SIMULATOR_MODULE,
                                                                                              SIMULATOR_CLASS)


    @property
    @context_selected
    def cached_simulator_input_tree(self):
        """
        Cache Simulator's input tree, for performance reasons.
        Without a restart, the introspected tree will not differ between executions.
        :returns: Simulator's Input Tree (copy from cache or just loaded)
        """
        cached_simulator_tree = common.get_from_session(common.KEY_CACHED_SIMULATOR_TREE)
        if cached_simulator_tree is None:
            cached_simulator_tree = self.flow_service.prepare_adapter(common.get_current_project().id,
                                                                      self.cached_simulator_algorithm)
            common.add2session(common.KEY_CACHED_SIMULATOR_TREE, cached_simulator_tree)
        return copy.deepcopy(cached_simulator_tree)


    @expose_page
    @settings
    @context_selected
    def index(self):
        """Get on burst main page"""
        # todo : reuse load_burst here for consistency.
        template_specification = dict(mainContent="burst/main_burst", title="Simulation Cockpit",
                                      baseUrl=TvbProfile.current.web.BASE_URL,
                                      includedResources='project/included_resources')
        portlets_list = self.burst_service.get_available_portlets()
        session_stored_burst = common.get_from_session(common.KEY_BURST_CONFIG)
        if session_stored_burst is None or session_stored_burst.id is None:
            if session_stored_burst is None:
                session_stored_burst = self.burst_service.new_burst_configuration(common.get_current_project().id)
                common.add2session(common.KEY_BURST_CONFIG, session_stored_burst)

            adapter_interface = self.cached_simulator_input_tree
            if session_stored_burst is not None:
                current_data = session_stored_burst.get_all_simulator_values()[0]
                adapter_interface = InputTreeManager.fill_defaults(adapter_interface, current_data, True)
                ### Add simulator tree to session to be available in filters
                self.context.add_adapter_to_session(self.cached_simulator_algorithm, adapter_interface, current_data)
            template_specification['inputList'] = adapter_interface

        selected_portlets = session_stored_burst.update_selected_portlets()
        template_specification['burst_list'] = self.burst_service.get_available_bursts(common.get_current_project().id)
        template_specification['portletList'] = portlets_list
        template_specification['selectedPortlets'] = json.dumps(selected_portlets)
        template_specification['draw_hidden_ranges'] = True
        template_specification['burstConfig'] = session_stored_burst

        ### Prepare PSE available metrics
        ### We put here all available algorithms, because the metrics select area is a generic one, 
        ### and not loaded with every Burst Group change in history.
        algorithm = self.flow_service.get_algorithm_by_module_and_class(MEASURE_METRICS_MODULE, MEASURE_METRICS_CLASS)
        adapter_instance = ABCAdapter.build_adapter(algorithm)
        if adapter_instance is not None and hasattr(adapter_instance, 'available_algorithms'):
            template_specification['available_metrics'] = [metric_name for metric_name
                                                           in adapter_instance.available_algorithms.keys()]
        else:
            template_specification['available_metrics'] = []

        template_specification[common.KEY_PARAMETERS_CONFIG] = False
        template_specification[common.KEY_SECTION] = 'burst'
        return self.fill_default_attributes(template_specification)


    @expose_fragment('burst/burst_history')
    def load_burst_history(self):
        """
        Load the available bursts that are stored in the database at this time.
        This is one workaround for the 'chrome-back' problem.
        """
        session_burst = common.get_from_session(common.KEY_BURST_CONFIG)
        bursts = self.burst_service.get_available_bursts(common.get_current_project().id)
        self.burst_service.populate_burst_disk_usage(bursts)
        return {'burst_list': bursts,
                'selectedBurst': session_burst.id}


    @cherrypy.expose
    @handle_error(redirect=False)
    def get_selected_burst(self):
        """
        Return the burst that is currently stored in session.
        This is one workaround for the 'chrome-back' problem.
        """
        session_burst = common.get_from_session(common.KEY_BURST_CONFIG)
        if session_burst.id:
            return str(session_burst.id)
        else:
            return 'None'


    @expose_fragment('burst/portlet_configure_parameters')
    def get_portlet_configurable_interface(self, index_in_tab):
        """
        From the position given by the tab index and the index from that tab, 
        get the portlet configuration and build the configurable interface
        for that portlet.
        """
        burst_config = common.get_from_session(common.KEY_BURST_CONFIG)
        tab_index = burst_config.selected_tab
        portlet_config = burst_config.tabs[tab_index].portlets[int(index_in_tab)]
        portlet_interface = self.burst_service.build_portlet_interface(portlet_config, common.get_current_project().id)

        full_portlet_input_tree = []
        for entry in portlet_interface:
            full_portlet_input_tree.extend(entry.interface)
        self.context.add_portlet_to_session(full_portlet_input_tree)

        portlet_interface = {"adapters_list": portlet_interface,
                             common.KEY_PARAMETERS_CONFIG: False,
                             common.KEY_SESSION_TREE: self.context.KEY_PORTLET_CONFIGURATION}
        return self.fill_default_attributes(portlet_interface)


    @expose_fragment('burst/portlets_preview')
    def portlet_tab_display(self, **data):
        """
        When saving a new configuration of tabs, check if any of the old 
        portlets are still present, and if that is the case use their 
        parameters configuration. 
        
        For all the new portlets add entries in the burst configuration. 
        Also remove old portlets that are no longer saved.
        """
        tab_portlets_list = json.loads(data['tab_portlets_list'])
        burst_config = common.get_from_session(common.KEY_BURST_CONFIG)
        selected_tab_idx = burst_config.selected_tab
        for tab_idx in xrange(len(tab_portlets_list)):
            current_tab = burst_config.tabs[tab_idx]
            ### When a configuration already exists and new portlets are selected,
            ### first check if a configuration was saved for each portlet and,
            ### if so, use it. If none is present, create a new one.
            for idx_in_tab in xrange(len(tab_portlets_list[tab_idx])):
                portlet_id = tab_portlets_list[tab_idx][idx_in_tab][0]
                portlet_name = tab_portlets_list[tab_idx][idx_in_tab][1]
                if portlet_id >= 0:
                    saved_config = current_tab.portlets[idx_in_tab]
                    if saved_config is None or saved_config.portlet_id != portlet_id:
                        current_tab.portlets[idx_in_tab] = self.burst_service.new_portlet_configuration(portlet_id,
                                                                                    tab_idx, idx_in_tab, portlet_name)
                    else:
                        saved_config.visualizer.ui_name = portlet_name
                else:
                    current_tab.portlets[idx_in_tab] = None
            # For generating the HTML, get the corresponding portlet for each id.
        selected_tab_portlets = []
        saved_selected_tab = burst_config.tabs[selected_tab_idx]
        for portlet in saved_selected_tab.portlets:
            if portlet:
                portlet_id = int(portlet.portlet_id)
                portlet_entity = self.burst_service.get_portlet_by_id(portlet_id)
                portlet_entity.name = portlet.name
                selected_tab_portlets.append(portlet_entity)

        return {'portlet_tab_list': selected_tab_portlets}


    @expose_fragment('burst/portlets_preview')
    def get_configured_portlets(self):
        """
        Return the portlets for one given tab. This is used when changing
        from tab to tab and selecting which portlets will be displayed.
        """
        burst_config = common.get_from_session(common.KEY_BURST_CONFIG)
        if burst_config is None:
            return {'portlet_tab_list': []}

        tab_idx = burst_config.selected_tab
        tab_portlet_list = []
        for portlet_cfg in burst_config.tabs[int(tab_idx)].portlets:
            if portlet_cfg is not None:
                portlet_entity = self.burst_service.get_portlet_by_id(portlet_cfg.portlet_id)
                portlet_entity.name = portlet_cfg.name
                tab_portlet_list.append(portlet_entity)
        return {'portlet_tab_list': tab_portlet_list}


    @expose_json
    def change_selected_tab(self, tab_nr):
        """
        Set :param tab_nr: as the currently selected tab in the stored burst
        configuration. 
        """
        common.get_from_session(common.KEY_BURST_CONFIG).selected_tab = int(tab_nr)


    @expose_json
    def get_portlet_session_configuration(self):
        """
        Get the current configuration of portlets stored in session for this burst,
        as a json.
        """
        burst_entity = common.get_from_session(common.KEY_BURST_CONFIG)
        returned_configuration = burst_entity.update_selected_portlets()
        return returned_configuration


    @cherrypy.expose
    @handle_error(redirect=False)
    def save_parameters(self, index_in_tab, **data):
        """
        Save parameters
        
        :param index_in_tab: the index of the configured portlet in the selected tab
        :param data: the {"portlet_parameters": json_string} where json_string is a jsonified dictionary
            {"name": value}, representing the configuration of the current portlet

        Given these inputs, this method updates the configuration of the portlet at the
        corresponding tab position in the burst configuration in session. The tab index
        is read from the currently selected tab of the session burst configuration.
        """
        burst_config = common.get_from_session(common.KEY_BURST_CONFIG)
        tab_nr = burst_config.selected_tab
        old_portlet_config = burst_config.tabs[int(tab_nr)].portlets[int(index_in_tab)]
        data = json.loads(data['portlet_parameters'])

        # Replace all void entries with 'None'
        for entry in data:
            if data[entry] == '':
                data[entry] = None

        need_relaunch = self.burst_service.update_portlet_configuration(old_portlet_config, data)
        if need_relaunch:
            #### Reset Burst Configuration into an entity not persisted (id = None for all)
            common.add2session(common.KEY_BURST_CONFIG, burst_config.clone())
            return "relaunchView"
        else:
            self.workflow_service.store_workflow_step(old_portlet_config.visualizer)
            return "noRelaunch"


    @expose_json
    def rename_burst(self, burst_id, burst_name):
        """
        Rename the burst given by burst_id, setting its new name to
        burst_name.
        """
        validation_result = self._is_burst_name_ok(burst_name)
        if validation_result is True:
            self.burst_service.rename_burst(burst_id, burst_name)
            return {'success': "Simulation successfully renamed!"}
        else:
            return {'error': validation_result}


    @expose_json
    def launch_burst(self, launch_mode, burst_name, **data):
        """
        Do the actual burst launch, using the configuration saved in current session.
        :param launch_mode: new/branch/continue
        :param burst_name: user-given burst name. It can be empty (in which case we fill it with simulation_x)
        :param data: kwargs for simulation input parameters.
        """
        data = json.loads(data['simulator_parameters'])
        burst_config = common.get_from_session(common.KEY_BURST_CONFIG)

        ## Validate new burst-name
        if launch_mode == LAUNCH_NEW and burst_name != 'none_undefined':
            validation_result = self._is_burst_name_ok(burst_name)
            if validation_result is True:
                burst_config.name = burst_name
            else:
                return {'error': validation_result}

        ## Fill all parameters 
        user_id = common.get_logged_user().id
        data[common.KEY_ADAPTER] = self.cached_simulator_algorithm.id
        burst_config.update_simulator_configuration(data)
        burst_config.fk_project = common.get_current_project().id

        ## Do the asynchronous launch
        try:
            burst_id, burst_name = self.burst_service.launch_burst(burst_config, 0, self.cached_simulator_algorithm.id,
                                                                   user_id, launch_mode)
            return {'id': burst_id, 'name': burst_name}
        except BurstServiceException as e:
            self.logger.exception("Could not launch burst!")
            return {'error': e.message}
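
Both copies of the controller memoize the introspected simulator tree in the user session and hand out deep copies, so a caller can mutate its tree without corrupting the cached original. A minimal sketch of the pattern, with a plain dict standing in for TVB's session helpers:

    # Sketch of the cache-and-deepcopy pattern from cached_simulator_input_tree;
    # _session stands in for common.get_from_session / common.add2session.
    import copy

    _session = {}

    def get_cached_tree(build_tree, key='cached_simulator_tree'):
        tree = _session.get(key)
        if tree is None:
            tree = build_tree()     # expensive introspection, done only once
            _session[key] = tree
        return copy.deepcopy(tree)  # each caller gets a private, mutable copy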
Example #25
0
 def __init__(self):
     self.logger = get_logger(self.__class__.__module__)
     self.workflow_service = WorkflowService()
     self.file_helper = FilesHelper()
Example #26
0
 def __init__(self):
     self.operation_service = OperationService()
     self.workflow_service = WorkflowService()
     self.logger = get_logger(self.__class__.__module__)
     self.cache_portlet_configurators = {}
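
Example #26 initializes cache_portlet_configurators as an empty dict, presumably to memoize one configurator per portlet id. A hedged sketch of how such a cache is typically consulted; build_portlet_configurator is a hypothetical stand-in, not a real TVB function:

    # Hedged sketch: per-id memoization in the style suggested by Example #26.
    def get_portlet_configurator(self, portlet_id):
        if portlet_id not in self.cache_portlet_configurators:
            self.cache_portlet_configurators[portlet_id] = build_portlet_configurator(portlet_id)
        return self.cache_portlet_configurators[portlet_id]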