def update_executed_workflow_state(self, operation):
        """
        Update the state of the workflow to which the given operation belongs.

        Only if *operation* was produced by an executed workflow step (i.e.
        ``self._get_data(operation.id)`` resolves to a step) is anything done:
        on error, every later step of the same workflow is marked as failed,
        and in all cases the owning burst is marked as finished and re-stored.

        :param operation: an Operation entity whose status just changed
        """
        # Resolve the workflow step that launched this operation; operations
        # started outside a workflow yield None and are ignored.
        executed_step, _ = self._get_data(operation.id)
        if executed_step is not None:
            if operation.status == model.STATUS_ERROR:
                # A failed step blocks everything after it: mark all steps
                # with a higher index in the same workflow as errored too.
                all_executed_steps = dao.get_workflow_steps(
                    executed_step.fk_workflow)
                for step in all_executed_steps:
                    if step.step_index > executed_step.step_index:
                        self.logger.debug(
                            "Marking unreached operation %s with error." %
                            step.fk_operation)
                        unreached_operation = dao.get_operation_by_id(
                            step.fk_operation)
                        self.persist_operation_state(
                            unreached_operation, model.STATUS_ERROR,
                            "Blocked by failure in step %s with message: \n\n%s."
                            % (executed_step.step_index,
                               operation.additional_info))

            # Mark the owning burst as finished.  NOTE(review): additional_info
            # is passed as error_message unconditionally — presumably it is
            # None/empty on success; confirm in mark_burst_finished.
            workflow = dao.get_workflow_by_id(executed_step.fk_workflow)
            burst = dao.get_burst_by_id(workflow.fk_burst)
            self.mark_burst_finished(burst,
                                     error_message=operation.additional_info)
            dao.store_entity(burst)
# --- Example n. 2 (snippet-aggregator separator; score: 0) ---
    def load_burst(self, burst_id):
        """
        Load a burst entity from the database and rebuild its tab layout.

        :param burst_id: the id of the burst that should be loaded
        :returns: a ``(burst, group_gid)`` tuple — *group_gid* is the gid of
            the datatype group when the burst was launched over a range of
            parameters (multiple workflows), otherwise None

        The tab configuration is reconstructed from the visualization workflow
        steps, using the tab_index and index_in_tab fields stored on each step.
        """
        burst = dao.get_burst_by_id(burst_id)
        burst.prepare_after_load()
        burst.reset_tabs()
        workflows = dao.get_workflows_for_burst(burst.id)

        group_gid = None
        if len(workflows) == 1:
            # Simple burst, no range parameters: a single workflow drives the tabs.
            burst = self.__populate_tabs_from_workflow(burst, workflows[0])
        elif len(workflows) > 1:
            # Range burst: many workflows were created; the tabs come from the
            # first one and the group gid enables parameter space exploration.
            self.__populate_tabs_from_workflow(burst, workflows[0])
            steps = dao.get_workflow_steps(workflows[0].id)

            first_operation = dao.get_operation_by_id(steps[0].fk_operation)
            if first_operation.operation_group:
                datatype_group = dao.get_datatypegroup_by_op_group_id(
                    first_operation.operation_group.id)
                group_gid = datatype_group.gid
        return burst, group_gid
# --- Example n. 3 (snippet-aggregator separator; score: 0) ---
    def load_burst(self, burst_id):
        """
        Load a burst entity from the database and rebuild its tab layout.

        :param burst_id: the id of the burst that should be loaded
        :returns: a ``(burst, group_gid)`` tuple — *group_gid* is the gid of
            the datatype group when the burst was launched over a range of
            parameters (multiple workflows), otherwise None

        Having this input the method should:

            - load the entity from the DB
            - get all the workflow steps for the saved burst id
            - go through the visualization workflow steps to create the tab
              configuration of the burst using the tab_index and index_in_tab
              fields saved on each workflow_step
        """
        burst = dao.get_burst_by_id(burst_id)
        burst.prepare_after_load()
        burst.reset_tabs()
        burst_workflows = dao.get_workflows_for_burst(burst.id)

        group_gid = None
        if len(burst_workflows) == 1:
            # A simple burst with no range parameters
            burst = self.__populate_tabs_from_workflow(burst, burst_workflows[0])
        elif len(burst_workflows) > 1:
            # A burst workflow with a range of values, created multiple workflows and need
            # to launch parameter space exploration with the resulted group
            self.__populate_tabs_from_workflow(burst, burst_workflows[0])
            executed_steps = dao.get_workflow_steps(burst_workflows[0].id)

            # The first step's operation carries the operation group (if any)
            # from which the exported datatype group gid is taken.
            operation = dao.get_operation_by_id(executed_steps[0].fk_operation)
            if operation.operation_group:
                workflow_group = dao.get_datatypegroup_by_op_group_id(operation.operation_group.id)
                group_gid = workflow_group.gid
        return burst, group_gid
# --- Example n. 4 (snippet-aggregator separator; score: 0) ---
    def test_launch_burst_invalid_portlet_analyzer_data(self):
        """
        Test that burst is marked as error if invalid data is passed to the first step.
        """
        algo_id = self.flow_service.get_algorithm_by_module_and_class('tvb.tests.framework.adapters.testadapter1',
                                                                      'TestAdapter1').id
        #Adapter tries to do an int(test1_val1) and int(test1_val2) so this should be valid
        burst_config = self.burst_service.new_burst_configuration(self.test_project.id)
        kwargs_replica = {'test1_val1': '1', 'test1_val2': '0'}
        burst_config.update_simulator_configuration(kwargs_replica)

        test_portlet = dao.get_portlet_by_identifier(self.PORTLET_ID)
        portlet_configuration = self.burst_service.new_portlet_configuration(test_portlet.id)
        #Portlet analyzer tries to do int(input) which should fail
        declared_overwrites = {ADAPTER_PREFIX_ROOT + '0test_non_dt_input': 'asa'}
        self.burst_service.update_portlet_configuration(portlet_configuration, declared_overwrites)
        # Attach the deliberately-broken portlet to the first tab position.
        burst_config.tabs[0].portlets[0] = portlet_configuration

        burst_id, _ = self.burst_service.launch_burst(burst_config, 0, algo_id, self.test_user.id)
        burst_config = dao.get_burst_by_id(burst_id)
        #Wait maximum x seconds for burst to finish
        burst_config = self._wait_for_burst(burst_config, error_expected=True)

        # The launch should have produced exactly two workflow steps:
        # the simulation (succeeds) and the portlet analyzer (fails on int('asa')).
        burst_wf = dao.get_workflows_for_burst(burst_config.id)[0]
        wf_steps = dao.get_workflow_steps(burst_wf.id)
        self.assertTrue(len(wf_steps) == 2,
                        "Should have exactly 2 wf steps. One for 'simulation' one for portlet analyze operation.")
        simulator_op = dao.get_operation_by_id(wf_steps[0].fk_operation)
        self.assertEqual(model.STATUS_FINISHED, simulator_op.status,
                         "First operation should be simulator which should have 'finished' status.")
        portlet_analyze_op = dao.get_operation_by_id(wf_steps[1].fk_operation)
        self.assertEqual(portlet_analyze_op.status, model.STATUS_ERROR,
                         "Second operation should be portlet analyze step which should have 'error' status.")
# --- Example n. 5 (snippet-aggregator separator; score: 0) ---
 def test_load_tab_configuration(self):
     """
     Create a burst with some predefined portlets in some known positions. Check that the
     load_tab_configuration method does what it is expected, and we get the portlets in the
     corresponding tab positions.
     """
     burst_config = self.burst_service.new_burst_configuration(self.test_project.id)
     SIMULATOR_MODULE = 'tvb.tests.framework.adapters.testadapter1'
     SIMULATOR_CLASS = 'TestAdapter1'
     algo_id = self.flow_service.get_algorithm_by_module_and_class(SIMULATOR_MODULE, SIMULATOR_CLASS).id
     kwargs_replica = {'test1_val1': '0', 'test1_val2': '0'}
     test_portlet = dao.get_portlet_by_identifier(self.PORTLET_ID)
     # Add test_portlet to positions (0,0), (0,1) and (1,0)
     tab_config = {test_portlet.id: [(0, 0), (0, 1), (1, 0)]}
     self._add_portlets_to_burst(burst_config, tab_config)
     burst_config.update_simulator_configuration(kwargs_replica)
     burst_id, _ = self.burst_service.launch_burst(burst_config, 0, algo_id, self.test_user.id)
     burst_config = dao.get_burst_by_id(burst_id)
     burst_config = self._wait_for_burst(burst_config)
     burst_wf = dao.get_workflows_for_burst(burst_config.id)[0]
     wf_step = dao.get_workflow_steps(burst_wf.id)[0]
     # prepare_after_load resets the tabs, so every portlet slot must be empty
     # before load_tab_configuration runs.
     burst_config.prepare_after_load()
     for tab in burst_config.tabs:
         for portlet in tab.portlets:
             self.assertTrue(portlet is None, "Before loading the tab configuration all portlets should be none.")
     burst_config = self.burst_service.load_tab_configuration(burst_config, wf_step.fk_operation)
     # Only the three positions configured above should now hold the test portlet.
     for tab_idx, tab in enumerate(burst_config.tabs):
         for portlet_idx, portlet in enumerate(tab.portlets):
             if (tab_idx == 0 and portlet_idx in [0, 1]) or (tab_idx == 1 and portlet_idx == 0):
                 # Fixed typo in the failure message ("gonfiguration" -> "configuration").
                 self.assertTrue(portlet is not None, "portlet configuration not set")
                 self.assertEqual(test_portlet.id, portlet.portlet_id, "Unexpected portlet entity loaded.")
             else:
                 self.assertTrue(portlet is None, "Before loading the tab configuration all portlets should be none")
 def test_create_workflow(self):
     """
     Verify that launching a complex workflow stores one workflow entity
     together with all of its associated workflow steps.
     """
     steps = [
         TestFactory.create_workflow_step("tvb.tests.framework.adapters.testadapter2",
                                          "TestAdapter2", step_index=1,
                                          static_kwargs={"test2": 2}),
         TestFactory.create_workflow_step("tvb.tests.framework.adapters.testadapter1",
                                          "TestAdapter1", step_index=2,
                                          static_kwargs={"test1_val1": 1, "test1_val2": 1}),
     ]
     stored_burst_id = self.__create_complex_workflow(steps)
     stored_workflows = dao.get_workflows_for_burst(stored_burst_id)
     self.assertTrue(
         len(stored_workflows) == 1,
         "For some reason workflow was not stored in database.")
     stored_steps = dao.get_workflow_steps(stored_workflows[0].id)
     self.assertEqual(len(stored_steps),
                      len(steps) + 1,
                      "Wrong number of workflow steps created.")
 def test_create_workflow(self):
     """Check that one workflow and all of its steps are persisted for a launched burst."""
     step_specs = [
         ("tvb.tests.framework.adapters.testadapter2", "TestAdapter2", {"test2": 2}),
         ("tvb.tests.framework.adapters.testadapter1", "TestAdapter1",
          {"test1_val1": 1, "test1_val2": 1}),
     ]
     workflow_step_list = [TestFactory.create_workflow_step(module, classname,
                                                            step_index=position + 1,
                                                            static_kwargs=kwargs)
                           for position, (module, classname, kwargs) in enumerate(step_specs)]
     burst_id = self.__create_complex_workflow(workflow_step_list)
     workflow_entities = dao.get_workflows_for_burst(burst_id)
     assert len(workflow_entities) == 1, "For some reason workflow was not stored in database."
     workflow_steps = dao.get_workflow_steps(workflow_entities[0].id)
     assert len(workflow_steps) == len(workflow_step_list) + 1, "Wrong number of workflow steps created."
# --- Example n. 8 (snippet-aggregator separator; score: 0) ---
    def stop_burst(self, burst_entity):
        """
        Stop every operation scheduled by this burst's workflows.

        :param burst_entity: the burst whose operations should be stopped
        :returns: True when at least one operation was actually stopped and the
            burst was marked canceled, False otherwise
        """
        stopped_anything = False
        for workflow in dao.get_workflows_for_burst(burst_entity.id):
            for wf_step in dao.get_workflow_steps(workflow.id):
                operation_id = wf_step.fk_operation
                if operation_id is None:
                    continue
                self.logger.debug("We will stop operation: %d" % operation_id)
                if self.operation_service.stop_operation(operation_id):
                    stopped_anything = True

        # Only flip the burst to canceled when something was genuinely stopped
        # and it is not already in the canceled state.
        if stopped_anything and burst_entity.status != burst_entity.BURST_CANCELED:
            self.workflow_service.mark_burst_finished(burst_entity, model.BurstConfiguration.BURST_CANCELED)
            return True
        return False
# --- Example n. 9 (snippet-aggregator separator; score: 0) ---
    def stop_burst(self, burst_entity):
        """
        Stop all the entities for the current burst and set the burst status to canceled.

        :param burst_entity: the burst whose scheduled operations are stopped
        :returns: True when at least one operation was stopped and the burst
            was marked canceled, False otherwise
        """
        burst_wfs = dao.get_workflows_for_burst(burst_entity.id)
        any_stopped = False
        for workflow in burst_wfs:
            wf_steps = dao.get_workflow_steps(workflow.id)
            for step in wf_steps:
                # Steps without an operation (e.g. pure view steps) are skipped.
                if step.fk_operation is not None:
                    self.logger.debug("We will stop operation: %d" % step.fk_operation)
                    # 'or' keeps the flag set once any stop succeeds, while still
                    # attempting to stop every remaining operation.
                    any_stopped = self.operation_service.stop_operation(step.fk_operation) or any_stopped

        # Only mark the burst canceled when something was actually stopped and
        # it is not already in the canceled state.
        if any_stopped and burst_entity.status != burst_entity.BURST_CANCELED:
            self.workflow_service.mark_burst_finished(burst_entity, model.BurstConfiguration.BURST_CANCELED)
            return True
        return False
# --- Example n. 10 (snippet-aggregator separator; score: 0) ---
    def _build_workflow_step_info(workflow):
        """
        Collect exportable descriptors for every step of the given workflow.

        :param workflow: the workflow entity whose steps are exported
        :returns: a ``(wf_steps, view_steps)`` tuple of information objects
        """
        wf_steps = []
        view_steps = []
        for step in dao.get_workflow_steps(workflow.id):
            # Old-form view steps have no operation/algorithm link; skip them.
            if step.fk_operation is None or step.fk_algorithm is None:
                LOG.warning("Skipping " + str(workflow) + " " + str(step))
                continue
            info = WorkflowStepInformation(step.to_dict()[1])
            # Store the operation by entity (its gid survives export/import,
            # unlike the numeric id which may differ in the target project).
            info.set_operation(dao.get_operation_by_id(step.fk_operation))
            # Likewise keep the algorithm as module + classname instead of id.
            info.set_algorithm(dao.get_algorithm_by_id(step.fk_algorithm))
            wf_steps.append(info)

        for view in dao.get_visualization_steps(workflow.id):
            info = WorkflowViewStepInformation(view.to_dict()[1])
            # Portlet ids can differ in the importing project; keep the
            # portlet identifier instead.
            info.set_portlet(dao.get_portlet_by_id(view.fk_portlet))
            # Algorithm stored as module + classname for the same reason.
            info.set_algorithm(dao.get_algorithm_by_id(view.fk_algorithm))
            view_steps.append(info)
        return wf_steps, view_steps
    def _build_workflow_step_info(workflow):
        """
        For the input workflow, get all workflow steps and return a list with information
        that can be then exported.

        :param workflow: the workflow entity whose steps are exported
        :returns: a ``(wf_steps, view_steps)`` tuple of information objects
        """
        wf_steps = []
        view_steps = []
        for wf_step in dao.get_workflow_steps(workflow.id):

            if wf_step.fk_operation is None or wf_step.fk_algorithm is None:
                # Avoid exporting old form of View Steps.
                LOG.warning("Skipping " + str(workflow) + " " + str(wf_step))
                continue
            # Get all basic information for this workflow step
            wf_step_info = WorkflowStepInformation(wf_step.to_dict()[1])
            # We need to store the gid for the operation since the id might be
            # different in case of a project export / import
            linked_operation = dao.get_operation_by_id(wf_step.fk_operation)
            wf_step_info.set_operation(linked_operation)
            # We also need to keep info about algorithm in the form of module
            # and classname because that id might also be different in case
            # of project export / import.
            linked_algorithm = dao.get_algorithm_by_id(wf_step.fk_algorithm)
            wf_step_info.set_algorithm(linked_algorithm)
            wf_steps.append(wf_step_info)
        for view_step in dao.get_visualization_steps(workflow.id):
            # Get all basic information for this view step
            view_step_info = WorkflowViewStepInformation(view_step.to_dict()[1])
            # We need to store portlet identifier, since portlet id might be different
            # in project we are importing into.
            portlet = dao.get_portlet_by_id(view_step.fk_portlet)
            view_step_info.set_portlet(portlet)
            # We also need to keep info about algorithm in the form of module
            # and classname because that id might also be different in case
            # of project export / import.
            linked_algorithm = dao.get_algorithm_by_id(view_step.fk_algorithm)
            view_step_info.set_algorithm(linked_algorithm)
            view_steps.append(view_step_info)
        return wf_steps, view_steps
    def update_executed_workflow_state(self, operation):
        """
        Update the state of the workflow that produced the given operation.

        Nothing happens unless *operation* resulted from an executed workflow
        step.  On an errored operation every later step of the same workflow is
        also marked as failed; in all cases the owning burst is marked finished
        and persisted.

        :param operation: an Operation entity whose status just changed
        """
        executed_step, _ = self._get_data(operation.id)
        if executed_step is None:
            # Operation was not launched through a workflow step: nothing to do.
            return

        if operation.status == model.STATUS_ERROR:
            # A failed step blocks every step after it in the same workflow.
            for step in dao.get_workflow_steps(executed_step.fk_workflow):
                if step.step_index <= executed_step.step_index:
                    continue
                self.logger.debug("Marking unreached operation %s with error." % step.fk_operation)
                unreached_operation = dao.get_operation_by_id(step.fk_operation)
                self.persist_operation_state(unreached_operation, model.STATUS_ERROR,
                                             "Blocked by failure in step %s with message: \n\n%s." % (
                                                 executed_step.step_index, operation.additional_info))

        workflow = dao.get_workflow_by_id(executed_step.fk_workflow)
        burst = dao.get_burst_by_id(workflow.fk_burst)
        self.mark_burst_finished(burst, error_message=operation.additional_info)
        dao.store_entity(burst)