Example #1
    def __init__(self):
        base.BaseController.__init__(self)
        self.burst_service = BurstService()
        self.workflow_service = WorkflowService()
        self.context = SelectedAdapterContext()

        ## Cache the simulator Tree, Algorithm and AlgorithmGroup, for performance reasons.
        algorithm, self.cached_simulator_algo_group = self.flow_service.get_algorithm_by_module_and_class(
            SIMULATOR_MODULE, SIMULATOR_CLASS)
        self.cached_simulator_algorithm_id = algorithm.id
Example #2
 def setUp(self):
     #        self.clean_database()
     self.test_user = TestFactory.create_user()
     self.test_project = TestFactory.create_project(self.test_user)
     self.old_config_file = cfg.CURRENT_DIR
     cfg.CURRENT_DIR = os.path.dirname(tvb_test.__file__)
     self.workflow_service = WorkflowService()
     self.burst_service = BurstService()
     self.operation_service = OperationService()
     self.flow_service = FlowService()
Example #3
def do_operation_launch(operation_id):
    """
    Event attached to the local queue for executing an operation, once resources become available.
    """
    try:
        LOGGER.debug("Loading operation with id=%s" % operation_id)
        current_operation = dao.get_operation_by_id(operation_id)
        algorithm = current_operation.algorithm
        algorithm_group = dao.get_algo_group_by_id(algorithm.fk_algo_group)
        LOGGER.debug("Importing Algorithm: " + str(algorithm_group.classname) +
                     " for Operation:" + str(current_operation.id))
        params = parse_json_parameters(current_operation.parameters)
        adapter_instance = ABCAdapter.build_adapter(algorithm_group)

        ## Uncomment below to profile an operation:
        ## import cherrypy.lib.profiler as profiler
        ## p = profiler.Profiler("/Users/lia.domide/TVB/profiler/")
        ## p.run(OperationService().initiate_prelaunch, current_operation, adapter_instance, {}, **params)

        OperationService().initiate_prelaunch(current_operation,
                                              adapter_instance, {}, **params)
        LOGGER.debug("Successfully finished operation " + str(operation_id))

    except Exception, excep:
        LOGGER.error("Could not execute operation " + str(operation_id))
        LOGGER.exception(excep)
        parent_burst = dao.get_burst_for_operation_id(operation_id)
        if parent_burst is not None:
            WorkflowService().mark_burst_finished(parent_burst,
                                                  error=True,
                                                  error_message=str(excep))
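
A minimal sketch of how such an event might be wired up: a worker thread that drains a local queue of operation ids and hands each one to do_operation_launch from the example above. The queue name and wiring here are illustrative assumptions, not TVB's actual code.

# Hypothetical consumer (illustrative only; assumes do_operation_launch above).
import threading
import Queue  # Python 2 standard library

OPERATION_QUEUE = Queue.Queue()

def operation_worker():
    while True:
        operation_id = OPERATION_QUEUE.get()   # blocks until an id is queued
        try:
            do_operation_launch(operation_id)
        finally:
            OPERATION_QUEUE.task_done()

worker = threading.Thread(target=operation_worker)
worker.setDaemon(True)   # do not block interpreter exit
worker.start()
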
Example #4
 def setUp(self):
     """
     Sets up the testing environment;
     saves config file;
     creates a test user, a test project;
     creates burst, operation, flow and workflow services
     """
     #        self.clean_database()
     self.test_user = TestFactory.create_user()
     self.test_project = TestFactory.create_project(self.test_user)
     self.old_config_file = cfg.CURRENT_DIR
     cfg.CURRENT_DIR = os.path.dirname(tvb_test.__file__)
     self.workflow_service = WorkflowService()
     self.burst_service = BurstService()
     self.operation_service = OperationService()
     self.flow_service = FlowService()
Example #5
    def run(self):
        """
        Get the required data from the operation queue and launch the operation.
        """
        # Try to get a free slot before launching our own operation.
        LOCKS_QUEUE.get(True)
        operation_id = self.operation_id
        user_name_label = self.user_name_label
        run_params = [config().get_python_path(), '-m', 'tvb.core.cluster_launcher', str(operation_id), user_name_label]

        if tvb_profile.CURRENT_SELECTED_PROFILE is not None:
            run_params.extend([tvb_profile.SUBPARAM_PROFILE, tvb_profile.CURRENT_SELECTED_PROFILE])

        # In the exceptional case where the user pressed stop while the thread was starting up,
        # we should no longer launch the operation.
        if self.stopped() is False:
            launched_process = Popen(run_params, stdout=PIPE, stderr=PIPE)
            LOGGER.debug("Storing pid=%s for operation id=%s launched on local machine." % (operation_id,
                                                                                            launched_process.pid))
            op_ident = model.OperationProcessIdentifier(operation_id, pid=launched_process.pid)
            dao.store_entity(op_ident)

            if self.stopped():
                # Handle the exceptional case where the user pressed stop while the thread
                # was starting up and stop_operation is concurrently querying the
                # OperationProcessIdentifier entity.
                self.stop_pid(launched_process.pid)

            # communicate() waits for the process to terminate; wait() then just returns the exit code.
            launched_process.communicate()
            LOGGER.info("Finished with launch of operation %s" % operation_id)
            returned = launched_process.wait()

            if returned != 0 and not self.stopped():
                # Process did not end as expected. (e.g. Segmentation fault)
                operation = dao.get_operation_by_id(self.operation_id)
                LOGGER.error("Operation suffered fatal failure with exit code: %s" % returned)

                operation.mark_complete(model.STATUS_ERROR,
                                        "Operation failed unexpectedly! Probably segmentation fault.")
                dao.store_entity(operation)

                burst_entity = dao.get_burst_for_operation_id(self.operation_id)
                if burst_entity:
                    message = "Error on burst operation! Probably segmentation fault."
                    WorkflowService().mark_burst_finished(burst_entity, error=True, error_message=message)

            del launched_process

        # Give back the launch slot now that this operation has finished.
        CURRENT_ACTIVE_THREADS.remove(self)
        LOCKS_QUEUE.put(1)
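
In run() above, LOCKS_QUEUE.get(True) and LOCKS_QUEUE.put(1) act together as a counting semaphore: each token in the queue is one available launch slot. A minimal sketch of how such a queue could be initialized (the capacity constant is an assumption for illustration, not a TVB value):

# Illustrative setup only; MAX_PARALLEL_OPERATIONS is an assumed constant.
import Queue

MAX_PARALLEL_OPERATIONS = 4

LOCKS_QUEUE = Queue.Queue()
for _ in range(MAX_PARALLEL_OPERATIONS):
    LOCKS_QUEUE.put(1)       # one token per available launch slot

CURRENT_ACTIVE_THREADS = []  # threads currently running operations
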
Example #6
class BurstController(base.BaseController):
    """
    Controller class for Burst-Pages.
    """
    def __init__(self):
        base.BaseController.__init__(self)
        self.burst_service = BurstService()
        self.workflow_service = WorkflowService()
        self.context = SelectedAdapterContext()

        ## Cache the simulator Tree, Algorithm and AlgorithmGroup, for performance reasons.
        algorithm, self.cached_simulator_algo_group = self.flow_service.get_algorithm_by_module_and_class(
            SIMULATOR_MODULE, SIMULATOR_CLASS)
        self.cached_simulator_algorithm_id = algorithm.id

    @property
    @context_selected()
    def cached_simulator_input_tree(self):
        """
        Cache the Simulator's input tree, for performance reasons.
        Without a restart, the introspected tree will not change between executions.
        :returns: Simulator's Input Tree (copy from cache or just loaded)
        """
        cached_simulator_tree = base.get_from_session(
            base.KEY_CACHED_SIMULATOR_TREE)
        if cached_simulator_tree is None:
            cached_simulator_tree = self.flow_service.prepare_adapter(
                base.get_current_project().id,
                self.cached_simulator_algo_group)[1]
            base.add2session(base.KEY_CACHED_SIMULATOR_TREE,
                             cached_simulator_tree)
        return copy.deepcopy(cached_simulator_tree)

    @cherrypy.expose
    @using_template('base_template')
    @base.settings()
    @logged()
    @context_selected()
    def index(self):
        """Get on burst main page"""
        template_specification = dict(
            mainContent="burst/main_burst",
            title="Simulation Cockpit",
            baseUrl=cfg.BASE_URL,
            includedResources='project/included_resources')
        portlets_list = self.burst_service.get_available_portlets()
        session_stored_burst = base.get_from_session(base.KEY_BURST_CONFIG)
        if session_stored_burst is None or session_stored_burst.id is None:
            if session_stored_burst is None:
                session_stored_burst = self.burst_service.new_burst_configuration(
                    base.get_current_project().id)
                base.add2session(base.KEY_BURST_CONFIG, session_stored_burst)

            adapter_interface = self.cached_simulator_input_tree
            if session_stored_burst is not None:
                current_data = session_stored_burst.get_all_simulator_values(
                )[0]
                adapter_interface = ABCAdapter.fill_defaults(
                    adapter_interface, current_data, True)
                ### Add simulator tree to session to be available in filters
                self.context.add_adapter_to_session(
                    self.cached_simulator_algo_group, adapter_interface,
                    current_data)
            template_specification['inputList'] = adapter_interface

        selected_portlets = session_stored_burst.update_selected_portlets()
        template_specification[
            'burst_list'] = self.burst_service.get_available_bursts(
                base.get_current_project().id)
        template_specification['portletList'] = portlets_list
        template_specification['selectedPortlets'] = json.dumps(
            selected_portlets)
        template_specification['draw_hidden_ranges'] = True
        template_specification['burstConfig'] = session_stored_burst

        ### Prepare PSE available metrics
        ### We put here all available algorithms, because the metrics select area is a generic one,
        ### and not loaded with every Burst Group change in history.
        algo_group = self.flow_service.get_algorithm_by_module_and_class(
            MEASURE_METRICS_MODULE, MEASURE_METRICS_CLASS)[1]
        adapter_instance = ABCAdapter.build_adapter(algo_group)
        if adapter_instance is not None and hasattr(adapter_instance,
                                                    'available_algorithms'):
            template_specification['available_metrics'] = [
                metric_name for metric_name in
                adapter_instance.available_algorithms.keys()
            ]
        else:
            template_specification['available_metrics'] = []

        template_specification[base.KEY_PARAMETERS_CONFIG] = False
        template_specification[base.KEY_SECTION] = 'burst'
        return self.fill_default_attributes(template_specification)

    @cherrypy.expose
    @using_template('burst/burst_history')
    def load_burst_history(self):
        """
        Load the available bursts that are stored in the database at this time.
        This is one workaround for the 'chrome-back' problem.
        """
        session_burst = base.get_from_session(base.KEY_BURST_CONFIG)
        return {
            'burst_list':
            self.burst_service.get_available_bursts(
                base.get_current_project().id),
            'selectedBurst':
            session_burst.id
        }

    @cherrypy.expose
    @ajax_call(False)
    def get_selected_burst(self):
        """
        Return the burst that is currently stored in session.
        This is one workaround for the 'chrome-back' problem.
        """
        session_burst = base.get_from_session(base.KEY_BURST_CONFIG)
        if session_burst.id:
            return str(session_burst.id)
        else:
            return 'None'

    @cherrypy.expose
    @using_template('burst/portlet_configure_parameters')
    def get_portlet_configurable_interface(self, index_in_tab):
        """
        From the currently selected tab and the given index within that tab,
        get the portlet configuration and build the configurable interface
        for that portlet.
        """
        burst_config = base.get_from_session(base.KEY_BURST_CONFIG)
        tab_index = burst_config.selected_tab
        portlet_config = burst_config.tabs[tab_index].portlets[int(
            index_in_tab)]
        portlet_interface = self.burst_service.build_portlet_interface(
            portlet_config,
            base.get_current_project().id)

        full_portlet_input_tree = []
        for entry in portlet_interface:
            full_portlet_input_tree.extend(entry.interface)
        self.context.add_portlet_to_session(full_portlet_input_tree)

        portlet_interface = {
            "adapters_list": portlet_interface,
            base.KEY_PARAMETERS_CONFIG: False,
            base.KEY_SESSION_TREE: self.context.KEY_PORTLET_CONFIGURATION
        }
        return self.fill_default_attributes(portlet_interface)

    @cherrypy.expose
    @using_template('burst/portlets_preview')
    def portlet_tab_display(self, **data):
        """
        When saving a new configuration of tabs, check if any of the old
        portlets are still present, and if that is the case reuse their
        parameter configuration.

        For all the new portlets, add entries to the burst configuration.
        Also remove old portlets that are no longer saved.
        """
        tab_portlets_list = json.loads(data['tab_portlets_list'])
        burst_config = base.get_from_session(base.KEY_BURST_CONFIG)
        selected_tab_idx = burst_config.selected_tab
        for tab_idx in xrange(len(tab_portlets_list)):
            current_tab = burst_config.tabs[tab_idx]
            ### When a configuration already exists and new portlets are selected,
            ### first check whether a configuration was saved for each portlet and,
            ### if so, use it. If none is present, create a new one.
            for idx_in_tab in xrange(len(tab_portlets_list[tab_idx])):
                portlet_id = tab_portlets_list[tab_idx][idx_in_tab][0]
                portlet_name = tab_portlets_list[tab_idx][idx_in_tab][1]
                if portlet_id >= 0:
                    saved_config = current_tab.portlets[idx_in_tab]
                    if saved_config is None or saved_config.portlet_id != portlet_id:
                        current_tab.portlets[
                            idx_in_tab] = self.burst_service.new_portlet_configuration(
                                portlet_id, tab_idx, idx_in_tab, portlet_name)
                    else:
                        saved_config.visualizer.ui_name = portlet_name
                else:
                    current_tab.portlets[idx_in_tab] = None
        # For generating the HTML, get the corresponding portlet for each id.
        selected_tab_portlets = []
        saved_selected_tab = burst_config.tabs[selected_tab_idx]
        for portlet in saved_selected_tab.portlets:
            if portlet:
                portlet_id = int(portlet.portlet_id)
                portlet_entity = self.burst_service.get_portlet_by_id(
                    portlet_id)
                portlet_entity.name = portlet.name
                selected_tab_portlets.append(portlet_entity)

        return {'portlet_tab_list': selected_tab_portlets}

    @cherrypy.expose
    @using_template('burst/portlets_preview')
    def get_configured_portlets(self):
        """
        Return the portlets for one given tab. This is used when changing
        from tab to tab and selecting which portlets will be displayed.
        """
        burst_config = base.get_from_session(base.KEY_BURST_CONFIG)
        if burst_config is None:
            return {'portlet_tab_list': []}

        tab_idx = burst_config.selected_tab
        tab_portlet_list = []
        for portlet_cfg in burst_config.tabs[int(tab_idx)].portlets:
            if portlet_cfg is not None:
                portlet_entity = self.burst_service.get_portlet_by_id(
                    portlet_cfg.portlet_id)
                portlet_entity.name = portlet_cfg.name
                tab_portlet_list.append(portlet_entity)
        return {'portlet_tab_list': tab_portlet_list}

    @cherrypy.expose
    @ajax_call()
    def change_selected_tab(self, tab_nr):
        """
        Set the tab given by :param tab_nr: as the currently selected tab in the
        stored burst configuration.
        """
        base.get_from_session(base.KEY_BURST_CONFIG).selected_tab = int(tab_nr)

    @cherrypy.expose
    @ajax_call()
    def get_portlet_session_configuration(self):
        """
        Get the current configuration of portlets stored in session for this burst,
        as a json.
        """
        burst_entity = base.get_from_session(base.KEY_BURST_CONFIG)
        returned_configuration = burst_entity.update_selected_portlets()
        return returned_configuration

    @cherrypy.expose
    @ajax_call(False)
    def save_parameters(self, index_in_tab, **data):
        """
        Save the parameters of a configured portlet.

        :param index_in_tab: the index of the configured portlet in the selected tab
        :param data: the {name:value} dictionary configuration of the current portlet

        The tab index is taken from the burst configuration stored in session. Having
        these inputs, update the configuration of the portlet in the corresponding
        tab position from the burst configuration.
        """
        burst_config = base.get_from_session(base.KEY_BURST_CONFIG)
        tab_nr = burst_config.selected_tab
        old_portlet_config = burst_config.tabs[int(tab_nr)].portlets[int(
            index_in_tab)]

        # Replace all empty-string entries with None
        for entry in data:
            if data[entry] == '':
                data[entry] = None

        need_relaunch = self.burst_service.update_portlet_configuration(
            old_portlet_config, data)
        if need_relaunch:
            #### Reset Burst Configuration into an entity not persisted (id = None for all)
            base.add2session(base.KEY_BURST_CONFIG, burst_config.clone())
            return "relaunchView"
        else:
            self.workflow_service.store_workflow_step(
                old_portlet_config.visualizer)
            return "noRelaunch"

    @cherrypy.expose
    @ajax_call()
    def rename_burst(self, burst_id, burst_name):
        """
        Rename the burst given by burst_id, setting its new name to
        burst_name.
        """
        self._validate_burst_name(burst_name)
        self.burst_service.rename_burst(burst_id, burst_name)

    @cherrypy.expose
    @ajax_call()
    def launch_burst(self, launch_mode, burst_name, **data):
        """
        Do the actual burst launch, using the configuration saved in current session.
        :param launch_mode: new/branch/continue
        :param burst_name: user-given burst name. It can be empty (in which case we will fill it in as simulation_x)
        :param data: kwargs for simulation input parameters.
        """
        burst_config = base.get_from_session(base.KEY_BURST_CONFIG)

        ## Validate new burst-name
        if burst_name != 'none_undefined':
            self._validate_burst_name(burst_name)
            burst_config.name = burst_name

        ## Fill all parameters
        user_id = base.get_logged_user().id
        data[base.KEY_ADAPTER] = self.cached_simulator_algorithm_id
        burst_config.update_simulator_configuration(data)
        burst_config.fk_project = base.get_current_project().id

        ## Do the asynchronous launch
        burst_id, burst_name = self.burst_service.launch_burst(
            burst_config, 0, self.cached_simulator_algorithm_id, user_id,
            launch_mode)
        return [burst_id, burst_name]

    @cherrypy.expose
    @ajax_call()
    def load_burst(self, burst_id):
        """
        Given a clicked burst from the history and the selected tab, load all 
        the required data from that burst. Return a value specifying if it was a result
        of a range launch (OperationGroup) or not.
        """
        try:
            old_burst = base.get_from_session(base.KEY_BURST_CONFIG)
            burst, group_gid = self.burst_service.load_burst(burst_id)
            burst.selected_tab = old_burst.selected_tab
            base.add2session(base.KEY_BURST_CONFIG, burst)
            return {
                'status': burst.status,
                'group_gid': group_gid,
                'selected_tab': burst.selected_tab
            }
        except Exception, excep:
            ### Most probably the burst was removed. Delete it from session, so that the
            ### client has a good chance to get a proper response on refresh.
            self.logger.error(excep)
            base.remove_from_session(base.KEY_BURST_CONFIG)
            raise excep
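
The cached_simulator_input_tree property above follows a compute-once, copy-on-read pattern: the expensive introspection result is stored in the web session and deep-copied on every read, so callers can mutate their copy freely. A stripped-down sketch of the same pattern, with an in-memory dict standing in for the real session API:

# Illustrative pattern only; _SESSION stands in for the real web session.
import copy

_SESSION = {}

def get_cached(key, compute):
    """Return a deep copy of a session-cached value, computing it at most once."""
    if key not in _SESSION:
        _SESSION[key] = compute()           # expensive, runs only on first access
    return copy.deepcopy(_SESSION[key])     # callers may safely mutate the copy
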
Example #7
class WorkflowTest(TransactionalTestCase):
    """
    Test that workflow conversion methods are valid.
    """
    def setUp(self):
        #        self.clean_database()
        self.test_user = TestFactory.create_user()
        self.test_project = TestFactory.create_project(self.test_user)
        self.old_config_file = cfg.CURRENT_DIR
        cfg.CURRENT_DIR = os.path.dirname(tvb_test.__file__)
        self.workflow_service = WorkflowService()
        self.burst_service = BurstService()
        self.operation_service = OperationService()
        self.flow_service = FlowService()

    def tearDown(self):
        """
        Remove project folders and clean up database.
        """
        FilesHelper().remove_project_structure(self.test_project.name)
        self.delete_project_folders()
        cfg.CURRENT_DIR = self.old_config_file

    def __create_complex_workflow(self, workflow_step_list):
        """
        Creates a burst with a complex workflow, from a given list of workflow steps.
        :param workflow_step_list: a list of workflow steps that will be used in the
            creation of a new workflow for a new burst
        """
        burst_config = TestFactory.store_burst(self.test_project.id)

        stored_dt = datatypes_factory.DatatypesFactory()._store_datatype(
            Datatype1())

        first_step_algorithm = self.flow_service.get_algorithm_by_module_and_class(
            "tvb_test.adapters.testadapter1", "TestAdapterDatatypeInput")[0]
        metadata = {DataTypeMetaData.KEY_BURST: burst_config.id}
        kwargs = {"test_dt_input": stored_dt.gid, 'test_non_dt_input': '0'}
        operations, group = self.operation_service.prepare_operations(
            self.test_user.id, self.test_project.id, first_step_algorithm,
            first_step_algorithm.algo_group.group_category, metadata, **kwargs)

        workflows = self.workflow_service.create_and_store_workflow(
            project_id=self.test_project.id,
            burst_id=burst_config.id,
            simulator_index=0,
            simulator_id=first_step_algorithm.id,
            operations=operations)
        self.operation_service.prepare_operations_for_workflowsteps(
            workflow_step_list, workflows, self.test_user.id, burst_config.id,
            self.test_project.id, group, operations)
        # Fire the first operation.
        if len(operations) > 0:
            self.operation_service.launch_operation(operations[0].id, False)
        return burst_config.id

    def test_workflow_generation(self):
        """
        A simple test verifying that a workflow is created and run; no dynamic
        parameters are passed. In this case we create a two-step workflow:
                  step1 - tvb_test.adapters.testadapter2.TestAdapter2
                  step2 - tvb_test.adapters.testadapter1.TestAdapter1
        The first adapter doesn't return anything and the second returns one
        tvb.datatypes.datatype1.Datatype1 instance. We check that the steps
        actually ran by verifying the number of operations created and
        dataTypes stored.
        """
        workflow_step_list = [
            TestFactory.create_workflow_step("tvb_test.adapters.testadapter2",
                                             "TestAdapter2",
                                             static_kwargs={"test2": 2},
                                             step_index=1),
            TestFactory.create_workflow_step("tvb_test.adapters.testadapter1",
                                             "TestAdapter1",
                                             static_kwargs={
                                                 "test1_val1": 1,
                                                 "test1_val2": 1
                                             },
                                             step_index=2)
        ]
        self.__create_complex_workflow(workflow_step_list)
        stored_datatypes = dao.get_datatypes_info_for_project(
            self.test_project.id)
        self.assertTrue(
            len(stored_datatypes) == 2,
            "DataType from second step was not stored.")
        self.assertTrue(stored_datatypes[0][0] == 'Datatype1',
                        "Wrong type was stored.")
        self.assertTrue(stored_datatypes[1][0] == 'Datatype1',
                        "Wrong type was stored.")

        finished, started, error, _ = dao.get_operation_numbers(
            self.test_project.id)
        self.assertEqual(
            finished, 3,
            "Didn't start operations for both adapters in workflow.")
        self.assertEqual(started, 0,
                         "Some operations from workflow didn't finish.")
        self.assertEqual(error, 0,
                         "Some operations finished with error status.")

    def test_workflow_dynamic_params(self):
        """
        A simple test verifying that dynamic parameters are passed properly
        between two workflow steps:
                  step1 - tvb_test.adapters.testadapter1.TestAdapter1
                  step2 - tvb_test.adapters.testadapter3.TestAdapter3
        The first adapter returns a tvb.datatypes.datatype1.Datatype1 instance.
        The second adapter has this passed in as a dynamic workflow parameter.
        We check that the steps actually ran by verifying the number of
        operations created and dataTypes stored.
        """
        workflow_step_list = [
            TestFactory.create_workflow_step("tvb_test.adapters.testadapter1",
                                             "TestAdapter1",
                                             static_kwargs={
                                                 "test1_val1": 1,
                                                 "test1_val2": 1
                                             },
                                             step_index=1),
            TestFactory.create_workflow_step("tvb_test.adapters.testadapter3",
                                             "TestAdapter3",
                                             dynamic_kwargs={
                                                 "test": {
                                                     wf_cfg.DATATYPE_INDEX_KEY:
                                                     0,
                                                     wf_cfg.STEP_INDEX_KEY: 1
                                                 }
                                             },
                                             step_index=2)
        ]

        self.__create_complex_workflow(workflow_step_list)
        stored_datatypes = dao.get_datatypes_info_for_project(
            self.test_project.id)
        self.assertTrue(
            len(stored_datatypes) == 3,
            "DataTypes from all steps were not stored.")
        for result_row in stored_datatypes:
            self.assertTrue(result_row[0] in ['Datatype1', 'Datatype2'],
                            "Wrong type was stored.")

        finished, started, error, _ = dao.get_operation_numbers(
            self.test_project.id)
        self.assertEqual(
            finished, 3,
            "Didn't start operations for both adapters in workflow.")
        self.assertEqual(started, 0,
                         "Some operations from workflow didn't finish.")
        self.assertEqual(error, 0,
                         "Some operations finished with error status.")

    def test_configuration2workflow(self):
        """
        Test building a WorkflowStep from a WorkflowStepConfiguration. Make sure all the data is
        correctly passed. Also check that the base_wf_step is added to the dynamic parameters' step index.
        """
        workflow_step = TestFactory.create_workflow_step(
            "tvb_test.adapters.testadapter1",
            "TestAdapter1",
            static_kwargs={"static_param": "test"},
            dynamic_kwargs={
                "dynamic_param": {
                    wf_cfg.STEP_INDEX_KEY: 0,
                    wf_cfg.DATATYPE_INDEX_KEY: 0
                }
            },
            step_index=1,
            base_step=5)
        self.assertEqual(workflow_step.step_index, 1,
                         "Wrong step index in created workflow step.")
        self.assertEqual(workflow_step.static_param, {'static_param': 'test'},
                         'Different static parameters on step.')
        self.assertEqual(
            workflow_step.dynamic_param, {
                'dynamic_param': {
                    wf_cfg.STEP_INDEX_KEY: 5,
                    wf_cfg.DATATYPE_INDEX_KEY: 0
                }
            },
            "Dynamic parameters not saved properly, or base workflow index not added to step index."
        )

    def test_create_workflow(self):
        """
        Test that a workflow with all the associated workflow steps is actually created.
        """
        workflow_step_list = [
            TestFactory.create_workflow_step("tvb_test.adapters.testadapter2",
                                             "TestAdapter2",
                                             static_kwargs={"test2": 2},
                                             step_index=1),
            TestFactory.create_workflow_step("tvb_test.adapters.testadapter1",
                                             "TestAdapter1",
                                             static_kwargs={
                                                 "test1_val1": 1,
                                                 "test1_val2": 1
                                             },
                                             step_index=2)
        ]
        burst_id = self.__create_complex_workflow(workflow_step_list)
        workflow_entities = dao.get_workflows_for_burst(burst_id)
        self.assertTrue(
            len(workflow_entities) == 1,
            "For some reason workflow was not stored in database.")
        workflow_steps = dao.get_workflow_steps(workflow_entities[0].id)
        self.assertEqual(len(workflow_steps),
                         len(workflow_step_list) + 1,
                         "Wrong number of workflow steps created.")
Example #8
class BurstService():
    """
    Service layer for Burst related entities.
    """
    def __init__(self):
        self.operation_service = OperationService()
        self.workflow_service = WorkflowService()
        self.logger = get_logger(self.__class__.__module__)

    def build_portlet_interface(self, portlet_configuration, project_id):
        """
        From a portlet configuration and a project id, first build the portlet
        entity, then get its configurable interface.

        :param portlet_configuration: a portlet configuration entity. It holds at
            least the portlet_id, and in case any default parameters were saved
            they can be rebuilt from the analyzer / visualizer parameters
        :param project_id: the id of the current project

        :returns: the portlet interface, a list of dictionaries, one for each
            adapter that makes up the portlet, of the following form::

            [{'interface': adapter_interface,
              'prefix': prefix_for_parameter_names,
              'subalg': {algorithm_field_name: default_algorithm_value},
              'algo_group': algorithm_group,
              'alg_ui_name': display_name},
             ......]

        """
        portlet_entity = dao.get_portlet_by_id(
            portlet_configuration.portlet_id)
        if portlet_entity is None:
            raise InvalidPortletConfiguration(
                "No portlet entity located in database with id=%s. "
                "Portlet configuration %s is not valid." %
                (portlet_configuration.portlet_id, portlet_configuration))
        portlet_configurer = PortletConfigurer(portlet_entity)
        portlet_interface = portlet_configurer.get_configurable_interface()
        self.logger.debug("Created interface for portlet " +
                          str([portlet_entity]))

        for adapter_conf in portlet_interface:
            interface = adapter_conf.interface
            interface = FlowService().prepare_parameters(
                interface, project_id, adapter_conf.group.fk_category)
            interface = ABCAdapter.prepare_param_names(interface,
                                                       adapter_conf.prefix)
            adapter_conf.interface = interface

        portlet_configurer.update_default_values(portlet_interface,
                                                 portlet_configuration)
        portlet_configurer.prefix_adapters_parameters(portlet_interface)

        return portlet_interface

    @staticmethod
    def update_portlet_configuration(portlet_configuration,
                                     submitted_parameters):
        """
        :param portlet_configuration: the portlet configuration that needs to be updated
        :param submitted_parameters: the parameters as submitted from the UI. This
            is a dictionary in the form:
            {'dynamic' : {name:value pairs}, 'static' : {name:value pairs}}
            
        All names are prefixed with adapter specific generated prefix.
        """
        portlet_entity = dao.get_portlet_by_id(
            portlet_configuration.portlet_id)
        portlet_configurer = PortletConfigurer(portlet_entity)
        return portlet_configurer.update_portlet_configuration(
            portlet_configuration, submitted_parameters)

    @staticmethod
    def new_burst_configuration(project_id):
        """
        Return a new burst configuration entity with all the default values.
        """
        burst_configuration = model.BurstConfiguration(project_id)
        burst_configuration.selected_tab = 0
        BurstService.set_default_portlets(burst_configuration)
        return burst_configuration

    @staticmethod
    def set_default_portlets(burst_configuration):
        """
        Sets the default portlets for the specified burst configuration.
        The default portlets are specified in the __init__.py script from tvb root.
        """
        for tab_idx, value in DEFAULT_PORTLETS.items():
            for sel_idx, portlet_identifier in value.items():
                portlet = BurstService.get_portlet_by_identifier(
                    portlet_identifier)
                if portlet is not None:
                    portlet_configuration = BurstService.new_portlet_configuration(
                        portlet.id, tab_idx, sel_idx,
                        portlet.algorithm_identifier)
                    burst_configuration.set_portlet(tab_idx, sel_idx,
                                                    portlet_configuration)

    @staticmethod
    def _store_burst_config(burst_config):
        """
        Store a burst configuration entity.
        """
        burst_config.prepare_before_save()
        saved_entity = dao.store_entity(burst_config)
        return saved_entity.id

    @staticmethod
    def get_available_bursts(project_id):
        """
        Return all the bursts for the given project.
        """
        bursts = dao.get_bursts_for_project(
            project_id, page_end=MAX_BURSTS_DISPLAYED) or []
        for burst in bursts:
            burst.prepare_after_load()
        return bursts

    @staticmethod
    def rename_burst(burst_id, new_name):
        """
        Rename the burst given by burst_id, setting its new name to
        new_name.
        """
        burst = dao.get_burst_by_id(burst_id)
        burst.name = new_name
        dao.store_entity(burst)

    def load_burst(self, burst_id):
        """
        :param burst_id: the id of the burst that should be loaded
        
        Having this input the method should:
        
            - load the entity from the DB
            - get all the workflow steps for the saved burst id
            - go through the visualization workflow steps to create the tab
                configuration of the burst using the tab_index and index_in_tab 
                fields saved on each workflow_step
                
        """
        burst = dao.get_burst_by_id(burst_id)
        burst.prepare_after_load()
        burst.reset_tabs()
        burst_workflows = dao.get_workflows_for_burst(burst.id)

        group_gid = None
        if len(burst_workflows) == 1:
            # A simple burst with no range parameters
            burst = self.__populate_tabs_from_workflow(burst,
                                                       burst_workflows[0])
        elif len(burst_workflows) > 1:
            # A burst launched over a range of values created multiple workflows, and we
            # need to launch the parameter space exploration with the resulting group
            self.__populate_tabs_from_workflow(burst, burst_workflows[0])
            executed_steps = dao.get_workflow_steps(burst_workflows[0].id)

            operation = dao.get_operation_by_id(executed_steps[0].fk_operation)
            if operation.operation_group:
                workflow_group = dao.get_datatypegroup_by_op_group_id(
                    operation.operation_group.id)
                group_gid = workflow_group.gid
        return burst, group_gid

    @staticmethod
    def __populate_tabs_from_workflow(burst_entity, workflow):
        """
        Given a burst and a workflow populate the tabs of the burst with the PortletConfigurations
        generated from the steps of the workflow.
        """
        visualizers = dao.get_visualization_steps(workflow.id)
        for entry in visualizers:
            ## For each visualizer step, also load all of its analyzer steps.
            portlet_cfg = PortletConfiguration(entry.fk_portlet)
            portlet_cfg.set_visualizer(entry)
            analyzers = dao.get_workflow_steps_for_position(
                entry.fk_workflow, entry.tab_index, entry.index_in_tab)
            portlet_cfg.set_analyzers(analyzers)
            burst_entity.tabs[entry.tab_index].portlets[
                entry.index_in_tab] = portlet_cfg
        return burst_entity

    def load_tab_configuration(self, burst_entity, op_id):
        """
        Given a burst entity and an operation id, find the workflow to which the op_id
        belongs and then load the burst_entity's tab configuration with those workflow steps.
        """
        originating_workflow = dao.get_workflow_for_operation_id(op_id)
        burst_entity = self.__populate_tabs_from_workflow(
            burst_entity, originating_workflow)
        return burst_entity

    @staticmethod
    def new_portlet_configuration(portlet_id,
                                  tab_nr=-1,
                                  index_in_tab=-1,
                                  portlet_name='Default'):
        """
        Return a new portlet configuration entity with default parameters.
        
        :param portlet_id: the id of the portlet for which a configuration will
            be stored
        :param tab_nr: the index of the currently selected tab
        :param index_in_tab: the index from the currently selected tab
        
        """
        portlet_entity = dao.get_portlet_by_id(portlet_id)
        if portlet_entity is None:
            raise InvalidPortletConfiguration(
                "No portlet entity located in database with id=%s." %
                portlet_id)
        portlet_configurer = PortletConfigurer(portlet_entity)
        configuration = portlet_configurer.create_new_portlet_configuration(
            portlet_name)
        for wf_step in configuration.analyzers:
            wf_step.tab_index = tab_nr
            wf_step.index_in_tab = index_in_tab
        configuration.visualizer.tab_index = tab_nr
        configuration.visualizer.index_in_tab = index_in_tab
        return configuration

    @staticmethod
    def get_available_portlets():
        """
        :returns: a list of all the available portlet entities
        """
        return dao.get_available_portlets()

    @staticmethod
    def get_portlet_by_id(portlet_id):
        """
        :returns: the portlet entity with id equal to portlet_id
        """
        return dao.get_portlet_by_id(portlet_id)

    @staticmethod
    def get_portlet_by_identifier(portlet_identifier):
        """
        :returns: the portlet entity with algorithm identifier equal to portlet_identifier
        """
        return dao.get_portlet_by_identifier(portlet_identifier)

    def launch_burst(self,
                     burst_configuration,
                     simulator_index,
                     simulator_id,
                     user_id,
                     launch_mode='new'):
        """
        Given a burst configuration and all the necessary data do the actual launch.
        
        :param burst_configuration: BurstConfiguration
        :param simulator_index: the position the simulator will take within the workflow's step list. This is needed
            so that the rest of the portlet workflow steps know which steps their dynamic parameters come from.
        :param simulator_id: the id of the simulator adapter as stored in the DB. It's needed to load the simulator algo
            group and category that are then passed to the launcher's prepare_operation method.
        :param user_id: the id of the user that launched this burst
        :param launch_mode: new/branch/continue
        """
        ## 1. Prepare BurstConfiguration entity
        if launch_mode == 'new':
            ## Fully new entity for new simulation
            burst_config = burst_configuration.clone()
            if burst_config.name is None:
                new_id = dao.get_max_burst_id() + 1
                burst_config.name = 'simulation_' + str(new_id)
        else:
            ## Branch or Continue simulation
            burst_config = burst_configuration
            simulation_state = dao.get_generic_entity(
                SIMULATION_DATATYPE_MODULE + "." + SIMULATION_DATATYPE_CLASS,
                burst_config.id, "fk_parent_burst")
            if simulation_state is None or len(simulation_state) < 1:
                exc = BurstServiceException(
                    "Simulation state not found for burst_id %s" %
                    burst_config.id)
                self.logger.error(exc)
                raise exc

            simulation_state = simulation_state[0]
            burst_config.update_simulation_parameter("simulation_state",
                                                     simulation_state.gid)
            burst_config = burst_configuration.clone()

            count = dao.count_bursts_with_name(burst_config.name,
                                               burst_config.fk_project)
            burst_config.name = burst_config.name + "_" + launch_mode + str(
                count)

        ## 2. Create Operations and do the actual launch
        if launch_mode in ['new', 'branch']:
            ## New Burst entry in the history
            burst_id = self._store_burst_config(burst_config)
            thread = threading.Thread(target=self._async_launch_and_prepare,
                                      kwargs={
                                          'burst_config': burst_config,
                                          'simulator_index': simulator_index,
                                          'simulator_id': simulator_id,
                                          'user_id': user_id
                                      })
            thread.start()
            return burst_id, burst_config.name
        else:
            ## Continue simulation
            ## TODO
            return burst_config.id, burst_config.name

    @transactional
    def _prepare_operations(self, burst_config, simulator_index, simulator_id,
                            user_id):
        """
        Prepare all required operations for burst launch.
        """
        project_id = burst_config.fk_project
        burst_id = burst_config.id
        workflow_step_list = []
        starting_index = simulator_index + 1

        sim_algo = FlowService().get_algorithm_by_identifier(simulator_id)
        metadata = {DataTypeMetaData.KEY_BURST: burst_id}
        launch_data = burst_config.get_all_simulator_values()[0]
        operations, group = self.operation_service.prepare_operations(
            user_id, project_id, sim_algo, sim_algo.algo_group.group_category,
            metadata, **launch_data)
        group_launched = group is not None
        if group_launched:
            starting_index += 1

        for tab in burst_config.tabs:
            for portlet_cfg in tab.portlets:
                ### For each portlet configuration stored, update the step index,
                ### and also change the dynamic parameters' step indexes to point
                ### to the simulator outputs.
                if portlet_cfg is not None:
                    analyzers = portlet_cfg.analyzers
                    visualizer = portlet_cfg.visualizer
                    for entry in analyzers:
                        entry.step_index = starting_index
                        self.workflow_service.set_dynamic_step_references(
                            entry, simulator_index)
                        workflow_step_list.append(entry)
                        starting_index += 1
                    ### Change the dynamic parameters to point to the last adapter from this portlet execution.
                    visualizer.step_visible = False
                    if len(workflow_step_list) > 0 and isinstance(
                            workflow_step_list[-1], model.WorkflowStep):
                        self.workflow_service.set_dynamic_step_references(
                            visualizer, workflow_step_list[-1].step_index)
                    else:
                        self.workflow_service.set_dynamic_step_references(
                            visualizer, simulator_index)
                    workflow_step_list.append(visualizer)

        if group_launched:
            ### For a group of operations, make sure the metric for the PSE view
            ### is also computed, immediately after the simulation.
            metric_algo, metric_group = FlowService(
            ).get_algorithm_by_module_and_class(MEASURE_METRICS_MODULE,
                                                MEASURE_METRICS_CLASS)
            _, metric_interface = FlowService().prepare_adapter(
                project_id, metric_group)
            dynamics = {}
            for entry in metric_interface:
                # We have a select that should be the dataType, and a select multiple with
                # the required metric algorithms to be evaluated. Only the select type
                # should be a dynamic parameter.
                if entry[ABCAdapter.KEY_TYPE] == 'select':
                    dynamics[entry[ABCAdapter.KEY_NAME]] = {
                        WorkflowStepConfiguration.DATATYPE_INDEX_KEY: 0,
                        WorkflowStepConfiguration.STEP_INDEX_KEY:
                        simulator_index
                    }
            metric_step = model.WorkflowStep(algorithm_id=metric_algo.id,
                                             step_index=simulator_index + 1,
                                             static_param={},
                                             dynamic_param=dynamics)
            metric_step.step_visible = False
            workflow_step_list.insert(0, metric_step)

        workflows = self.workflow_service.create_and_store_workflow(
            project_id, burst_id, simulator_index, simulator_id, operations)
        self.operation_service.prepare_operations_for_workflowsteps(
            workflow_step_list, workflows, user_id, burst_id, project_id,
            group, operations)
        operation_ids = [operation.id for operation in operations]
        return operation_ids

    def _async_launch_and_prepare(self, burst_config, simulator_index,
                                  simulator_id, user_id):
        """
        Prepare operations asynchronously.
        """
        try:
            operation_ids = self._prepare_operations(burst_config,
                                                     simulator_index,
                                                     simulator_id, user_id)
            self.logger.debug("Starting a total of %s workflows" %
                              (len(operation_ids, )))
            wf_errs = 0
            for operation_id in operation_ids:
                try:
                    OperationService().launch_operation(operation_id, True)
                except Exception, excep:
                    self.logger.error(excep)
                    wf_errs += 1
                    self.workflow_service.mark_burst_finished(
                        burst_config, error=True, error_message=str(excep))

            self.logger.debug("Finished launching workflows. " +
                              str(len(operation_ids) - wf_errs) +
                              " were launched successfully, " + str(wf_errs) +
                              " had error on pre-launch steps")
        except Exception, excep:
            self.logger.error(excep)
            self.workflow_service.mark_burst_finished(burst_config,
                                                      error=True,
                                                      error_message=str(excep))
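
A hedged usage sketch for BurstService.launch_burst above: it assumes a running TVB environment, and the project, simulator and user ids below are placeholders, not real database values.

# Illustrative call only; all ids are placeholders and require a live TVB setup.
service = BurstService()
burst_config = service.new_burst_configuration(1)   # 1 = placeholder project id
burst_config.name = 'example_simulation'

# simulator_index=0: the simulator is the first step in the workflow;
# simulator_id and user_id would normally come from the DB / session.
burst_id, burst_name = service.launch_burst(burst_config,
                                            simulator_index=0,
                                            simulator_id=42,
                                            user_id=1,
                                            launch_mode='new')
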
Example #9
 def __init__(self):
     self.logger = get_logger(self.__class__.__module__)
     self.workflow_service = WorkflowService()
     self.file_helper = FilesHelper()