class BurstService:
    """
    Service layer for Burst related entities.
    """

    def __init__(self):
        self.operation_service = OperationService()
        self.workflow_service = WorkflowService()
        self.logger = get_logger(self.__class__.__module__)

    def build_portlet_interface(self, portlet_configuration, project_id):
        """
        From a portlet_id and a project_id, first build the portlet
        entity then get its configurable interface.

        :param portlet_configuration: a portlet configuration entity. It holds at the
            least the portlet_id, and in case any default parameters were saved
            they can be rebuilt from the analyzers / visualizer parameters
        :param project_id: the id of the current project

        :returns: the portlet interface will be of the following form::
            [{'interface': adapter_interface,
              'prefix': prefix_for_parameter_names,
              'subalg': {algorithm_field_name: default_algorithm_value},
              'algo_group': algorithm_group,
              'alg_ui_name': displayname},
              ......]
            A list of dictionaries for each adapter that makes up the portlet.
        """
        portlet_entity = dao.get_portlet_by_id(portlet_configuration.portlet_id)
        if portlet_entity is None:
            raise InvalidPortletConfiguration(
                "No portlet entity located in database with id=%s. "
                "Portlet configuration %s is not valid."
                % (portlet_configuration.portlet_id, portlet_configuration)
            )
        portlet_configurer = PortletConfigurer(portlet_entity)
        portlet_interface = portlet_configurer.get_configurable_interface()
        self.logger.debug("Created interface for portlet " + str([portlet_entity]))

        for adapter_conf in portlet_interface:
            interface = adapter_conf.interface
            interface = FlowService().prepare_parameters(interface, project_id, adapter_conf.group.fk_category)
            interface = ABCAdapter.prepare_param_names(interface, adapter_conf.prefix)
            adapter_conf.interface = interface

        portlet_configurer.update_default_values(portlet_interface, portlet_configuration)
        portlet_configurer.prefix_adapters_parameters(portlet_interface)
        return portlet_interface

    @staticmethod
    def update_portlet_configuration(portlet_configuration, submited_parameters):
        """
        :param portlet_configuration: the portlet configuration that needs to be updated
        :param submited_parameters: the parameters as submitted from the UI. This is
            a dictionary in the form:
            {'dynamic': {name: value pairs}, 'static': {name: value pairs}}.
            All names are prefixed with an adapter-specific generated prefix.
        """
        portlet_entity = dao.get_portlet_by_id(portlet_configuration.portlet_id)
        portlet_configurer = PortletConfigurer(portlet_entity)
        return portlet_configurer.update_portlet_configuration(portlet_configuration, submited_parameters)

    @staticmethod
    def new_burst_configuration(project_id):
        """
        Return a new burst configuration entity with all the default values.
        """
        burst_configuration = model.BurstConfiguration(project_id)
        burst_configuration.selected_tab = 0
        BurstService.set_default_portlets(burst_configuration)
        return burst_configuration

    @staticmethod
    def set_default_portlets(burst_configuration):
        """
        Sets the default portlets for the specified burst configuration.
        The default portlets are specified in the __init__.py script from tvb root.
        """
        for tab_idx, value in DEFAULT_PORTLETS.items():
            for sel_idx, portlet_identifier in value.items():
                portlet = BurstService.get_portlet_by_identifier(portlet_identifier)
                if portlet is not None:
                    portlet_configuration = BurstService.new_portlet_configuration(
                        portlet.id, tab_idx, sel_idx, portlet.algorithm_identifier
                    )
                    burst_configuration.set_portlet(tab_idx, sel_idx, portlet_configuration)

    @staticmethod
    def _store_burst_config(burst_config):
        """
        Store a burst configuration entity.
        """
        burst_config.prepare_before_save()
        saved_entity = dao.store_entity(burst_config)
        return saved_entity.id

    @staticmethod
    def get_available_bursts(project_id):
        """
        Return all the bursts for the current project.
        """
        bursts = dao.get_bursts_for_project(project_id, page_size=MAX_BURSTS_DISPLAYED) or []
        for burst in bursts:
            burst.prepare_after_load()
        return bursts

    @staticmethod
    def populate_burst_disk_usage(bursts):
        """
        Adds a disk_usage field to each burst object.
        The disk usage is computed as the sum of the sizes of the datatypes generated by the burst.
        """
        sizes = dao.compute_bursts_disk_size([b.id for b in bursts])
        for b in bursts:
            b.disk_size = format_bytes_human(sizes[b.id])

    @staticmethod
    def rename_burst(burst_id, new_name):
        """
        Rename the burst given by burst_id, setting its name to new_name.
        """
        burst = dao.get_burst_by_id(burst_id)
        burst.name = new_name
        dao.store_entity(burst)

    def load_burst(self, burst_id):
        """
        :param burst_id: the id of the burst that should be loaded

        Having this input the method should:
            - load the entity from the DB
            - get all the workflow steps for the saved burst id
            - go through the visualization workflow steps to create the tab
              configuration of the burst, using the tab_index and index_in_tab
              fields saved on each workflow_step
        """
        burst = dao.get_burst_by_id(burst_id)
        burst.prepare_after_load()
        burst.reset_tabs()
        burst_workflows = dao.get_workflows_for_burst(burst.id)

        group_gid = None
        if len(burst_workflows) == 1:
            # A simple burst with no range parameters
            burst = self.__populate_tabs_from_workflow(burst, burst_workflows[0])
        elif len(burst_workflows) > 1:
            # A burst workflow with a range of values created multiple workflows, and we
            # need to launch a parameter space exploration with the resulting group
            self.__populate_tabs_from_workflow(burst, burst_workflows[0])
            executed_steps = dao.get_workflow_steps(burst_workflows[0].id)

            operation = dao.get_operation_by_id(executed_steps[0].fk_operation)
            if operation.operation_group:
                workflow_group = dao.get_datatypegroup_by_op_group_id(operation.operation_group.id)
                group_gid = workflow_group.gid
        return burst, group_gid

    @staticmethod
    def __populate_tabs_from_workflow(burst_entity, workflow):
        """
        Given a burst and a workflow, populate the tabs of the burst with the
        PortletConfigurations generated from the steps of the workflow.
        """
        visualizers = dao.get_visualization_steps(workflow.id)
        for entry in visualizers:
            # For each visualize step, also load all of the analyze steps.
            portlet_cfg = PortletConfiguration(entry.fk_portlet)
            portlet_cfg.set_visualizer(entry)
            analyzers = dao.get_workflow_steps_for_position(entry.fk_workflow, entry.tab_index, entry.index_in_tab)
            portlet_cfg.set_analyzers(analyzers)
            burst_entity.tabs[entry.tab_index].portlets[entry.index_in_tab] = portlet_cfg
        return burst_entity

    def load_tab_configuration(self, burst_entity, op_id):
        """
        Given a burst entity and an operation id, find the workflow to which the op_id
        belongs, then load the burst_entity's tab configuration with those workflow steps.
        """
        originating_workflow = dao.get_workflow_for_operation_id(op_id)
        burst_entity = self.__populate_tabs_from_workflow(burst_entity, originating_workflow)
        return burst_entity

    @staticmethod
    def new_portlet_configuration(portlet_id, tab_nr=-1, index_in_tab=-1, portlet_name="Default"):
        """
        Return a new portlet configuration entity with default parameters.

        :param portlet_id: the id of the portlet for which a configuration will be stored
        :param tab_nr: the index of the currently selected tab
        :param index_in_tab: the index from the currently selected tab
        """
        portlet_entity = dao.get_portlet_by_id(portlet_id)
        if portlet_entity is None:
            raise InvalidPortletConfiguration("No portlet entity located in database with id=%s." % portlet_id)
        portlet_configurer = PortletConfigurer(portlet_entity)
        configuration = portlet_configurer.create_new_portlet_configuration(portlet_name)
        for wf_step in configuration.analyzers:
            wf_step.tab_index = tab_nr
            wf_step.index_in_tab = index_in_tab
        configuration.visualizer.tab_index = tab_nr
        configuration.visualizer.index_in_tab = index_in_tab
        return configuration

    @staticmethod
    def get_available_portlets():
        """
        :returns: a list of all the available portlet entities
        """
        return dao.get_available_portlets()

    @staticmethod
    def get_portlet_by_id(portlet_id):
        """
        :returns: the portlet entity with id = portlet_id
        """
        return dao.get_portlet_by_id(portlet_id)

    @staticmethod
    def get_portlet_by_identifier(portlet_identifier):
        """
        :returns: the portlet entity with algorithm identifier = portlet_identifier
        """
        return dao.get_portlet_by_identifier(portlet_identifier)

    def launch_burst(self, burst_configuration, simulator_index, simulator_id, user_id, launch_mode=LAUNCH_NEW):
        """
        Given a burst configuration and all the necessary data, do the actual launch.

        :param burst_configuration: BurstConfiguration
        :param simulator_index: the position the simulator will take within the
            workflow's step list. This is needed so that the rest of the portlet
            workflow steps know which steps their dynamic parameters come from.
        :param simulator_id: the id of the simulator adapter as stored in the DB.
            It's needed to load the simulator algo group and category that are
            then passed to the launcher's prepare_operation method.
        :param user_id: the id of the user that launched this burst
        :param launch_mode: new/branch/continue
        """
        # 1. Prepare BurstConfiguration entity
        if launch_mode == LAUNCH_NEW:
            # Fully new entity for a new simulation
            burst_config = burst_configuration.clone()
            if burst_config.name is None:
                new_id = dao.get_max_burst_id() + 1
                burst_config.name = "simulation_" + str(new_id)
        else:
            # Branch or Continue simulation
            burst_config = burst_configuration
            simulation_state = dao.get_generic_entity(
                SIMULATION_DATATYPE_MODULE + "." + SIMULATION_DATATYPE_CLASS, burst_config.id, "fk_parent_burst"
            )
            if simulation_state is None or len(simulation_state) < 1:
                exc = BurstServiceException(
                    "Simulation State not found for %s, "
                    "thus we are unable to branch from it!" % burst_config.name
                )
                self.logger.error(exc)
                raise exc

            simulation_state = simulation_state[0]
            burst_config.update_simulation_parameter("simulation_state", simulation_state.gid)
            burst_config = burst_configuration.clone()

            count = dao.count_bursts_with_name(burst_config.name, burst_config.fk_project)
            burst_config.name = burst_config.name + "_" + launch_mode + str(count)

        # 2. Create Operations and do the actual launch
        if launch_mode in [LAUNCH_NEW, LAUNCH_BRANCH]:
            # New Burst entry in the history
            burst_id = self._store_burst_config(burst_config)
            thread = threading.Thread(
                target=self._async_launch_and_prepare,
                kwargs={
                    "burst_config": burst_config,
                    "simulator_index": simulator_index,
                    "simulator_id": simulator_id,
                    "user_id": user_id,
                },
            )
            thread.start()
            return burst_id, burst_config.name
        else:
            # Continue simulation
            # TODO
            return burst_config.id, burst_config.name

    @transactional
    def _prepare_operations(self, burst_config, simulator_index, simulator_id, user_id):
        """
        Prepare all required operations for burst launch.
        """
        project_id = burst_config.fk_project
        burst_id = burst_config.id
        workflow_step_list = []
        starting_index = simulator_index + 1

        sim_algo = FlowService().get_algorithm_by_identifier(simulator_id)
        metadata = {DataTypeMetaData.KEY_BURST: burst_id}
        launch_data = burst_config.get_all_simulator_values()[0]
        operations, group = self.operation_service.prepare_operations(
            user_id, project_id, sim_algo, sim_algo.algo_group.group_category, metadata, **launch_data
        )
        group_launched = group is not None
        if group_launched:
            starting_index += 1

        for tab in burst_config.tabs:
            for portlet_cfg in tab.portlets:
                # For each portlet configuration stored, update the step index,
                # and also change the dynamic parameters' step indexes to point
                # to the simulator outputs.
                if portlet_cfg is not None:
                    analyzers = portlet_cfg.analyzers
                    visualizer = portlet_cfg.visualizer
                    for entry in analyzers:
                        entry.step_index = starting_index
                        self.workflow_service.set_dynamic_step_references(entry, simulator_index)
                        workflow_step_list.append(entry)
                        starting_index += 1
                    # Change the dynamic parameters to point to the last adapter from this portlet execution.
                    visualizer.step_visible = False
                    if len(workflow_step_list) > 0 and isinstance(workflow_step_list[-1], model.WorkflowStep):
                        self.workflow_service.set_dynamic_step_references(visualizer, workflow_step_list[-1].step_index)
                    else:
                        self.workflow_service.set_dynamic_step_references(visualizer, simulator_index)
                    # Only a single operation gets the visualization step; otherwise it is useless.
                    if not group_launched:
                        workflow_step_list.append(visualizer)

        if group_launched:
            # For a group of operations, make sure the metric for the PSE view
            # is also computed, immediately after the simulation.
            metric_algo, metric_group = FlowService().get_algorithm_by_module_and_class(
                MEASURE_METRICS_MODULE, MEASURE_METRICS_CLASS
            )
            _, metric_interface = FlowService().prepare_adapter(project_id, metric_group)
            dynamics = {}
            for entry in metric_interface:
                # We have a select that should be the dataType, and a select multiple with the
                # required metric algorithms to be evaluated. The only dynamic parameter should
                # be the select type.
                if entry[ABCAdapter.KEY_TYPE] == "select":
                    dynamics[entry[ABCAdapter.KEY_NAME]] = {
                        WorkflowStepConfiguration.DATATYPE_INDEX_KEY: 0,
                        WorkflowStepConfiguration.STEP_INDEX_KEY: simulator_index,
                    }
            metric_step = model.WorkflowStep(
                algorithm_id=metric_algo.id, step_index=simulator_index + 1, static_param={}, dynamic_param=dynamics
            )
            metric_step.step_visible = False
            workflow_step_list.insert(0, metric_step)

        workflows = self.workflow_service.create_and_store_workflow(
            project_id, burst_id, simulator_index, simulator_id, operations
        )
        self.operation_service.prepare_operations_for_workflowsteps(
            workflow_step_list, workflows, user_id, burst_id, project_id, group, operations
        )
        operation_ids = [operation.id for operation in operations]
        return operation_ids

    def _async_launch_and_prepare(self, burst_config, simulator_index, simulator_id, user_id):
        """
        Prepare operations asynchronously.
        """
        try:
            operation_ids = self._prepare_operations(burst_config, simulator_index, simulator_id, user_id)
            self.logger.debug("Starting a total of %s workflows" % len(operation_ids))
            wf_errs = 0
            for operation_id in operation_ids:
                try:
                    OperationService().launch_operation(operation_id, True)
                except Exception as excep:
                    self.logger.error(excep)
                    wf_errs += 1
                    self.workflow_service.mark_burst_finished(burst_config, error_message=str(excep))

            self.logger.debug(
                "Finished launching workflows. "
                + str(len(operation_ids) - wf_errs)
                + " were launched successfully, "
                + str(wf_errs)
                + " had errors on pre-launch steps"
            )
        except Exception as excep:
            self.logger.error(excep)
            self.workflow_service.mark_burst_finished(burst_config, error_message=str(excep))
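
# ---------------------------------------------------------------------------
# A minimal usage sketch (illustrative only, and deliberately never called):
# it shows the intended call sequence for launching a brand new burst with the
# service above. The project_id / simulator_id / user_id arguments are assumed
# to reference entities that already exist in the database.
def _example_launch_new_burst(project_id, simulator_id, user_id):
    burst_service = BurstService()
    # Build a configuration pre-populated with the default portlets.
    burst_config = BurstService.new_burst_configuration(project_id)
    # launch_burst stores the burst and fires the operations on a background
    # thread (_async_launch_and_prepare), so it returns immediately.
    burst_id, burst_name = burst_service.launch_burst(burst_config, simulator_index=0,
                                                      simulator_id=simulator_id, user_id=user_id,
                                                      launch_mode=LAUNCH_NEW)
    return burst_id, burst_name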
class TestWorkflow(TransactionalTestCase):
    """
    Test that workflow conversion methods are valid.
    """

    def transactional_setup_method(self):
        """
        Sets up the testing environment: saves the config file and creates a test user
        and a test project; creates burst, operation, flow and workflow services.
        """
        self.test_user = TestFactory.create_user()
        self.test_project = TestFactory.create_project(self.test_user)
        self.workflow_service = WorkflowService()
        self.burst_service = BurstService()
        self.operation_service = OperationService()
        self.flow_service = FlowService()

    def transactional_teardown_method(self):
        """
        Remove project folders and clean up database.
        """
        FilesHelper().remove_project_structure(self.test_project.name)
        self.delete_project_folders()

    def __create_complex_workflow(self, workflow_step_list):
        """
        Creates a burst with a complex workflow, from a given list of workflow steps.

        :param workflow_step_list: a list of workflow steps that will be used in the
            creation of a new workflow for a new burst
        """
        burst_config = TestFactory.store_burst(self.test_project.id)
        stored_dt = datatypes_factory.DatatypesFactory()._store_datatype(Datatype1())
        first_step_algorithm = self.flow_service.get_algorithm_by_module_and_class(
            "tvb.tests.framework.adapters.testadapter1", "TestAdapterDatatypeInput")
        metadata = {DataTypeMetaData.KEY_BURST: burst_config.id}
        kwargs = {"test_dt_input": stored_dt.gid, "test_non_dt_input": "0"}
        operations, group = self.operation_service.prepare_operations(
            self.test_user.id, self.test_project.id, first_step_algorithm,
            first_step_algorithm.algorithm_category, metadata, **kwargs)

        workflows = self.workflow_service.create_and_store_workflow(
            project_id=self.test_project.id, burst_id=burst_config.id,
            simulator_index=0, simulator_id=first_step_algorithm.id, operations=operations)
        self.operation_service.prepare_operations_for_workflowsteps(
            workflow_step_list, workflows, self.test_user.id,
            burst_config.id, self.test_project.id, group, operations)
        # Fire the first operation.
        if len(operations) > 0:
            self.operation_service.launch_operation(operations[0].id, False)
        return burst_config.id

    def test_workflow_generation(self):
        """
        A simple test for the fact that a workflow is created and run; no dynamic
        parameters are passed. In this case we create a two-step workflow:

        step1 - tvb.tests.framework.adapters.testadapter2.TestAdapter2
        step2 - tvb.tests.framework.adapters.testadapter1.TestAdapter1

        The first adapter doesn't return anything and the second returns one
        tvb.datatypes.datatype1.Datatype1 instance. We check that the steps are
        actually run by checking the number of operations created and dataTypes stored.
        """
        workflow_step_list = [
            TestFactory.create_workflow_step("tvb.tests.framework.adapters.testadapter2", "TestAdapter2",
                                             step_index=1, static_kwargs={"test2": 2}),
            TestFactory.create_workflow_step("tvb.tests.framework.adapters.testadapter1", "TestAdapter1",
                                             step_index=2, static_kwargs={"test1_val1": 1, "test1_val2": 1})]
        self.__create_complex_workflow(workflow_step_list)
        stored_datatypes = dao.get_datatypes_in_project(self.test_project.id)
        assert len(stored_datatypes) == 2, "DataType from second step was not stored."
        assert stored_datatypes[0].type == 'Datatype1', "Wrong type was stored."
        assert stored_datatypes[1].type == 'Datatype1', "Wrong type was stored."

        finished, started, error, _, _ = dao.get_operation_numbers(self.test_project.id)
        assert finished == 3, "Didn't start operations for both adapters in workflow."
        assert started == 0, "Some operations from workflow didn't finish."
        assert error == 0, "Some operations finished with error status."

    def test_workflow_dynamic_params(self):
        """
        A simple test for the fact that dynamic parameters are passed properly
        between two workflow steps:

        step1 - tvb.tests.framework.adapters.testadapter1.TestAdapter1
        step2 - tvb.tests.framework.adapters.testadapter3.TestAdapter3

        The first adapter returns a tvb.datatypes.datatype1.Datatype1 instance.
        The second adapter has this passed as a dynamic workflow parameter.
        We check that the steps are actually run by checking the number of
        operations created and dataTypes stored.
        """
        workflow_step_list = [
            TestFactory.create_workflow_step("tvb.tests.framework.adapters.testadapter1", "TestAdapter1",
                                             step_index=1, static_kwargs={"test1_val1": 1, "test1_val2": 1}),
            TestFactory.create_workflow_step("tvb.tests.framework.adapters.testadapter3", "TestAdapter3",
                                             step_index=2,
                                             dynamic_kwargs={"test": {wf_cfg.DATATYPE_INDEX_KEY: 0,
                                                                      wf_cfg.STEP_INDEX_KEY: 1}})]
        self.__create_complex_workflow(workflow_step_list)
        stored_datatypes = dao.get_datatypes_in_project(self.test_project.id)
        assert len(stored_datatypes) == 3, "DataTypes from all steps were not stored."
        for result_row in stored_datatypes:
            assert result_row.type in ['Datatype1', 'Datatype2'], "Wrong type was stored."

        finished, started, error, _, _ = dao.get_operation_numbers(self.test_project.id)
        assert finished == 3, "Didn't start operations for both adapters in workflow."
        assert started == 0, "Some operations from workflow didn't finish."
        assert error == 0, "Some operations finished with error status."

    def test_configuration2workflow(self):
        """
        Test building a WorkflowStep from a WorkflowStepConfiguration. Make sure all
        the data is correctly passed. Also check that any base_wf_step is added to
        the dynamic parameters' step index.
        """
        workflow_step = TestFactory.create_workflow_step(
            "tvb.tests.framework.adapters.testadapter1", "TestAdapter1",
            static_kwargs={"static_param": "test"},
            dynamic_kwargs={"dynamic_param": {wf_cfg.STEP_INDEX_KEY: 0, wf_cfg.DATATYPE_INDEX_KEY: 0}},
            step_index=1, base_step=5)

        assert workflow_step.step_index == 1, "Wrong step index in created workflow step."
        assert workflow_step.static_param == {'static_param': 'test'}, "Different static parameters on step."
        assert workflow_step.dynamic_param == {'dynamic_param': {wf_cfg.STEP_INDEX_KEY: 5,
                                                                 wf_cfg.DATATYPE_INDEX_KEY: 0}}, \
            "Dynamic parameters not saved properly, or base workflow index not added to step index."

    def test_create_workflow(self):
        """
        Test that a workflow with all the associated workflow steps is actually created.
        """
        workflow_step_list = [
            TestFactory.create_workflow_step("tvb.tests.framework.adapters.testadapter2", "TestAdapter2",
                                             step_index=1, static_kwargs={"test2": 2}),
            TestFactory.create_workflow_step("tvb.tests.framework.adapters.testadapter1", "TestAdapter1",
                                             step_index=2, static_kwargs={"test1_val1": 1, "test1_val2": 1})]
        burst_id = self.__create_complex_workflow(workflow_step_list)
        workflow_entities = dao.get_workflows_for_burst(burst_id)
        assert len(workflow_entities) == 1, "For some reason workflow was not stored in database."
        workflow_steps = dao.get_workflow_steps(workflow_entities[0].id)
        assert len(workflow_steps) == len(workflow_step_list) + 1, "Wrong number of workflow steps created."
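
# Illustrative note on the dynamic parameter shape used in the tests above
# (wf_cfg is the WorkflowStepConfiguration alias these tests rely on): each
# dynamic kwarg maps a parameter name to the workflow step it reads from and
# to the index of that step's output, e.g.
#
#     {"test": {wf_cfg.STEP_INDEX_KEY: 1,          # take data from step 1
#               wf_cfg.DATATYPE_INDEX_KEY: 0}}     # ... its first result
#
# and test_configuration2workflow checks that base_step is added to the
# STEP_INDEX_KEY value (0 + base_step 5 == 5).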
class BurstService(object):
    """
    Service layer for Burst related entities.
    """

    def __init__(self):
        self.operation_service = OperationService()
        self.workflow_service = WorkflowService()
        self.logger = get_logger(self.__class__.__module__)
        self.cache_portlet_configurators = {}

    def build_portlet_interface(self, portlet_configuration, project_id):
        """
        From a portlet_id and a project_id, first build the portlet
        entity then get its configurable interface.

        :param portlet_configuration: a portlet configuration entity. It holds at the
            least the portlet_id, and in case any default parameters were saved
            they can be rebuilt from the analyzers / visualizer parameters
        :param project_id: the id of the current project

        :returns: the portlet interface will be of the following form::
            [{'interface': adapter_interface,
              'prefix': prefix_for_parameter_names,
              'subalg': {algorithm_field_name: default_algorithm_value},
              'algo_group': algorithm_group,
              'alg_ui_name': displayname},
              ......]
            A list of dictionaries for each adapter that makes up the portlet.
        """
        portlet_configurer = self._get_portlet_configurer(portlet_configuration.portlet_id)
        portlet_interface = portlet_configurer.get_configurable_interface()

        for adapter_conf in portlet_interface:
            interface = adapter_conf.interface
            itree_mngr = InputTreeManager()
            interface = itree_mngr.fill_input_tree_with_options(interface, project_id,
                                                                adapter_conf.stored_adapter.fk_category)
            adapter_conf.interface = itree_mngr.prepare_param_names(interface)

        portlet_configurer.update_default_values(portlet_interface, portlet_configuration)
        portlet_configurer.prefix_adapters_parameters(portlet_interface)
        return portlet_interface

    def _get_portlet_configurer(self, portlet_id):
        if portlet_id not in self.cache_portlet_configurators:
            portlet_entity = dao.get_portlet_by_id(portlet_id)
            if portlet_entity is None:
                raise InvalidPortletConfiguration("No portlet entity located in database with id=%s." % portlet_id)

            self.cache_portlet_configurators[portlet_id] = PortletConfigurer(portlet_entity)
            self.logger.debug("Recently parsed portlet XML: " + str([portlet_entity]))

        return self.cache_portlet_configurators[portlet_id]

    def update_portlet_configuration(self, portlet_configuration, submited_parameters):
        """
        :param portlet_configuration: the portlet configuration that needs to be updated
        :param submited_parameters: the parameters as submitted from the UI. This is
            a dictionary in the form:
            {'dynamic': {name: value pairs}, 'static': {name: value pairs}}.
            All names are prefixed with an adapter-specific generated prefix.
        """
        portlet_configurer = self._get_portlet_configurer(portlet_configuration.portlet_id)
        return portlet_configurer.update_portlet_configuration(portlet_configuration, submited_parameters)

    def new_burst_configuration(self, project_id):
        """
        Return a new burst configuration entity with all the default values.
        """
        burst_configuration = model.BurstConfiguration(project_id)
        burst_configuration.selected_tab = 0

        # Now set the default portlets for the specified burst configuration.
        # The default portlets are specified in the __init__.py script from tvb root.
        for tab_idx, value in DEFAULT_PORTLETS.items():
            for sel_idx, portlet_identifier in value.items():
                portlet = BurstService.get_portlet_by_identifier(portlet_identifier)
                if portlet is not None:
                    portlet_configuration = self.new_portlet_configuration(portlet.id, tab_idx, sel_idx,
                                                                           portlet.algorithm_identifier)
                    burst_configuration.set_portlet(tab_idx, sel_idx, portlet_configuration)

        return burst_configuration

    @staticmethod
    def _store_burst_config(burst_config):
        """
        Store a burst configuration entity.
        """
        burst_config.prepare_before_save()
        saved_entity = dao.store_entity(burst_config)
        return saved_entity.id

    @staticmethod
    def get_available_bursts(project_id):
        """
        Return all the bursts for the current project.
        """
        bursts = dao.get_bursts_for_project(project_id, page_size=MAX_BURSTS_DISPLAYED) or []
        for burst in bursts:
            burst.prepare_after_load()
        return bursts

    @staticmethod
    def populate_burst_disk_usage(bursts):
        """
        Adds a disk_usage field to each burst object.
        The disk usage is computed as the sum of the sizes of the datatypes generated by the burst.
        """
        sizes = dao.compute_bursts_disk_size([b.id for b in bursts])
        for b in bursts:
            b.disk_size = format_bytes_human(sizes[b.id])

    @staticmethod
    def rename_burst(burst_id, new_name):
        """
        Rename the burst given by burst_id, setting its name to new_name.
        """
        burst = dao.get_burst_by_id(burst_id)
        burst.name = new_name
        dao.store_entity(burst)

    def load_burst(self, burst_id):
        """
        :param burst_id: the id of the burst that should be loaded

        Having this input the method should:
            - load the entity from the DB
            - get all the workflow steps for the saved burst id
            - go through the visualization workflow steps to create the tab
              configuration of the burst, using the tab_index and index_in_tab
              fields saved on each workflow_step
        """
        burst = dao.get_burst_by_id(burst_id)
        burst.prepare_after_load()
        burst.reset_tabs()
        burst_workflows = dao.get_workflows_for_burst(burst.id)

        group_gid = None
        if len(burst_workflows) == 1:
            # A simple burst with no range parameters
            burst = self.__populate_tabs_from_workflow(burst, burst_workflows[0])
        elif len(burst_workflows) > 1:
            # A burst workflow with a range of values created multiple workflows, and we
            # need to launch a parameter space exploration with the resulting group
            self.__populate_tabs_from_workflow(burst, burst_workflows[0])
            executed_steps = dao.get_workflow_steps(burst_workflows[0].id)

            operation = dao.get_operation_by_id(executed_steps[0].fk_operation)
            if operation.operation_group:
                workflow_group = dao.get_datatypegroup_by_op_group_id(operation.operation_group.id)
                group_gid = workflow_group.gid
        return burst, group_gid

    @staticmethod
    def __populate_tabs_from_workflow(burst_entity, workflow):
        """
        Given a burst and a workflow, populate the tabs of the burst with the
        PortletConfigurations generated from the steps of the workflow.
        """
        visualizers = dao.get_visualization_steps(workflow.id)
        for entry in visualizers:
            # For each visualize step, also load all of the analyze steps.
            portlet_cfg = PortletConfiguration(entry.fk_portlet)
            portlet_cfg.set_visualizer(entry)
            analyzers = dao.get_workflow_steps_for_position(entry.fk_workflow, entry.tab_index, entry.index_in_tab)
            portlet_cfg.set_analyzers(analyzers)
            burst_entity.tabs[entry.tab_index].portlets[entry.index_in_tab] = portlet_cfg
        return burst_entity

    def load_tab_configuration(self, burst_entity, op_id):
        """
        Given a burst entity and an operation id, find the workflow to which the op_id
        belongs, then load the burst_entity's tab configuration with those workflow steps.
        """
        originating_workflow = dao.get_workflow_for_operation_id(op_id)
        burst_entity = self.__populate_tabs_from_workflow(burst_entity, originating_workflow)
        return burst_entity

    def new_portlet_configuration(self, portlet_id, tab_nr=-1, index_in_tab=-1, portlet_name='Default'):
        """
        Return a new portlet configuration entity with default parameters.

        :param portlet_id: the id of the portlet for which a configuration will be stored
        :param tab_nr: the index of the currently selected tab
        :param index_in_tab: the index from the currently selected tab
        """
        portlet_configurer = self._get_portlet_configurer(portlet_id)
        configuration = portlet_configurer.create_new_portlet_configuration(portlet_name)
        for wf_step in configuration.analyzers:
            wf_step.tab_index = tab_nr
            wf_step.index_in_tab = index_in_tab
        configuration.visualizer.tab_index = tab_nr
        configuration.visualizer.index_in_tab = index_in_tab
        return configuration

    @staticmethod
    def get_available_portlets():
        """
        :returns: a list of all the available portlet entities
        """
        return dao.get_available_portlets()

    @staticmethod
    def get_portlet_by_id(portlet_id):
        """
        :returns: the portlet entity with id = portlet_id
        """
        return dao.get_portlet_by_id(portlet_id)

    @staticmethod
    def get_portlet_by_identifier(portlet_identifier):
        """
        :returns: the portlet entity with algorithm identifier = portlet_identifier
        """
        return dao.get_portlet_by_identifier(portlet_identifier)

    def launch_burst(self, burst_configuration, simulator_index, simulator_id, user_id, launch_mode=LAUNCH_NEW):
        """
        Given a burst configuration and all the necessary data, do the actual launch.

        :param burst_configuration: BurstConfiguration
        :param simulator_index: the position the simulator will take within the
            workflow's step list. This is needed so that the rest of the portlet
            workflow steps know which steps their dynamic parameters come from.
        :param simulator_id: the id of the simulator adapter as stored in the DB.
            It's needed to load the simulator algo group and category that are
            then passed to the launcher's prepare_operation method.
        :param user_id: the id of the user that launched this burst
        :param launch_mode: new/branch/continue
        """
        # 1. Prepare BurstConfiguration entity
        if launch_mode == LAUNCH_NEW:
            # Fully new entity for a new simulation
            burst_config = burst_configuration.clone()
            if burst_config.name is None:
                new_id = dao.get_max_burst_id() + 1
                burst_config.name = 'simulation_' + str(new_id)
        else:
            # Branch or Continue simulation
            burst_config = burst_configuration
            simulation_state = dao.get_generic_entity(SIMULATION_DATATYPE_MODULE + "." + SIMULATION_DATATYPE_CLASS,
                                                      burst_config.id, "fk_parent_burst")
            if simulation_state is None or len(simulation_state) < 1:
                exc = BurstServiceException("Simulation State not found for %s, "
                                            "thus we are unable to branch from it!" % burst_config.name)
                self.logger.error(exc)
                raise exc

            simulation_state = simulation_state[0]
            burst_config.update_simulation_parameter("simulation_state", simulation_state.gid)
            burst_config = burst_configuration.clone()

            count = dao.count_bursts_with_name(burst_config.name, burst_config.fk_project)
            burst_config.name = burst_config.name + "_" + launch_mode + str(count)

        # 2. Create Operations and do the actual launch
        if launch_mode in [LAUNCH_NEW, LAUNCH_BRANCH]:
            # New Burst entry in the history
            burst_id = self._store_burst_config(burst_config)
            thread = threading.Thread(target=self._async_launch_and_prepare,
                                      kwargs={'burst_config': burst_config,
                                              'simulator_index': simulator_index,
                                              'simulator_id': simulator_id,
                                              'user_id': user_id})
            thread.start()
            return burst_id, burst_config.name
        else:
            # Continue simulation
            # TODO
            return burst_config.id, burst_config.name

    @transactional
    def _prepare_operations(self, burst_config, simulator_index, simulator_id, user_id):
        """
        Prepare all required operations for burst launch.
        """
        project_id = burst_config.fk_project
        burst_id = burst_config.id
        workflow_step_list = []
        starting_index = simulator_index + 1

        sim_algo = FlowService().get_algorithm_by_identifier(simulator_id)
        metadata = {DataTypeMetaData.KEY_BURST: burst_id}
        launch_data = burst_config.get_all_simulator_values()[0]
        operations, group = self.operation_service.prepare_operations(user_id, project_id, sim_algo,
                                                                      sim_algo.algorithm_category, metadata,
                                                                      **launch_data)
        group_launched = group is not None
        if group_launched:
            starting_index += 1

        for tab in burst_config.tabs:
            for portlet_cfg in tab.portlets:
                # For each portlet configuration stored, update the step index,
                # and also change the dynamic parameters' step indexes to point
                # to the simulator outputs.
                if portlet_cfg is not None:
                    analyzers = portlet_cfg.analyzers
                    visualizer = portlet_cfg.visualizer
                    for entry in analyzers:
                        entry.step_index = starting_index
                        self.workflow_service.set_dynamic_step_references(entry, simulator_index)
                        workflow_step_list.append(entry)
                        starting_index += 1
                    # Change the dynamic parameters to point to the last adapter from this portlet execution.
                    visualizer.step_visible = False
                    if len(workflow_step_list) > 0 and isinstance(workflow_step_list[-1], model.WorkflowStep):
                        self.workflow_service.set_dynamic_step_references(visualizer,
                                                                          workflow_step_list[-1].step_index)
                    else:
                        self.workflow_service.set_dynamic_step_references(visualizer, simulator_index)
                    # Only a single operation gets the visualization step; otherwise it is useless.
                    if not group_launched:
                        workflow_step_list.append(visualizer)

        if group_launched:
            # For a group of operations, make sure the metric for the PSE view
            # is also computed, immediately after the simulation.
            metric_algo = FlowService().get_algorithm_by_module_and_class(MEASURE_METRICS_MODULE,
                                                                          MEASURE_METRICS_CLASS)
            metric_interface = FlowService().prepare_adapter(project_id, metric_algo)
            dynamics = {}
            for entry in metric_interface:
                # We have a select that should be the dataType, and a select multiple with the
                # required metric algorithms to be evaluated. The only dynamic parameter should
                # be the select type.
                if entry[KEY_TYPE] == TYPE_SELECT:
                    dynamics[entry[KEY_NAME]] = {WorkflowStepConfiguration.DATATYPE_INDEX_KEY: 0,
                                                 WorkflowStepConfiguration.STEP_INDEX_KEY: simulator_index}
            metric_step = model.WorkflowStep(algorithm_id=metric_algo.id, step_index=simulator_index + 1,
                                             static_param={}, dynamic_param=dynamics)
            metric_step.step_visible = False
            workflow_step_list.insert(0, metric_step)

        workflows = self.workflow_service.create_and_store_workflow(project_id, burst_id, simulator_index,
                                                                    simulator_id, operations)
        self.operation_service.prepare_operations_for_workflowsteps(workflow_step_list, workflows, user_id,
                                                                    burst_id, project_id, group, operations)
        operation_ids = [operation.id for operation in operations]
        return operation_ids

    def _async_launch_and_prepare(self, burst_config, simulator_index, simulator_id, user_id):
        """
        Prepare operations asynchronously.
        """
        try:
            operation_ids = self._prepare_operations(burst_config, simulator_index, simulator_id, user_id)
            self.logger.debug("Starting a total of %s workflows" % len(operation_ids))
            wf_errs = 0
            for operation_id in operation_ids:
                try:
                    OperationService().launch_operation(operation_id, True)
                except Exception as excep:
                    self.logger.error(excep)
                    wf_errs += 1
                    self.workflow_service.mark_burst_finished(burst_config, error_message=str(excep))

            self.logger.debug("Finished launching workflows. " + str(len(operation_ids) - wf_errs) +
                              " were launched successfully, " + str(wf_errs) +
                              " had errors on pre-launch steps")
        except Exception as excep:
            self.logger.error(excep)
            self.workflow_service.mark_burst_finished(burst_config, error_message=str(excep))

    @staticmethod
    def launch_visualization(visualization, frame_width=None, frame_height=None, is_preview=True):
        """
        :param visualization: a visualization workflow step
        """
        dynamic_params = visualization.dynamic_param
        static_params = visualization.static_param
        parameters_dict = static_params
        current_project_id = 0
        # The current operation id is needed for the export mechanism. So far, just use
        # the operation of the workflow step from which the inputs are taken.
        for param in dynamic_params:
            step_index = dynamic_params[param][WorkflowStepConfiguration.STEP_INDEX_KEY]
            datatype_index = dynamic_params[param][WorkflowStepConfiguration.DATATYPE_INDEX_KEY]
            referred_workflow_step = dao.get_workflow_step_by_step_index(visualization.fk_workflow, step_index)
            referred_operation_id = referred_workflow_step.fk_operation
            referred_operation = dao.get_operation_by_id(referred_operation_id)
            current_project_id = referred_operation.fk_launched_in
            if type(datatype_index) is IntType:
                # Entry is the output of a previous step
                datatypes = dao.get_results_for_operation(referred_operation_id)
                parameters_dict[param] = datatypes[datatype_index].gid
            else:
                # Entry is the input of a previous step
                parameters_dict[param] = json.loads(referred_operation.parameters)[datatype_index]

        algorithm = dao.get_algorithm_by_id(visualization.fk_algorithm)
        adapter_instance = ABCAdapter.build_adapter(algorithm)
        adapter_instance.current_project_id = current_project_id
        prepared_inputs = adapter_instance.prepare_ui_inputs(parameters_dict)
        if frame_width is not None:
            prepared_inputs[ABCDisplayer.PARAM_FIGURE_SIZE] = (frame_width, frame_height)

        if is_preview:
            result = adapter_instance.generate_preview(**prepared_inputs)
        else:
            result = adapter_instance.launch(**prepared_inputs)
        return result, parameters_dict

    def update_history_status(self, id_list):
        """
        For each burst_id received in the id_list, read the new status from the DB and
        return a list of [id, status, is_group, message, running_time] entries.
        """
        result = []
        for b_id in id_list:
            burst = dao.get_burst_by_id(b_id)
            if burst is not None:
                burst.prepare_after_load()
                if burst.status == burst.BURST_RUNNING:
                    running_time = datetime.now() - burst.start_time
                else:
                    running_time = burst.finish_time - burst.start_time
                running_time = format_timedelta(running_time, most_significant2=False)

                if burst.status == burst.BURST_ERROR:
                    msg = 'Check Operations page for error Message'
                else:
                    msg = ''
                result.append([burst.id, burst.status, burst.is_group, msg, running_time])
            else:
                self.logger.debug("Could not find burst with id=" + str(b_id) +
                                  ". Might have been deleted by user!!")
        return result

    def stop_burst(self, burst_entity):
        """
        Stop all the operations for the current burst and set the burst status to canceled.
        """
        burst_wfs = dao.get_workflows_for_burst(burst_entity.id)
        any_stopped = False
        for workflow in burst_wfs:
            wf_steps = dao.get_workflow_steps(workflow.id)
            for step in wf_steps:
                if step.fk_operation is not None:
                    self.logger.debug("We will stop operation: %d" % step.fk_operation)
                    any_stopped = self.operation_service.stop_operation(step.fk_operation) or any_stopped

        if any_stopped and burst_entity.status != burst_entity.BURST_CANCELED:
            self.workflow_service.mark_burst_finished(burst_entity, model.BurstConfiguration.BURST_CANCELED)
            return True
        return False

    @transactional
    def cancel_or_remove_burst(self, burst_id):
        """
        Cancel (if the burst is still running) or Remove the burst given by burst_id.

        :returns: True when a Remove operation was done, and False for a Cancel
        """
        burst_entity = dao.get_burst_by_id(burst_id)
        if burst_entity.status == burst_entity.BURST_RUNNING:
            self.stop_burst(burst_entity)
            return False

        service = ProjectService()
        # Remove each DataType in the current burst.
        # We can not leave all on cascade, because it won't work on SQLite for mapped dataTypes.
        datatypes = dao.get_all_datatypes_in_burst(burst_id)
        # Get operations linked to the current burst before removing the burst,
        # or else the burst won't be there to identify operations any more.
        remaining_ops = dao.get_operations_in_burst(burst_id)

        # Remove the burst first, to delete workflow steps which still hold foreign keys to operations.
        correct = dao.remove_entity(burst_entity.__class__, burst_id)
        if not correct:
            raise RemoveDataTypeException("Could not remove Burst entity!")

        for datatype in datatypes:
            service.remove_datatype(burst_entity.fk_project, datatype.gid, False)

        # Remove all remaining operations.
        correct = True
        remaining_op_groups = set()
        project = dao.get_project_by_id(burst_entity.fk_project)
        for oper in remaining_ops:
            is_remaining = dao.get_generic_entity(oper.__class__, oper.id)
            if len(is_remaining) == 0:
                # Operation was removed on cascade.
                continue
            if oper.fk_operation_group is not None and oper.fk_operation_group not in remaining_op_groups:
                is_remaining = dao.get_generic_entity(model.OperationGroup, oper.fk_operation_group)
                if len(is_remaining) > 0:
                    remaining_op_groups.add(oper.fk_operation_group)
                    correct = correct and dao.remove_entity(model.OperationGroup, oper.fk_operation_group)
            correct = correct and dao.remove_entity(oper.__class__, oper.id)
            service.structure_helper.remove_operation_data(project.name, oper.id)

        if not correct:
            raise RemoveDataTypeException("Could not remove Burst because a linked operation could not be dropped!!")
        return True

    @staticmethod
    def get_portlet_status(portlet_cfg):
        """
        Get the status of a portlet configuration.
        """
        if portlet_cfg.analyzers:
            for analyze_step in portlet_cfg.analyzers:
                operation = dao.try_get_operation_by_id(analyze_step.fk_operation)
                if operation is None:
                    return model.STATUS_ERROR, "Operation has been removed"
                if operation.status != model.STATUS_FINISHED:
                    return operation.status, operation.additional_info or ''
        else:
            # The simulator is the first step, so now decide if we are waiting for input or output.
            visualizer = portlet_cfg.visualizer
            wait_on_outputs = False
            for entry in visualizer.dynamic_param:
                if type(visualizer.dynamic_param[entry][WorkflowStepConfiguration.DATATYPE_INDEX_KEY]) == IntType:
                    wait_on_outputs = True
                    break
            if wait_on_outputs:
                simulator_step = dao.get_workflow_step_by_step_index(visualizer.fk_workflow, 0)
                operation = dao.try_get_operation_by_id(simulator_step.fk_operation)
                if operation is None:
                    error_msg = ("At least one simulation result was not found, it might have been removed. <br/>"
                                 "You can copy and relaunch the current simulation, if you are interested in having "
                                 "your results re-computed.")
                    return model.STATUS_ERROR, error_msg
                else:
                    return operation.status, operation.additional_info or ''
        return model.STATUS_FINISHED, ''