def get_portlet_status(portlet_cfg):
    """
    Get the status of a portlet configuration.

    :param portlet_cfg: portlet configuration, holding either analyzer workflow
        steps or (when no analyzers) a visualizer workflow step
    :returns: tuple (operation status constant, error message string)
    """
    if portlet_cfg.analyzers:
        # The first analyzer operation that is missing or not finished decides the status.
        for analyze_step in portlet_cfg.analyzers:
            operation = dao.try_get_operation_by_id(analyze_step.fk_operation)
            if operation is None:
                return model.STATUS_ERROR, "Operation has been removed"
            if operation.status != model.STATUS_FINISHED:
                return operation.status, operation.additional_info or ""
    else:
        ## Simulator is first step so now decide if we are waiting for input or output ##
        visualizer = portlet_cfg.visualizer
        # An integer datatype index means the visualizer consumes an OUTPUT of the
        # simulator step, so we must wait for the simulator operation.
        # NOTE: type(...) == IntType kept deliberately (isinstance would also accept bool).
        wait_on_outputs = any(
            type(visualizer.dynamic_param[entry][WorkflowStepConfiguration.DATATYPE_INDEX_KEY]) == IntType
            for entry in visualizer.dynamic_param)
        if wait_on_outputs:
            simulator_step = dao.get_workflow_step_by_step_index(visualizer.fk_workflow, 0)
            operation = dao.try_get_operation_by_id(simulator_step.fk_operation)
            if operation is None:
                # Fixed malformed "<br\>" (literal backslash) to a valid "<br/>" tag.
                error_msg = ("At least one simulation result was not found, it might have been removed. <br/>"
                             "You can copy and relaunch current simulation, if you are interested in having "
                             "your results re-computed.")
                return model.STATUS_ERROR, error_msg
            return operation.status, operation.additional_info or ""
    return model.STATUS_FINISHED, ""
def get_portlet_status(portlet_cfg):
    """ Get the status of a portlet configuration. """
    if portlet_cfg.analyzers:
        # Status is dictated by the first analyzer operation that is gone or unfinished.
        for step in portlet_cfg.analyzers:
            op = dao.try_get_operation_by_id(step.fk_operation)
            if op is None:
                return model.STATUS_ERROR, "Operation has been removed"
            if op.status != model.STATUS_FINISHED:
                return op.status, op.additional_info or ''
        return model.STATUS_FINISHED, ''

    ## Simulator is first step so now decide if we are waiting for input or output ##
    visualizer = portlet_cfg.visualizer
    waits_on_outputs = any(
        type(visualizer.dynamic_param[key][WorkflowStepConfiguration.DATATYPE_INDEX_KEY]) == IntType
        for key in visualizer.dynamic_param)
    if not waits_on_outputs:
        return model.STATUS_FINISHED, ''

    simulator_step = dao.get_workflow_step_by_step_index(visualizer.fk_workflow, 0)
    op = dao.try_get_operation_by_id(simulator_step.fk_operation)
    if op is None:
        return model.STATUS_ERROR, ("At least one simulation result was not found, it might have been removed. <br\>"
                                    "You can copy and relaunch current simulation, if you are interested in having "
                                    "your results re-computed.")
    return op.status, op.additional_info or ''
def launch_visualization(visualization, frame_width=None, frame_height=None,
                         method_name=ABCAdapter.LAUNCH_METHOD, is_preview=True):
    """
    Launch a visualization workflow step.

    :param visualization: a visualization workflow step
    :param frame_width: optional display width, forwarded as figure size
    :param frame_height: optional display height, forwarded as figure size
    :param method_name: name of the adapter method to invoke
    :param is_preview: when True, MPLH5 displayers hide the full toolbar
    :returns: tuple (adapter launch result, resolved parameters dict)
    """
    dynamic_params = visualization.dynamic_param
    static_params = visualization.static_param
    # NOTE: parameters_dict aliases static_params, so dynamic entries are written
    # into the step's static_param mapping as a side effect.
    parameters_dict = static_params
    current_project_id = 0
    ## Current operation id needed for export mechanism. So far just use ##
    ## the operation of the workflow_step from which the inputs are taken ####
    for param in dynamic_params:
        step_index = dynamic_params[param][WorkflowStepConfiguration.STEP_INDEX_KEY]
        datatype_index = dynamic_params[param][WorkflowStepConfiguration.DATATYPE_INDEX_KEY]
        referred_workflow_step = dao.get_workflow_step_by_step_index(visualization.fk_workflow, step_index)
        referred_operation_id = referred_workflow_step.fk_operation
        referred_operation = dao.get_operation_by_id(referred_operation_id)
        current_project_id = referred_operation.fk_launched_in
        if type(datatype_index) is IntType:
            ## Entry is the output of a previous step ##
            datatypes = dao.get_results_for_operation(referred_operation_id)
            parameters_dict[param] = datatypes[datatype_index].gid
        else:
            ## Entry is the input of a previous step ###
            parameters_dict[param] = json.loads(referred_operation.parameters)[datatype_index]
    algorithm = dao.get_algorithm_by_id(visualization.fk_algorithm)
    adapter_instance = ABCAdapter.build_adapter(algorithm.algo_group)
    adapter_instance.current_project_id = current_project_id
    prepared_inputs = adapter_instance.prepare_ui_inputs(parameters_dict)
    if frame_width is not None:
        prepared_inputs[ABCDisplayer.PARAM_FIGURE_SIZE] = (frame_width, frame_height)
    if isinstance(adapter_instance, ABCMPLH5Displayer) and is_preview is True:
        prepared_inputs[ABCMPLH5Displayer.SHOW_FULL_TOOLBAR] = False
    # getattr dispatch instead of eval() — same call, no dynamic code execution.
    result = getattr(adapter_instance, method_name)(**prepared_inputs)
    return result, parameters_dict
def prepare_next_step(self, last_executed_op_id):
    """
    If the operation with id 'last_executed_op_id' resulted after
    the execution of a workflow step then this method will launch
    the operation corresponding to the next step from the workflow.

    :param last_executed_op_id: id of the operation that just finished
    :returns: id of the next operation to launch, or None when there is no next step
    :raises WorkflowInterStepsException: wraps any error raised while preparing the step
    """
    try:
        current_step, next_workflow_step = self._get_data(last_executed_op_id)
        if next_workflow_step is not None:
            # A follow-up step exists: resolve its dynamic parameters from earlier
            # steps' results/inputs, then hand back its operation id for launching.
            operation = dao.get_operation_by_id(next_workflow_step.fk_operation)
            dynamic_param_names = next_workflow_step.dynamic_workflow_param_names
            if dynamic_param_names:
                op_params = json.loads(operation.parameters)
                for param_name in dynamic_param_names:
                    dynamic_param = op_params[param_name]
                    former_step = dao.get_workflow_step_by_step_index(next_workflow_step.fk_workflow,
                                                                      dynamic_param[wf_cfg.STEP_INDEX_KEY])
                    if type(dynamic_param[wf_cfg.DATATYPE_INDEX_KEY]) is IntType:
                        # Integer index -> reference an output datatype of the former step.
                        datatypes = dao.get_results_for_operation(former_step.fk_operation)
                        op_params[param_name] = datatypes[dynamic_param[wf_cfg.DATATYPE_INDEX_KEY]].gid
                    else:
                        # Otherwise -> reference an input parameter of the former operation.
                        previous_operation = dao.get_operation_by_id(former_step.fk_operation)
                        op_params[param_name] = json.loads(previous_operation.parameters)[
                            dynamic_param[wf_cfg.DATATYPE_INDEX_KEY]]
                operation.parameters = json.dumps(op_params)
                operation = dao.store_entity(operation)
            return operation.id

        if current_step is not None:
            # Last step of this workflow finished: mark the workflow, and if every
            # parallel workflow of the burst is done, close the burst and account disk usage.
            current_workflow = dao.get_workflow_by_id(current_step.fk_workflow)
            current_workflow.status = current_workflow.STATUS_FINISHED
            dao.store_entity(current_workflow)
            burst_entity = dao.get_burst_by_id(current_workflow.fk_burst)
            parallel_workflows = dao.get_workflows_for_burst(burst_entity.id)
            all_finished = all(workflow.status != workflow.STATUS_STARTED
                               for workflow in parallel_workflows)
            if all_finished:
                self.mark_burst_finished(burst_entity, success=True)
                disk_size = dao.get_burst_disk_size(burst_entity.id)
                # NOTE(review): a previous comment claimed a kB->MB transform here, but
                # none is performed — the size is added in whatever unit the DAO returns.
                if disk_size > 0:
                    user = dao.get_project_by_id(burst_entity.fk_project).administrator
                    user.used_disk_space = user.used_disk_space + disk_size
                    dao.store_entity(user)
        else:
            # Operation was not part of any workflow step: account its disk usage directly.
            operation = dao.get_operation_by_id(last_executed_op_id)
            disk_size = dao.get_disk_size_for_operation(operation.id)
            if disk_size > 0:
                user = dao.get_user_by_id(operation.fk_launched_by)
                user.used_disk_space = user.used_disk_space + disk_size
                dao.store_entity(user)
        return None
    except Exception as excep:
        # 'except ... as' replaces the Python-2-only comma syntax (valid on 2.6+ and 3.x).
        self.logger.error(excep)
        self.logger.exception(excep)
        raise WorkflowInterStepsException(excep)
def prepare_next_step(self, last_executed_op_id):
    """
    If the operation with id 'last_executed_op_id' resulted after
    the execution of a workflow step then this method will launch
    the operation corresponding to the next step from the workflow.
    """
    # Returns the id of the next operation to launch, or None when the workflow
    # has no further step. Any failure is wrapped in WorkflowInterStepsException.
    try:
        current_step, next_workflow_step = self._get_data(last_executed_op_id)
        if next_workflow_step is not None:
            # A next step exists: fill in its dynamic parameters from earlier
            # steps' results/inputs before handing its operation id back.
            operation = dao.get_operation_by_id(next_workflow_step.fk_operation)
            dynamic_param_names = next_workflow_step.dynamic_workflow_param_names
            if len(dynamic_param_names) > 0:
                op_params = json.loads(operation.parameters)
                for param_name in dynamic_param_names:
                    dynamic_param = op_params[param_name]
                    former_step = dao.get_workflow_step_by_step_index(
                        next_workflow_step.fk_workflow,
                        dynamic_param[WorkflowStepConfiguration.STEP_INDEX_KEY])
                    if type(dynamic_param[WorkflowStepConfiguration.DATATYPE_INDEX_KEY]) is IntType:
                        # Integer index -> reference an output datatype produced by the former step.
                        datatypes = dao.get_results_for_operation(former_step.fk_operation)
                        op_params[param_name] = datatypes[
                            dynamic_param[WorkflowStepConfiguration.DATATYPE_INDEX_KEY]].gid
                    else:
                        # Non-integer -> reference one of the former operation's input parameters.
                        previous_operation = dao.get_operation_by_id(former_step.fk_operation)
                        op_params[param_name] = json.loads(previous_operation.parameters)[
                            dynamic_param[WorkflowStepConfiguration.DATATYPE_INDEX_KEY]]
                operation.parameters = json.dumps(op_params)
                operation = dao.store_entity(operation)
            return operation.id
        elif current_step is not None:
            # No next step: this workflow is done. If no parallel workflow of the
            # burst is still running, mark the whole burst finished.
            current_workflow = dao.get_workflow_by_id(current_step.fk_workflow)
            current_workflow.status = current_workflow.STATUS_FINISHED
            dao.store_entity(current_workflow)
            burst_entity = dao.get_burst_by_id(current_workflow.fk_burst)
            parallel_workflows = dao.get_workflows_for_burst(burst_entity.id)
            all_finished = True
            for workflow in parallel_workflows:
                if workflow.status == workflow.STATUS_STARTED:
                    all_finished = False
            if all_finished:
                self.mark_burst_finished(burst_entity)
        return None
    except Exception, excep:
        # Python-2 'except ..., name' syntax, kept byte-identical.
        self.logger.error(excep)
        self.logger.exception(excep)
        raise WorkflowInterStepsException(excep)
def _get_data(operation_id):
    """
    For a given operation id, return the corresponding WorkflowStep
    and the NextWorkflowStep to be executed.
    """
    current = dao.get_workflow_step_for_operation(operation_id)
    if current is None:
        # The operation does not belong to any workflow step.
        return None, None
    follower = dao.get_workflow_step_by_step_index(current.fk_workflow,
                                                   current.step_index + 1)
    return current, follower
def _get_data(operation_id):
    """
    For a given operation id, return the corresponding WorkflowStep
    and the NextWorkflowStep to be executed.
    """
    step = dao.get_workflow_step_for_operation(operation_id)
    if step is not None:
        # Pair the executed step with its successor (index + 1) in the same workflow.
        return step, dao.get_workflow_step_by_step_index(step.fk_workflow, step.step_index + 1)
    return None, None
def get_portlet_status(portlet_cfg):
    """
    Get the status of a portlet configuration.

    Returns a tuple (burst status constant, error message string); defaults to
    BURST_FINISHED with an empty message when nothing is missing or running.
    """
    status = model.BurstConfiguration.BURST_FINISHED
    error_msg = ''
    if len(portlet_cfg.analyzers):
        # Portlet has analyzer steps: the first missing / started / errored /
        # canceled analyzer operation decides the overall status.
        for analyze_step in portlet_cfg.analyzers:
            operation = dao.get_operation_by_id(analyze_step.fk_operation)
            if operation is None:
                status = model.BurstConfiguration.BURST_ERROR
                error_msg = "Operation has been removed"
                break
            if operation.status == model.STATUS_STARTED:
                status = model.BurstConfiguration.BURST_RUNNING
                break
            if operation.status == model.STATUS_ERROR:
                status = model.BurstConfiguration.BURST_ERROR
                error_msg = operation.additional_info
                break
            if operation.status == model.STATUS_CANCELED:
                status = model.BurstConfiguration.BURST_CANCELED
                break
    else:
        ## Simulator is first step so now decide if we are waiting for input or output ##
        visualizer = portlet_cfg.visualizer
        wait_on_outputs = False
        # An integer datatype index means the visualizer consumes a simulator OUTPUT,
        # so the portlet status must mirror the simulator operation's status.
        for entry in visualizer.dynamic_param:
            if type(visualizer.dynamic_param[entry][WorkflowStepConfiguration.DATATYPE_INDEX_KEY]) == IntType:
                wait_on_outputs = True
                break
        if wait_on_outputs:
            simulator_step = dao.get_workflow_step_by_step_index(visualizer.fk_workflow, 0)
            operation = dao.get_operation_by_id(simulator_step.fk_operation)
            if operation is None:
                status = model.BurstConfiguration.BURST_ERROR
                error_msg = ("At least one simulation result was not found, it might have been removed. <br\>"
                             "You can copy and relaunch current simulation, if you are interested in having "
                             "your results re-computed.")
            elif operation.status == model.STATUS_STARTED:
                status = model.BurstConfiguration.BURST_RUNNING
            elif operation.status == model.STATUS_ERROR:
                status = model.BurstConfiguration.BURST_ERROR
                error_msg = operation.additional_info
            elif operation.status == model.STATUS_CANCELED:
                status = model.BurstConfiguration.BURST_CANCELED
    return status, error_msg
def launch_visualization(visualization, frame_width=None, frame_height=None, is_preview=True):
    """
    Launch a visualization workflow step.

    :param visualization: a visualization workflow step
    :param frame_width: optional display width, forwarded as figure size
    :param frame_height: optional display height, forwarded as figure size
    :param is_preview: when True, call generate_preview instead of launch
    :returns: tuple (adapter result, resolved parameters dict)
    """
    dynamic_params = visualization.dynamic_param
    static_params = visualization.static_param
    # NOTE: parameters_dict aliases static_params, so dynamic entries are written
    # into the step's static_param mapping as a side effect.
    parameters_dict = static_params
    current_project_id = 0
    # Current operation id needed for export mechanism. So far just use ##
    # the operation of the workflow_step from which the inputs are taken ####
    for param in dynamic_params:
        step_index = dynamic_params[param][WorkflowStepConfiguration.STEP_INDEX_KEY]
        datatype_index = dynamic_params[param][WorkflowStepConfiguration.DATATYPE_INDEX_KEY]
        referred_workflow_step = dao.get_workflow_step_by_step_index(visualization.fk_workflow, step_index)
        referred_operation_id = referred_workflow_step.fk_operation
        referred_operation = dao.get_operation_by_id(referred_operation_id)
        current_project_id = referred_operation.fk_launched_in
        if type(datatype_index) is IntType:
            # Entry is the output of a previous step ##
            datatypes = dao.get_results_for_operation(referred_operation_id)
            parameters_dict[param] = datatypes[datatype_index].gid
        else:
            # Entry is the input of a previous step ###
            parameters_dict[param] = json.loads(referred_operation.parameters)[datatype_index]
    algorithm = dao.get_algorithm_by_id(visualization.fk_algorithm)
    adapter_instance = ABCAdapter.build_adapter(algorithm.algo_group)
    adapter_instance.current_project_id = current_project_id
    prepared_inputs = adapter_instance.prepare_ui_inputs(parameters_dict)
    if frame_width is not None:
        prepared_inputs[ABCDisplayer.PARAM_FIGURE_SIZE] = (frame_width, frame_height)
    if isinstance(adapter_instance, ABCMPLH5Displayer) and is_preview is True:
        prepared_inputs[ABCMPLH5Displayer.SHOW_FULL_TOOLBAR] = False
    # Direct method calls instead of eval() — identical dispatch, no dynamic code execution.
    if is_preview:
        result = adapter_instance.generate_preview(**prepared_inputs)
    else:
        result = adapter_instance.launch(**prepared_inputs)
    return result, parameters_dict
def get_portlet_status(portlet_cfg):
    """ Get the status of a portlet configuration. """
    burst_conf = model.BurstConfiguration
    if len(portlet_cfg.analyzers):
        # Analyzer steps present: the first non-finished analyzer operation
        # determines the overall burst status.
        for step in portlet_cfg.analyzers:
            op = dao.get_operation_by_id(step.fk_operation)
            if op is None:
                return burst_conf.BURST_ERROR, "Operation has been removed"
            if op.status == model.STATUS_STARTED:
                return burst_conf.BURST_RUNNING, ''
            if op.status == model.STATUS_ERROR:
                return burst_conf.BURST_ERROR, op.additional_info
            if op.status == model.STATUS_CANCELED:
                return burst_conf.BURST_CANCELED, ''
        return burst_conf.BURST_FINISHED, ''

    ## Simulator is first step so now decide if we are waiting for input or output ##
    visualizer = portlet_cfg.visualizer
    waits_on_outputs = any(
        type(visualizer.dynamic_param[key][WorkflowStepConfiguration.DATATYPE_INDEX_KEY]) == IntType
        for key in visualizer.dynamic_param)
    if not waits_on_outputs:
        return burst_conf.BURST_FINISHED, ''

    simulator_step = dao.get_workflow_step_by_step_index(visualizer.fk_workflow, 0)
    operation = dao.get_operation_by_id(simulator_step.fk_operation)
    if operation is None:
        return burst_conf.BURST_ERROR, ("At least one simulation result was not found, it might have been removed. <br\>"
                                        "You can copy and relaunch current simulation, if you are interested in having "
                                        "your results re-computed.")
    if operation.status == model.STATUS_STARTED:
        return burst_conf.BURST_RUNNING, ''
    if operation.status == model.STATUS_ERROR:
        return burst_conf.BURST_ERROR, operation.additional_info
    if operation.status == model.STATUS_CANCELED:
        return burst_conf.BURST_CANCELED, ''
    return burst_conf.BURST_FINISHED, ''