def _find_metrics(operations):
    """
    Search the given operations for the first one that produced results, then
    return the metrics dictionary of the DatatypeMeasure associated with that result.

    :raises LaunchException: if any operation is still running, or no measure was found
    """
    measure = None
    for op in operations:
        if not op.has_finished:
            raise LaunchException("Can not display until all operations from this range are finished!")
        results = dao.get_results_for_operation(op.id)
        if not len(results):
            continue
        first_result = results[0]
        if first_result.type == "DatatypeMeasure":
            # The result itself is the measure; reload it as the proper entity class.
            measure = dao.get_generic_entity(DatatypeMeasure, first_result.id)[0]
        else:
            # Otherwise look up the measure computed over this datatype.
            candidates = dao.get_generic_entity(DatatypeMeasure, first_result.gid, '_analyzed_datatype')
            if candidates:
                measure = candidates[0]
        break
    if not measure:
        raise LaunchException("No datatypes were generated due to simulation errors. Nothing to display.")
    return measure.metrics
def prepare_parameters(datatype_group_gid, back_page, color_metric=None, size_metric=None):
    """
    We suppose that there are max 2 ranges and from each operation results exactly one dataType.

    :param datatype_group_gid: the group id for the `DataType` to be visualised
    :param back_page: page where the back button will redirect
    :param color_metric: a list of `DataTypeMeasure` which has been executed on `datatype_group_gid`
    :param size_metric: a list of `DataTypeMeasure` which has been executed on `datatype_group_gid`
    :returns: `ContextDiscretePSE`
    :raises Exception: when `datatype_group_id` is invalid (not in database)
    """
    datatype_group = dao.get_datatype_group_by_gid(datatype_group_gid)
    if datatype_group is None:
        raise Exception("Selected DataTypeGroup is no longer present in the database. "
                        "It might have been remove or the specified id is not the correct one.")
    operation_group = dao.get_operationgroup_by_id(datatype_group.fk_operation_group)
    # Build axis names / values / display labels for at most two ranges.
    range1_name, range1_values, range1_labels = DiscretePSEAdapter.prepare_range_labels(operation_group,
                                                                                        operation_group.range1)
    range2_name, range2_values, range2_labels = DiscretePSEAdapter.prepare_range_labels(operation_group,
                                                                                        operation_group.range2)
    pse_context = ContextDiscretePSE(datatype_group_gid, color_metric, size_metric, back_page)
    pse_context.setRanges(range1_name, range1_values, range1_labels, range2_name, range2_values, range2_labels)
    final_dict = {}
    operations = dao.get_operations_in_group(operation_group.id)
    for operation_ in operations:
        if not operation_.has_finished:
            # At least one op still running; the UI can show a "refresh" state.
            pse_context.has_started_ops = True
        # SECURITY NOTE: range_values comes from the DB as a Python-literal string and is
        # evaluated with eval(); ast.literal_eval would be a safer parser here.
        range_values = eval(operation_.range_values)
        key_1 = range_values[range1_name]
        key_2 = model.RANGE_MISSING_STRING
        if range2_name is not None:
            key_2 = range_values[range2_name]
        datatype = None
        if operation_.status == model.STATUS_FINISHED:
            datatypes = dao.get_results_for_operation(operation_.id)
            if len(datatypes) > 0:
                # Only the first resulted datatype is considered (one result per op assumed).
                datatype = datatypes[0]
                if datatype.type == "DatatypeMeasure":
                    ## Load proper entity class from DB.
                    measures = dao.get_generic_entity(DatatypeMeasure, datatype.id)
                else:
                    measures = dao.get_generic_entity(DatatypeMeasure, datatype.gid, '_analyzed_datatype')
                pse_context.prepare_metrics_datatype(measures, datatype)
        if key_1 not in final_dict:
            final_dict[key_1] = {}
        final_dict[key_1][key_2] = pse_context.build_node_info(operation_, datatype)
    pse_context.fill_object(final_dict)
    ## datatypes_dict is not actually used in the drawing of the PSE and actually
    ## causes problems in case of NaN values, so just remove it before creating the json
    pse_context.datatypes_dict = {}
    return pse_context
def launch_visualization(visualization, frame_width=None, frame_height=None,
                         method_name=ABCAdapter.LAUNCH_METHOD, is_preview=True):
    """
    Launch a visualization workflow step, resolving its dynamic parameters first.

    :param visualization: a visualization workflow step
    :param frame_width: optional figure width in pixels, forwarded to the displayer
    :param frame_height: optional figure height in pixels, forwarded to the displayer
    :param method_name: name of the adapter method to invoke (defaults to the launch method)
    :param is_preview: when True, MPLH5 displayers hide the full toolbar
    :returns: tuple (adapter result, resolved parameters dict)
    """
    dynamic_params = visualization.dynamic_param
    static_params = visualization.static_param
    parameters_dict = static_params
    current_project_id = 0
    ## Current operation id needed for export mechanism. So far just use ##
    ## the operation of the workflow_step from which the inputs are taken ####
    for param in dynamic_params:
        step_index = dynamic_params[param][WorkflowStepConfiguration.STEP_INDEX_KEY]
        datatype_index = dynamic_params[param][WorkflowStepConfiguration.DATATYPE_INDEX_KEY]
        referred_workflow_step = dao.get_workflow_step_by_step_index(visualization.fk_workflow, step_index)
        referred_operation_id = referred_workflow_step.fk_operation
        referred_operation = dao.get_operation_by_id(referred_operation_id)
        current_project_id = referred_operation.fk_launched_in
        if type(datatype_index) is IntType:
            ## Entry is the output of a previous step ##
            datatypes = dao.get_results_for_operation(referred_operation_id)
            parameters_dict[param] = datatypes[datatype_index].gid
        else:
            ## Entry is the input of a previous step ###
            parameters_dict[param] = json.loads(referred_operation.parameters)[datatype_index]
    algorithm = dao.get_algorithm_by_id(visualization.fk_algorithm)
    adapter_instance = ABCAdapter.build_adapter(algorithm.algo_group)
    adapter_instance.current_project_id = current_project_id
    prepared_inputs = adapter_instance.prepare_ui_inputs(parameters_dict)
    if frame_width is not None:
        prepared_inputs[ABCDisplayer.PARAM_FIGURE_SIZE] = (frame_width, frame_height)
    if isinstance(adapter_instance, ABCMPLH5Displayer) and is_preview is True:
        prepared_inputs[ABCMPLH5Displayer.SHOW_FULL_TOOLBAR] = False
    # FIX: dispatch via getattr instead of eval()-ing a synthesized source string.
    # Equivalent for any valid method name, and avoids arbitrary code execution
    # if method_name ever contains more than an identifier.
    result = getattr(adapter_instance, method_name)(**prepared_inputs)
    return result, parameters_dict
def prepare_next_step(self, last_executed_op_id):
    """
    If the operation with id 'last_executed_op_id' resulted after
    the execution of a workflow step then this method will launch
    the operation corresponding to the next step from the workflow.

    :param last_executed_op_id: id of the operation that just finished
    :returns: id of the next operation to launch, or None when the workflow ended
    :raises WorkflowInterStepsException: wrapping any failure during preparation
    """
    try:
        current_step, next_workflow_step = self._get_data(last_executed_op_id)
        if next_workflow_step is not None:
            operation = dao.get_operation_by_id(next_workflow_step.fk_operation)
            dynamic_param_names = next_workflow_step.dynamic_workflow_param_names
            if len(dynamic_param_names) > 0:
                # Resolve each dynamic parameter from the step it refers to:
                # integer index -> a result datatype GID; otherwise -> a value
                # copied out of the former operation's own parameters.
                op_params = json.loads(operation.parameters)
                for param_name in dynamic_param_names:
                    dynamic_param = op_params[param_name]
                    former_step = dao.get_workflow_step_by_step_index(next_workflow_step.fk_workflow,
                                                                     dynamic_param[wf_cfg.STEP_INDEX_KEY])
                    if type(dynamic_param[wf_cfg.DATATYPE_INDEX_KEY]) is IntType:
                        datatypes = dao.get_results_for_operation(former_step.fk_operation)
                        op_params[param_name] = datatypes[dynamic_param[wf_cfg.DATATYPE_INDEX_KEY]].gid
                    else:
                        previous_operation = dao.get_operation_by_id(former_step.fk_operation)
                        op_params[param_name] = json.loads(previous_operation.parameters)[
                            dynamic_param[wf_cfg.DATATYPE_INDEX_KEY]]
                operation.parameters = json.dumps(op_params)
                operation = dao.store_entity(operation)
            return operation.id
        else:
            if current_step is not None:
                # No next step: this workflow is done. Mark it finished and, if all
                # parallel workflows of the burst finished too, close the burst and
                # charge its disk size to the project administrator.
                current_workflow = dao.get_workflow_by_id(current_step.fk_workflow)
                current_workflow.status = current_workflow.STATUS_FINISHED
                dao.store_entity(current_workflow)
                burst_entity = dao.get_burst_by_id(current_workflow.fk_burst)
                parallel_workflows = dao.get_workflows_for_burst(burst_entity.id)
                all_finished = True
                for workflow in parallel_workflows:
                    if workflow.status == workflow.STATUS_STARTED:
                        all_finished = False
                if all_finished:
                    self.mark_burst_finished(burst_entity, success=True)
                    disk_size = dao.get_burst_disk_size(burst_entity.id)  # Transform from kB to MB
                    if disk_size > 0:
                        user = dao.get_project_by_id(burst_entity.fk_project).administrator
                        user.used_disk_space = user.used_disk_space + disk_size
                        dao.store_entity(user)
            else:
                # Operation was standalone (not part of a workflow step):
                # charge its disk size to the launching user.
                operation = dao.get_operation_by_id(last_executed_op_id)
                disk_size = dao.get_disk_size_for_operation(operation.id)  # Transform from kB to MB
                if disk_size > 0:
                    user = dao.get_user_by_id(operation.fk_launched_by)
                    user.used_disk_space = user.used_disk_space + disk_size
                    dao.store_entity(user)
            return None
    except Exception, excep:
        self.logger.error(excep)
        self.logger.exception(excep)
        raise WorkflowInterStepsException(excep)
def remove_operation(self, operation_id):
    """
    Delete the operation with the given id, together with every DataType it produced.
    Logs a warning and does nothing when the operation no longer exists.
    """
    operation = dao.get_operation_by_id(operation_id)
    if operation is None:
        self.logger.warning("Attempt to delete operation with id=%s which no longer exists." % operation_id)
        return
    for result_dt in dao.get_results_for_operation(operation_id):
        self.remove_datatype(operation.project.id, result_dt.gid, True)
    dao.remove_entity(model.Operation, operation.id)
def remove_operation(self, operation_id):
    """
    Delete the operation with the given id, removing its resulted DataTypes in
    reverse creation order first. Logs a warning when the operation is missing.
    """
    operation = dao.try_get_operation_by_id(operation_id)
    if operation is None:
        self.logger.warning("Attempt to delete operation with id=%s which no longer exists." % operation_id)
        return
    self.logger.debug("Deleting operation %s " % operation)
    for result_dt in reversed(dao.get_results_for_operation(operation_id)):
        self.remove_datatype(operation.project.id, result_dt.gid, False)
    dao.remove_entity(model.Operation, operation.id)
    self.logger.debug("Finished deleting operation %s " % operation)
def launch(self, datatype_group, **kwargs):
    """
    Also overwrite launch from ABCDisplayer, since we want to handle a list of figures,
    instead of only one Matplotlib figure.

    :param datatype_group: the DataTypeGroup whose operations' metrics are plotted
    :raises LaunchException: when called before all operations in the group are finished,
                             or when no metrics could be found
    """
    if self.PARAM_FIGURE_SIZE in kwargs:
        figsize = kwargs[self.PARAM_FIGURE_SIZE]
        # Convert pixel dimensions to matplotlib inches (assumes ~80 dpi).
        figsize = ((figsize[0]) / 80, (figsize[1]) / 80)
        del kwargs[self.PARAM_FIGURE_SIZE]
    else:
        figsize = (15, 7)
    operation_group = dao.get_operationgroup_by_id(datatype_group.fk_operation_group)
    _, range1_name, self.range1 = operation_group.load_range_numbers(operation_group.range1)
    _, range2_name, self.range2 = operation_group.load_range_numbers(operation_group.range2)
    # Find the first operation with results and load its DatatypeMeasure.
    # NOTE(review): if the group has no operations at all, dt_measure is never
    # bound and the reference below would raise NameError — TODO confirm callers
    # guarantee a non-empty group.
    for operation in dao.get_operations_in_group(operation_group.id):
        if operation.status == model.STATUS_STARTED:
            raise LaunchException("Can not display until all operations from this range are finished!")
        op_results = dao.get_results_for_operation(operation.id)
        if len(op_results):
            datatype = op_results[0]
            if datatype.type == "DatatypeMeasure":
                ## Load proper entity class from DB.
                dt_measure = dao.get_generic_entity(DatatypeMeasure, datatype.id)[0]
            else:
                dt_measure = dao.get_generic_entity(DatatypeMeasure, datatype.gid, '_analyzed_datatype')
                if dt_measure:
                    dt_measure = dt_measure[0]
            break
        else:
            dt_measure = None
    figure_nrs = {}
    metrics = dt_measure.metrics if dt_measure else {}
    if metrics:
        for metric in metrics:
            # Separate plot for each metric.
            self._create_plot(metric, figsize, operation_group, range1_name, range2_name, figure_nrs)
    else:
        raise LaunchException("No datatypes were generated due to simulation errors. Nothing to display.")
    parameters = dict(title=self._ui_name, showFullToolbar=True,
                      serverIp=config.SERVER_IP, serverPort=config.MPLH5_SERVER_PORT,
                      figureNumbers=figure_nrs, metrics=metrics,
                      figuresJSON=json.dumps(figure_nrs))
    return self.build_display_result("pse_isocline/view", parameters)
def prepare_node_data(datatype_group):
    """
    Build a mapping from datatype GID to display info (operation id, gid, type,
    subject, invalid flag) for the first DataType produced by each operation in
    the given group.

    :raises Exception: when the given DataTypeGroup is None (no longer in DB)
    """
    if datatype_group is None:
        raise Exception("Selected DataTypeGroup is no longer present in the database. "
                        "It might have been remove or the specified id is not the correct one.")
    operation_group = dao.get_operationgroup_by_id(datatype_group.fk_operation_group)
    node_info_dict = dict()
    for current_op in dao.get_operations_in_group(operation_group.id):
        op_results = dao.get_results_for_operation(current_op.id)
        if len(op_results) == 0:
            continue
        first_dt = op_results[0]
        node_info_dict[first_dt.gid] = dict(operation_id=current_op.id,
                                            datatype_gid=first_dt.gid,
                                            datatype_type=first_dt.type,
                                            datatype_subject=first_dt.subject,
                                            datatype_invalid=first_dt.invalid)
    return node_info_dict
def launch_visualization(visualization, frame_width=None, frame_height=None,
                         method_name=ABCAdapter.LAUNCH_METHOD, is_preview=True):
    """
    Launch a visualization workflow step, resolving its dynamic parameters first.

    :param visualization: a visualization workflow step
    :param frame_width: optional figure width in pixels, forwarded to the displayer
    :param frame_height: optional figure height in pixels, forwarded to the displayer
    :param method_name: name of the adapter method to invoke (defaults to the launch method)
    :param is_preview: when True, MPLH5 displayers hide the full toolbar
    :returns: tuple (adapter result, resolved parameters dict, referred operation id)
    """
    dynamic_params = visualization.dynamic_param
    static_params = visualization.static_param
    parameters_dict = static_params
    operation_id = 0
    ## Current operation id needed for export mechanism. So far just use ##
    ## the operation of the workflow_step from which the inputs are taken ####
    for param in dynamic_params:
        step_index = dynamic_params[param][WorkflowStepConfiguration.STEP_INDEX_KEY]
        datatype_index = dynamic_params[param][WorkflowStepConfiguration.DATATYPE_INDEX_KEY]
        workflow_step = dao.get_workflow_step_by_step_index(visualization.fk_workflow, step_index)
        if type(datatype_index) is IntType:
            ## Entry is the output of a previous step ##
            operation_id = workflow_step.fk_operation
            datatypes = dao.get_results_for_operation(operation_id)
            parameters_dict[param] = datatypes[datatype_index].gid
        else:
            ## Entry is the input of a previous step ###
            operation = dao.get_operation_by_id(workflow_step.fk_operation)
            parameters_dict[param] = json.loads(operation.parameters)[datatype_index]
    algorithm = dao.get_algorithm_by_id(visualization.fk_algorithm)
    adapter_instance = ABCAdapter.build_adapter(algorithm.algo_group)
    prepared_inputs = adapter_instance.prepare_ui_inputs(parameters_dict)
    if frame_width is not None:
        prepared_inputs[ABCDisplayer.PARAM_FIGURE_SIZE] = (frame_width, frame_height)
    if isinstance(adapter_instance, ABCMPLH5Displayer) and is_preview is True:
        prepared_inputs[ABCMPLH5Displayer.SHOW_FULL_TOOLBAR] = False
    # FIX: dispatch via getattr instead of eval()-ing a synthesized source string.
    # Equivalent for any valid method name, and avoids arbitrary code execution
    # if method_name ever contains more than an identifier.
    result = getattr(adapter_instance, method_name)(**prepared_inputs)
    return result, parameters_dict, operation_id
def remove_operation(self, operation_id):
    """
    Delete the operation with the given id: first its resulted DataTypes (in
    reverse creation order), then the Operation row itself, and finally its
    data on disk. Logs a warning when the operation no longer exists.
    """
    operation = dao.try_get_operation_by_id(operation_id)
    if operation is None:
        self.logger.warning("Attempt to delete operation with id=%s which no longer exists." % operation_id)
        return
    self.logger.debug("Deleting operation %s " % operation)
    for result_dt in reversed(dao.get_results_for_operation(operation_id)):
        self.remove_datatype(operation.project.id, result_dt.gid, False)
    # The Operation row is most probably already removed while deleting the DTs
    # found inside it, but we remove it anyway to cover the no-results case.
    dao.remove_entity(Operation, operation.id)
    self.storage_interface.remove_operation_data(operation.project.name, operation_id)
    self.storage_interface.push_folder_to_sync(operation.project.name)
    self.logger.debug("Finished deleting operation %s " % operation)
def prepare_node_data(datatype_group):
    """
    Collect, per operation in the group, display info about the first resulted
    DataType, keyed by that datatype's GID.

    :raises Exception: when the given DataTypeGroup is None (no longer in DB)
    """
    if datatype_group is None:
        raise Exception(
            "Selected DataTypeGroup is no longer present in the database. "
            "It might have been remove or the specified id is not the correct one."
        )
    operation_group = dao.get_operationgroup_by_id(datatype_group.fk_operation_group)
    operations = dao.get_operations_in_group(operation_group.id)
    node_info_dict = dict()
    for op_entity in operations:
        results = dao.get_results_for_operation(op_entity.id)
        if not len(results):
            continue
        dt_entity = results[0]
        info = dict(operation_id=op_entity.id,
                    datatype_gid=dt_entity.gid,
                    datatype_type=dt_entity.type,
                    datatype_subject=dt_entity.subject,
                    datatype_invalid=dt_entity.invalid)
        node_info_dict[dt_entity.gid] = info
    return node_info_dict
def launch_visualization(visualization, frame_width=None, frame_height=None, is_preview=True):
    """
    Launch a visualization workflow step after resolving its dynamic parameters.

    :param visualization: a visualization workflow step
    :returns: tuple (adapter result, resolved parameters dict)
    """
    parameters_dict = visualization.static_param
    current_project_id = 0
    # Current operation id needed for export mechanism. So far just use ##
    # the operation of the workflow_step from which the inputs are taken ####
    for param, param_config in visualization.dynamic_param.items():
        step_index = param_config[WorkflowStepConfiguration.STEP_INDEX_KEY]
        datatype_index = param_config[WorkflowStepConfiguration.DATATYPE_INDEX_KEY]
        source_step = dao.get_workflow_step_by_step_index(visualization.fk_workflow, step_index)
        source_op_id = source_step.fk_operation
        source_op = dao.get_operation_by_id(source_op_id)
        current_project_id = source_op.fk_launched_in
        if type(datatype_index) is IntType:
            # Entry is the output of a previous step ##
            step_results = dao.get_results_for_operation(source_op_id)
            parameters_dict[param] = step_results[datatype_index].gid
        else:
            # Entry is the input of a previous step ###
            parameters_dict[param] = json.loads(source_op.parameters)[datatype_index]
    algorithm = dao.get_algorithm_by_id(visualization.fk_algorithm)
    adapter_instance = ABCAdapter.build_adapter(algorithm)
    adapter_instance.current_project_id = current_project_id
    prepared_inputs = adapter_instance.prepare_ui_inputs(parameters_dict)
    if frame_width is not None:
        prepared_inputs[ABCDisplayer.PARAM_FIGURE_SIZE] = (frame_width, frame_height)
    if is_preview:
        result = adapter_instance.generate_preview(**prepared_inputs)
    else:
        result = adapter_instance.launch(**prepared_inputs)
    return result, parameters_dict
def _fill_apriori_data(self, operations):
    """ Gather apriori data from the operations. Also gather the datatype gid's"""
    # One (range1 x range2) matrix of metric values per metric name.
    for metric in self.metrics:
        self.apriori_data[metric] = numpy.zeros((self.apriori_x.size, self.apriori_y.size))
    # An 2D array of GIDs which is used later to launch overlay for a DataType
    self.datatypes_gids = [[None for _ in self.range2] for _ in self.range1]
    for operation in operations:
        self.log.debug("Gathering data from operation : %s" % operation.id)
        # SECURITY NOTE: range_values is a stored Python-literal string evaluated
        # with eval(); ast.literal_eval would be a safer parser here.
        range_values = eval(operation.range_values)
        # Map this operation's range values onto (row, column) grid indices.
        key_1 = range_values[self.range1_name]
        index_x = self.range1.index(key_1)
        key_2 = range_values[self.range2_name]
        index_y = self.range2.index(key_2)
        if operation.status == STATUS_STARTED:
            raise LaunchException("Not all operations from this range are complete. Cannot view until then.")
        operation_results = dao.get_results_for_operation(operation.id)
        if operation_results:
            datatype = operation_results[0]
            self.datatypes_gids[index_x][index_y] = str(datatype.gid)
            if datatype.type == "DatatypeMeasure":
                measures = dao.get_generic_entity(DatatypeMeasure, datatype.id)
            else:
                measures = dao.get_generic_entity(DatatypeMeasure, datatype.gid, '_analyzed_datatype')
        else:
            self.datatypes_gids[index_x][index_y] = None
            measures = None
        # Missing measures are recorded as NaN so plots can show gaps.
        for metric in self.metrics:
            if measures:
                self.apriori_data[metric][index_x][index_y] = measures[0].metrics[metric]
            else:
                self.apriori_data[metric][index_x][index_y] = numpy.NaN
def _fill_apriori_data(self, operations):
    """ Gather apriori data from the operations. Also gather the datatype gid's"""
    # One (range1 x range2) matrix of metric values per metric name.
    for metric in self.metrics:
        self.apriori_data[metric] = numpy.zeros((self.apriori_x.size, self.apriori_y.size))
    # An 2D array of GIDs which is used later to launch overlay for a DataType
    self.datatypes_gids = [[None for _ in self.range2] for _ in self.range1]
    for operation in operations:
        self.log.debug("Gathering data from operation : %s" % operation.id)
        # SECURITY NOTE: range_values is a stored Python-literal string evaluated
        # with eval(); ast.literal_eval would be a safer parser here.
        range_values = eval(operation.range_values)
        # Map this operation's range values onto (row, column) grid indices.
        key_1 = range_values[self.range1_name]
        index_x = self.range1.index(key_1)
        key_2 = range_values[self.range2_name]
        index_y = self.range2.index(key_2)
        if operation.status == STATUS_STARTED:
            raise LaunchException("Not all operations from this range are complete. Cannot view until then.")
        operation_results = dao.get_results_for_operation(operation.id)
        if operation_results:
            datatype = operation_results[0]
            # NOTE(review): unlike the sibling version, the gid is stored here
            # without str() conversion — presumably already a string; verify.
            self.datatypes_gids[index_x][index_y] = datatype.gid
            if datatype.type == "DatatypeMeasure":
                measures = dao.get_generic_entity(DatatypeMeasure, datatype.id)
            else:
                measures = dao.get_generic_entity(DatatypeMeasure, datatype.gid, '_analyzed_datatype')
        else:
            self.datatypes_gids[index_x][index_y] = None
            measures = None
        # Missing measures are recorded as NaN so plots can show gaps.
        for metric in self.metrics:
            if measures:
                self.apriori_data[metric][index_x][index_y] = measures[0].metrics[metric]
            else:
                self.apriori_data[metric][index_x][index_y] = numpy.NaN
def prepare_parameters(datatype_group_gid, back_page, color_metric=None, size_metric=None):
    """
    We suppose that there are max 2 ranges and from each operation results exactly one dataType.

    :param datatype_group_gid: the group id for the `DataType` to be visualised
    :param back_page: Page where back button will direct
    :param color_metric: a list of `DataTypeMeasure` which has been executed on `datatype_group_gid`
    :param size_metric: a list of `DataTypeMeasure` which has been executed on `datatype_group_gid`
    :returns: `ContextDiscretePSE`
    :raises Exception: when `datatype_group_id` is invalid (not in database)
    """
    datatype_group = dao.get_datatype_group_by_gid(datatype_group_gid)
    if datatype_group is None:
        raise Exception(
            "Selected DataTypeGroup is no longer present in the database. "
            "It might have been remove or the specified id is not the correct one."
        )
    operation_group = dao.get_operationgroup_by_id(datatype_group.fk_operation_group)
    # Each range yields name / values / labels plus a flag telling whether all
    # values are numeric (non-numeric axes get positional fake coordinates).
    name1, values1, labels1, only_numbers1 = DiscretePSEAdapter.prepare_range_labels(
        operation_group, operation_group.range1)
    name2, values2, labels2, only_numbers2 = DiscretePSEAdapter.prepare_range_labels(
        operation_group, operation_group.range2)
    pse_context = ContextDiscretePSE(datatype_group_gid, color_metric, size_metric, back_page)
    pse_context.setRanges(name1, values1, labels1, name2, values2, labels2,
                          only_numbers1 and only_numbers2)
    final_dict = {}
    operations = dao.get_operations_in_group(operation_group.id)
    # Positional indices used as substitute coordinates for non-numeric axes.
    fake_numbers1 = dict(zip(values1, range(len(list(values1)))))
    fake_numbers2 = dict(zip(values2, range(len(list(values2)))))
    for operation_ in operations:
        if not operation_.has_finished:
            pse_context.has_started_ops = True
        # SECURITY NOTE: range_values comes from the DB as a Python-literal string
        # and is evaluated with eval(); ast.literal_eval would be safer.
        range_values = eval(operation_.range_values)
        key_1 = DiscretePSEAdapter.get_value_on_axe(range_values, only_numbers1, name1, fake_numbers1)
        key_2 = DiscretePSEAdapter.get_value_on_axe(range_values, only_numbers2, name2, fake_numbers2)
        datatype = None
        if operation_.status == model.STATUS_FINISHED:
            datatypes = dao.get_results_for_operation(operation_.id)
            if len(datatypes) > 0:
                # Only the first resulted datatype is considered (one result per op assumed).
                datatype = datatypes[0]
                if datatype.type == "DatatypeMeasure":
                    ## Load proper entity class from DB.
                    measures = dao.get_generic_entity(DatatypeMeasure, datatype.id)
                else:
                    measures = dao.get_generic_entity(DatatypeMeasure, datatype.gid, '_analyzed_datatype')
                pse_context.prepare_metrics_datatype(measures, datatype)
        if key_1 not in final_dict:
            final_dict[key_1] = {}
        final_dict[key_1][key_2] = pse_context.build_node_info(operation_, datatype)
    pse_context.fill_object(final_dict)
    ## datatypes_dict is not actually used in the drawing of the PSE and actually
    ## causes problems in case of NaN values, so just remove it before creating the json
    pse_context.datatypes_dict = {}
    # Non-numeric axes are replaced by their positional indices for plotting.
    if not only_numbers1:
        pse_context.values_x = range(len(list(values1)))
    if not only_numbers2:
        pse_context.values_y = range(len(list(values2)))
    return pse_context
def plot(self, figure, operation_group, metric, range1_name, range2_name):
    """
    Do the plot for the given figure. Also need operation group, metric and ranges
    in order to compute the data to be plotted.

    :param figure: matplotlib Figure to draw into
    :param operation_group: group whose operations supply the metric values
    :param metric: name of the metric plotted
    :param range1_name: name of the x-axis range parameter
    :param range2_name: name of the y-axis range parameter
    :returns: 2D list of datatype GIDs, indexed [range1][range2]
    :raises LaunchException: when some operations have not finished yet
    """
    operations = dao.get_operations_in_group(operation_group.id)
    # Data from which to interpolate larger 2-D space
    apriori_x = numpy.array(self.range1)
    apriori_y = numpy.array(self.range2)
    apriori_data = numpy.zeros((apriori_x.size, apriori_y.size))
    # An 2D array of GIDs which is used later to launch overlay for a DataType
    datatypes_gids = [[None for _ in self.range2] for _ in self.range1]
    for operation_ in operations:
        # SECURITY NOTE: range_values is a stored Python-literal string evaluated
        # with eval(); ast.literal_eval would be a safer parser here.
        range_values = eval(operation_.range_values)
        key_1 = range_values[range1_name]
        index_x = self.range1.index(key_1)
        key_2 = range_values[range2_name]
        index_y = self.range2.index(key_2)
        if operation_.status != model.STATUS_FINISHED:
            raise LaunchException("Not all operations from this range are complete. Cannot view until then.")
        datatype = dao.get_results_for_operation(operation_.id)[0]
        datatypes_gids[index_x][index_y] = datatype.gid
        if datatype.type == "DatatypeMeasure":
            measures = dao.get_generic_entity(DatatypeMeasure, datatype.id)
        else:
            measures = dao.get_generic_entity(DatatypeMeasure, datatype.gid, '_analyzed_datatype')
        # Missing measures are plotted as 0 (NOTE: indistinguishable from a true 0 metric).
        if measures:
            apriori_data[index_x][index_y] = measures[0].metrics[metric]
        else:
            apriori_data[index_x][index_y] = 0
    # Attempt order-3 interpolation.
    kx = ky = 3
    if len(self.range1) <= 3 or len(self.range2) <= 3:
        # Number of points is too small, just do linear interpolation
        kx = ky = 1
    s = interpolate.RectBivariateSpline(apriori_x, apriori_y, apriori_data, kx=kx, ky=ky)
    # Get data of higher resolution that we'll plot later on
    posteriori_x = numpy.arange(self.range1[0], self.range1[-1],
                                (self.range1[-1] - self.range1[0]) / RESOLUTION[0])
    posteriori_y = numpy.arange(self.range2[0], self.range2[-1],
                                (self.range2[-1] - self.range2[0]) / RESOLUTION[1])
    # rot90 orients the matrix so range2 increases upwards in the image.
    posteriori_data = numpy.rot90(s(posteriori_x, posteriori_y))
    # Keep the spline so later mouse-over lookups can re-evaluate it.
    self.interp_models[figure.number] = s
    # Do actual plot.
    axes = figure.gca()
    img = axes.imshow(posteriori_data,
                      extent=(min(self.range1), max(self.range1),
                              min(self.range2), max(self.range2)),
                      aspect='auto', interpolation='nearest')
    axes.set_title("Interpolated values for metric %s" % (metric,))
    figure.colorbar(img)
    axes.set_xlabel(range1_name)
    axes.set_ylabel(range2_name)

    def format_coord(x, y):
        # Status-bar coordinate formatter for the interactive figure.
        return 'x=%1.4f, y=%1.4f' % (x, y)

    axes.format_coord = format_coord
    return datatypes_gids
def prepare_parameters(datatype_group_gid, back_page, color_metric=None, size_metric=None):
    """
    We suppose that there are max 2 ranges and from each operation results exactly one dataType.

    :param datatype_group_gid: the group id for the `DataType` to be visualised
    :param back_page: page where the back button will redirect
    :param color_metric: a list of `DataTypeMeasure` which has been executed on `datatype_group_gid`
    :param size_metric: a list of `DataTypeMeasure` which has been executed on `datatype_group_gid`
    :returns: `ContextDiscretePSE`
    :raises Exception: when `datatype_group_id` is invalid (not in database)
    """
    datatype_group = dao.get_datatype_group_by_gid(datatype_group_gid)
    if datatype_group is None:
        raise Exception(
            "Selected DataTypeGroup is no longer present in the database. "
            "It might have been remove or the specified id is not the correct one."
        )
    operation_group = dao.get_operationgroup_by_id(datatype_group.fk_operation_group)
    _, range1_name, range1_labels = operation_group.load_range_numbers(operation_group.range1)
    has_range2, range2_name, range2_labels = operation_group.load_range_numbers(operation_group.range2)
    pse_context = ContextDiscretePSE(datatype_group_gid, range1_labels, range2_labels,
                                     color_metric, size_metric, back_page)
    final_dict = dict()
    operations = dao.get_operations_in_group(operation_group.id)
    for operation_ in operations:
        if operation_.status == model.STATUS_STARTED:
            pse_context.has_started_ops = True
        # SECURITY NOTE: range_values comes from the DB as a Python-literal string
        # and is evaluated with eval(); ast.literal_eval would be a safer parser.
        range_values = eval(operation_.range_values)
        key_1 = range_values[range1_name]
        key_2 = model.RANGE_MISSING_STRING
        # FIX: was `if has_range2 is not None:` — has_range2 is the "second range
        # exists" flag, so the None-check was always true when the flag was False,
        # causing a lookup with range2_name=None for single-range groups.
        if has_range2:
            key_2 = range_values[range2_name]
        datatype = None
        if operation_.status == model.STATUS_FINISHED:
            # Assumes every finished operation produced at least one result —
            # an empty result list would raise IndexError here.
            datatype = dao.get_results_for_operation(operation_.id)[0]
            measures = dao.get_generic_entity(DatatypeMeasure, datatype.gid, '_analyzed_datatype')
            pse_context.prepare_metrics_datatype(measures, datatype)
        if key_1 not in final_dict:
            final_dict[key_1] = {key_2: pse_context.build_node_info(operation_, datatype)}
        else:
            final_dict[key_1][key_2] = pse_context.build_node_info(operation_, datatype)
    pse_context.fill_object(final_dict)
    ## datatypes_dict is not actually used in the drawing of the PSE and actually
    ## causes problems in case of NaN values, so just remove it before creating the json
    pse_context.datatypes_dict = {}
    return pse_context
def prepare_next_step(self, last_executed_op_id):
    """
    If the operation with id 'last_executed_op_id' resulted after
    the execution of a workflow step then this method will launch
    the operation corresponding to the next step from the workflow.

    :param last_executed_op_id: id of the operation that just finished
    :returns: id of the next operation to launch, or None when the workflow ended
    :raises WorkflowInterStepsException: wrapping any failure during preparation
    """
    try:
        current_step, next_workflow_step = self._get_data(last_executed_op_id)
        if next_workflow_step is not None:
            operation = dao.get_operation_by_id(next_workflow_step.fk_operation)
            dynamic_param_names = next_workflow_step.dynamic_workflow_param_names
            if len(dynamic_param_names) > 0:
                # Resolve each dynamic parameter from the step it refers to:
                # integer index -> a result datatype GID; otherwise -> a value
                # copied out of the former operation's own parameters.
                op_params = json.loads(operation.parameters)
                for param_name in dynamic_param_names:
                    dynamic_param = op_params[param_name]
                    former_step = dao.get_workflow_step_by_step_index(
                        next_workflow_step.fk_workflow, dynamic_param[wf_cfg.STEP_INDEX_KEY])
                    if type(dynamic_param[wf_cfg.DATATYPE_INDEX_KEY]) is IntType:
                        datatypes = dao.get_results_for_operation(former_step.fk_operation)
                        op_params[param_name] = datatypes[dynamic_param[wf_cfg.DATATYPE_INDEX_KEY]].gid
                    else:
                        previous_operation = dao.get_operation_by_id(former_step.fk_operation)
                        op_params[param_name] = json.loads(
                            previous_operation.parameters)[dynamic_param[wf_cfg.DATATYPE_INDEX_KEY]]
                operation.parameters = json.dumps(op_params)
                operation = dao.store_entity(operation)
            return operation.id
        else:
            if current_step is not None:
                # No next step: this workflow is done. Mark it finished and, if all
                # parallel workflows of the burst finished too, close the burst and
                # charge its disk size to the project administrator.
                current_workflow = dao.get_workflow_by_id(current_step.fk_workflow)
                current_workflow.status = current_workflow.STATUS_FINISHED
                dao.store_entity(current_workflow)
                burst_entity = dao.get_burst_by_id(current_workflow.fk_burst)
                parallel_workflows = dao.get_workflows_for_burst(burst_entity.id)
                all_finished = True
                for workflow in parallel_workflows:
                    if workflow.status == workflow.STATUS_STARTED:
                        all_finished = False
                if all_finished:
                    self.mark_burst_finished(burst_entity, success=True)
                    disk_size = dao.get_burst_disk_size(burst_entity.id)  #Transform from kB to MB
                    if disk_size > 0:
                        user = dao.get_project_by_id(burst_entity.fk_project).administrator
                        user.used_disk_space = user.used_disk_space + disk_size
                        dao.store_entity(user)
            else:
                # Operation was standalone (not part of a workflow step):
                # charge its disk size to the launching user.
                operation = dao.get_operation_by_id(last_executed_op_id)
                disk_size = dao.get_disk_size_for_operation(operation.id)  #Transform from kB to MB
                if disk_size > 0:
                    user = dao.get_user_by_id(operation.fk_launched_by)
                    user.used_disk_space = user.used_disk_space + disk_size
                    dao.store_entity(user)
            return None
    except Exception, excep:
        self.logger.error(excep)
        self.logger.exception(excep)
        raise WorkflowInterStepsException(excep)
def get_results_for_operation(operation_id, selected_filter=None):
    """
    Retrieve the DataTypes entities resulted after the execution of the given operation.

    :param operation_id: id of the operation whose results are fetched
    :param selected_filter: optional filter narrowing the returned DataTypes
    :returns: list of DataType entities (delegates directly to the DAO)
    """
    return dao.get_results_for_operation(operation_id, selected_filter)
def retrieve_project_full(self, project_id, applied_filters=None, current_page=1):
    """
    Return a Tuple with Project entity and Operations for current Project.

    :param project_id: Current Project Identifier
    :param applied_filters: Filters to apply on Operations
    :param current_page: Number for current page in operations
    :returns: tuple (project, total operations count, operation display rows, page count)
    """
    selected_project = self.find_project(project_id)
    total_filtered = self.count_filtered_operations(project_id, applied_filters)
    # Ceiling division for the number of pages.
    pages_no = total_filtered // OPERATIONS_PAGE_SIZE + (1 if total_filtered % OPERATIONS_PAGE_SIZE else 0)
    total_ops_nr = self.count_filtered_operations(project_id)
    start_idx = OPERATIONS_PAGE_SIZE * (current_page - 1)
    current_ops = dao.get_filtered_operations(project_id, applied_filters, start_idx, OPERATIONS_PAGE_SIZE)
    if current_ops is None:
        return selected_project, 0, [], 0
    operations = []
    view_categ_id = dao.get_visualisers_categories()[0].id
    # NOTE(review): one_op is a positional DB row; index meanings below are inferred
    # from usage (0/1: first/last op id, 2: count, 3: group fk, 4: algorithm fk,
    # 5: method, 6: user fk, 7-9: create/start/complete, 10: status, 11: additional,
    # 12: visible flag, 13: tag, 14: gid) — confirm against the DAO query.
    for one_op in current_ops:
        try:
            result = {}
            if one_op[0] != one_op[1]:
                # Row aggregates several operations: show an id interval.
                result["id"] = str(one_op[0]) + "-" + str(one_op[1])
            else:
                result["id"] = str(one_op[0])
            burst = dao.get_burst_for_operation_id(one_op[0])
            result["burst_name"] = burst.name if burst else '-'
            result["count"] = one_op[2]
            result["gid"] = one_op[14]
            if one_op[3] is not None and one_op[3]:
                # Operation belongs to an OperationGroup: attach group details and
                # the viewer launchers applicable to the whole DataTypeGroup.
                try:
                    operation_group = dao.get_generic_entity(model.OperationGroup, one_op[3])[0]
                    result["group"] = operation_group.name
                    result["group"] = result["group"].replace("_", " ")
                    result["operation_group_id"] = operation_group.id
                    datatype_group = dao.get_datatypegroup_by_op_group_id(one_op[3])
                    result["datatype_group_gid"] = datatype_group.gid
                    result["gid"] = operation_group.gid
                    ## Filter only viewers for current DataTypeGroup entity:
                    launcher = self.retrieve_launchers(datatype_group.gid,
                                                      include_categories=[view_categ_id]).values()[0]
                    view_groups = []
                    for launcher in launcher.values():
                        url = '/flow/' + str(launcher['category']) + '/' + str(launcher['id'])
                        if launcher['part_of_group']:
                            url = '/flow/prepare_group_launch/' + datatype_group.gid + '/' + \
                                  str(launcher['category']) + '/' + str(launcher['id'])
                        view_groups.append(dict(name=launcher["displayName"],
                                                url=url,
                                                param_name=launcher['children'][0]['param_name'],
                                                part_of_group=launcher['part_of_group']))
                    result["view_groups"] = view_groups
                except Exception:
                    # Group details are optional for display: log and continue with the row.
                    self.logger.exception("We will ignore group on entity:" + str(one_op))
                    result["datatype_group_gid"] = None
            else:
                result['group'] = None
                result['datatype_group_gid'] = None
            result["algorithm"] = dao.get_algorithm_by_id(one_op[4])
            result["method"] = one_op[5]
            result["user"] = dao.get_user_by_id(one_op[6])
            # Dates may arrive from the DB either as strings or datetime objects.
            if type(one_op[7]) in (str, unicode):
                result["create"] = string2date(str(one_op[7]))
            else:
                result["create"] = one_op[7]
            if type(one_op[8]) in (str, unicode):
                result["start"] = string2date(str(one_op[8]))
            else:
                result["start"] = one_op[8]
            if type(one_op[9]) in (str, unicode):
                result["complete"] = string2date(str(one_op[9]))
            else:
                result["complete"] = one_op[9]
            if result["complete"] is not None and result["start"] is not None:
                result["duration"] = format_timedelta(result["complete"] - result["start"])
            result["status"] = one_op[10]
            result["additional"] = one_op[11]
            result["visible"] = True if one_op[12] > 0 else False
            result['operation_tag'] = one_op[13]
            result['figures'] = None
            if not result['group']:
                # Plain operation: attach its resulted datatypes and stored figures.
                datatype_results = dao.get_results_for_operation(result['id'])
                result['results'] = [dao.get_generic_entity(dt.module + '.' + dt.type, dt.gid, 'gid')[0]
                                     for dt in datatype_results]
                operation_figures = dao.get_figures_for_operation(result['id'])
                # Compute the full path to the figure / image on disk
                for figure in operation_figures:
                    figures_folder = self.structure_helper.get_images_folder(figure.project.name)
                    figure_full_path = os.path.join(figures_folder, figure.file_path)
                    # Compute the path available from browser
                    figure.figure_path = utils.path2url_part(figure_full_path)
                result['figures'] = operation_figures
            else:
                result['results'] = None
            operations.append(result)
        except Exception:
            ## We got an exception when processing one Operation Row. We will continue with the rest of the rows.
            self.logger.exception("Could not prepare operation for display:" + str(one_op))
    return selected_project, total_ops_nr, operations, pages_no
def list_operation_results(operation_id):
    """Print a fixed-width table of the DataType results produced by one operation."""
    row_format = "%16s %24s %32s %12s"
    # Header row uses the same column layout as the data rows.
    print(row_format % ('id', 'type', 'gid', 'date'))
    results = dao.get_results_for_operation(operation_id)
    for result in results:
        print(row_format % (result.id, result.type, result.gid, result.create_date))
def retrieve_project_full(self, project_id, applied_filters=None, current_page=1):
    """
    Return a Tuple with Project entity and Operations for current Project.
    :param project_id: Current Project Identifier
    :param applied_filters: Filters to apply on Operations
    :param current_page: Number for current page in operations
    """
    # NOTE(review): Python 2 only -- this variant uses `except Exception, excep`
    # syntax and the `unicode` builtin.
    selected_project = self.find_project(project_id)
    total_filtered = self.count_filtered_operations(project_id, applied_filters)
    # Paging: start_idx is the offset of the current page; end_idx is the page length
    # (a full page, or whatever remains of the filtered set on the last page).
    start_idx = OPERATIONS_PAGE_SIZE * (current_page - 1)
    if total_filtered >= start_idx + OPERATIONS_PAGE_SIZE:
        end_idx = OPERATIONS_PAGE_SIZE
    else:
        end_idx = total_filtered - start_idx
    # Total number of pages, rounded up when the last page is partial.
    pages_no = total_filtered // OPERATIONS_PAGE_SIZE + (1 if total_filtered % OPERATIONS_PAGE_SIZE else 0)
    total_ops_nr = self.count_filtered_operations(project_id)
    current_ops = dao.get_filtered_operations(project_id, applied_filters, start_idx, end_idx)
    started_ops = 0
    if current_ops is None:
        # NOTE(review): this early return yields 3 values while the sibling versions of
        # this method return a 4-tuple (project, count, operations, pages) -- confirm
        # against callers; likely a latent unpacking bug.
        return selected_project, [], 0
    operations = []
    for one_op in current_ops:
        try:
            # `one_op` is a raw DB row; the numeric indices below map to operation
            # columns (id interval, counts, group id, dates, status, ...) --
            # TODO confirm against the query in dao.get_filtered_operations.
            result = {}
            # one_op[0]..one_op[1] form an id interval for grouped operations.
            if one_op[0] != one_op[1]:
                result["id"] = str(one_op[0]) + "-" + str(one_op[1])
            else:
                result["id"] = str(one_op[0])
            burst = dao.get_burst_for_operation_id(one_op[0])
            result["burst_name"] = burst.name if burst else '-'
            result["count"] = one_op[2]
            result["gid"] = one_op[14]
            if one_op[3] is not None and one_op[3]:
                # Operation belongs to an OperationGroup: collect group display info
                # and the launchable visualizers for the group's DataTypeGroup.
                try:
                    operation_group = dao.get_generic_entity(model.OperationGroup, one_op[3])[0]
                    result["group"] = operation_group.name
                    result["group"] = result["group"].replace("_", " ")
                    result["operation_group_id"] = operation_group.id
                    datatype_group = dao.get_datatypegroup_by_op_group_id(one_op[3])
                    datatype = dao.get_datatype_by_id(datatype_group.id)
                    result["datatype_group_gid"] = datatype.gid
                    result["gid"] = operation_group.gid
                    # Exclude every algorithm category except the visualisers one.
                    all_categs = dao.get_algorithm_categories()
                    view_categ = dao.get_visualisers_categories()[0]
                    excludes = [categ.id for categ in all_categs if categ.id != view_categ.id]
                    algo = self.retrieve_launchers("DataTypeGroup", datatype.gid,
                                                   exclude_categories=excludes).values()[0]
                    view_groups = []
                    # NOTE(review): the loop variable shadows the dict `algo` above;
                    # works because .values() is evaluated before rebinding, but fragile.
                    for algo in algo.values():
                        url = '/flow/' + str(algo['category']) + '/' + str(algo['id'])
                        if algo['part_of_group']:
                            url = '/flow/prepare_group_launch/' + datatype.gid + '/' + \
                                  str(algo['category']) + '/' + str(algo['id'])
                        view_groups.append(dict(name=algo["displayName"], url=url,
                                                param_name=algo['children'][0]['param_name'],
                                                part_of_group=algo['part_of_group']))
                    result["view_groups"] = view_groups
                except Exception, excep:
                    # Best-effort: a broken group must not hide the operation row itself.
                    self.logger.error(excep)
                    self.logger.warning("Will ignore group on entity:" + str(one_op))
                    result["datatype_group_gid"] = None
            else:
                result['group'] = None
                result['datatype_group_gid'] = None
            result["algorithm"] = dao.get_algorithm_by_id(one_op[4])
            result["method"] = one_op[5]
            result["user"] = dao.get_user_by_id(one_op[6])
            # Date columns may come back either as datetime objects or as strings.
            if type(one_op[7]) in (str, unicode):
                result["create"] = string2date(str(one_op[7]))
            else:
                result["create"] = one_op[7]
            if type(one_op[8]) in (str, unicode):
                result["start"] = string2date(str(one_op[8]))
            else:
                result["start"] = one_op[8]
            if type(one_op[9]) in (str, unicode):
                result["complete"] = string2date(str(one_op[9]))
            else:
                result["complete"] = one_op[9]
            if result["complete"] is not None and result["start"] is not None:
                result["duration"] = timedelta2string(result["complete"] - result["start"])
            result["status"] = one_op[10]
            if result["status"] == model.STATUS_STARTED:
                started_ops += 1
            result["additional"] = one_op[11]
            result["visible"] = True if one_op[12] > 0 else False
            result['operation_tag'] = one_op[13]
            result['figures'] = None
            if not result['group']:
                # Non-grouped operation: load its concrete result entities and figures.
                datatype_results = dao.get_results_for_operation(result['id'])
                result['results'] = [dao.get_generic_entity(dt.module + '.'
                                                            + dt.type, dt.gid, 'gid')[0]
                                     for dt in datatype_results]
                operation_figures = dao.get_figures_for_operation(result['id'])
                # Compute the full path to the figure / image on disk
                for figure in operation_figures:
                    figures_folder = self.structure_helper.get_images_folder(figure.project.name,
                                                                             figure.operation.id)
                    figure_full_path = os.path.join(figures_folder, figure.file_path)
                    # Compute the path available from browser
                    figure.figure_path = utils.path2url_part(figure_full_path)
                result['figures'] = operation_figures
            else:
                result['results'] = None
            operations.append(result)
    # NOTE(review): the visible text ends here -- the `try:` inside the loop has no
    # matching `except` clause and the method has no `return` statement. The source
    # appears truncated; sibling versions end with an `except Exception` that logs and
    # continues, followed by `return selected_project, total_ops_nr, operations,
    # pages_no` (this variant would presumably also surface `started_ops`) -- restore
    # from version control rather than guessing.
def prepare_parameters(datatype_group_gid, back_page, color_metric=None, size_metric=None):
    """
    We suppose that there are max 2 ranges and from each operation results exactly one dataType.

    :param datatype_group_gid: the group id for the `DataType` to be visualised
    :param back_page: Page where back button will direct
    :param color_metric: String referring to metric to apply on colors
    :param size_metric: String referring to metric to apply on sizes
    :returns: `ContextDiscretePSE`
    :raises Exception: when `datatype_group_id` is invalid (not in database)
    """
    datatype_group = dao.get_datatype_group_by_gid(datatype_group_gid)
    if datatype_group is None:
        raise Exception("Selected DataTypeGroup is no longer present in the database. "
                        "It might have been remove or the specified id is not the correct one.")
    operation_group = dao.get_operationgroup_by_id(datatype_group.fk_operation_group)
    # Each range yields (name, values, labels, all-numeric? flag); axes are numeric
    # only when BOTH ranges are numeric.
    name1, values1, labels1, only_numbers1 = DiscretePSEAdapter.prepare_range_labels(operation_group,
                                                                                     operation_group.range1)
    name2, values2, labels2, only_numbers2 = DiscretePSEAdapter.prepare_range_labels(operation_group,
                                                                                     operation_group.range2)
    pse_context = ContextDiscretePSE(datatype_group_gid, color_metric, size_metric, back_page)
    pse_context.setRanges(name1, values1, labels1, name2, values2, labels2, only_numbers1 and only_numbers2)
    final_dict = {}
    operations = dao.get_operations_in_group(operation_group.id)
    # Map each range value to its ordinal index, used as a stand-in coordinate when
    # the axis values are not numeric.
    fake_numbers1 = dict(zip(values1, range(len(list(values1)))))
    fake_numbers2 = dict(zip(values2, range(len(list(values2)))))
    for operation_ in operations:
        if not operation_.has_finished:
            pse_context.has_started_ops = True
        # SECURITY NOTE(review): eval() of a DB-stored string; if range_values is a
        # plain literal dict, ast.literal_eval would be safer -- confirm the format.
        range_values = eval(operation_.range_values)
        key_1 = DiscretePSEAdapter.get_value_on_axe(range_values, only_numbers1, name1, fake_numbers1)
        key_2 = DiscretePSEAdapter.get_value_on_axe(range_values, only_numbers2, name2, fake_numbers2)
        datatype = None
        if operation_.status == model.STATUS_FINISHED:
            datatypes = dao.get_results_for_operation(operation_.id)
            if len(datatypes) > 0:
                datatype = datatypes[0]
                if datatype.type == "DatatypeMeasure":
                    ## Load proper entity class from DB.
                    measures = dao.get_generic_entity(DatatypeMeasure, datatype.id)
                else:
                    # Otherwise look up the measure computed FOR this datatype.
                    measures = dao.get_generic_entity(DatatypeMeasure, datatype.gid, '_analyzed_datatype')
                pse_context.prepare_metrics_datatype(measures, datatype)
        if key_1 not in final_dict:
            final_dict[key_1] = {}
        # One node per (range1, range2) coordinate in the PSE grid.
        final_dict[key_1][key_2] = pse_context.build_node_info(operation_, datatype)
    pse_context.fill_object(final_dict)
    ## datatypes_dict is not actually used in the drawing of the PSE and actually
    ## causes problems in case of NaN values, so just remove it before creating the json
    pse_context.datatypes_dict = {}
    # For non-numeric axes, fall back to ordinal positions for plotting.
    if not only_numbers1:
        pse_context.values_x = range(len(list(values1)))
    if not only_numbers2:
        pse_context.values_y = range(len(list(values2)))
    return pse_context
def retrieve_project_full(self, project_id, applied_filters=None, current_page=1):
    """
    Return a Tuple with Project entity and Operations for current Project.

    :param project_id: Current Project Identifier
    :param applied_filters: Filters to apply on Operations
    :param current_page: Number for current page in operations
    :returns: tuple (Project, total operation count, list of operation display dicts, number of pages)
    """

    def _to_datetime(value):
        # Date columns may come back either as datetime objects or as their string
        # form, depending on the DB driver; normalize to datetime.
        return string2date(str(value)) if isinstance(value, str) else value

    selected_project = self.find_project(project_id)
    total_filtered = self.count_filtered_operations(project_id, applied_filters)
    # Total number of pages, rounded up when the last page is partial.
    pages_no = total_filtered // OPERATIONS_PAGE_SIZE + (1 if total_filtered % OPERATIONS_PAGE_SIZE else 0)
    total_ops_nr = self.count_filtered_operations(project_id)
    start_idx = OPERATIONS_PAGE_SIZE * (current_page - 1)
    current_ops = dao.get_filtered_operations(project_id, applied_filters, start_idx, OPERATIONS_PAGE_SIZE)
    if current_ops is None:
        return selected_project, 0, [], 0

    operations = []
    # Fixed: dropped the unused `view_categ_id = dao.get_visualisers_categories()[0].id`
    # local and its needless DB round-trip.
    for one_op in current_ops:
        try:
            # `one_op` is a raw DB row; numeric indices map to operation columns.
            result = {}
            # one_op[0]..one_op[1] form an id interval for grouped operations.
            if one_op[0] != one_op[1]:
                result["id"] = str(one_op[0]) + "-" + str(one_op[1])
            else:
                result["id"] = str(one_op[0])
            burst = dao.get_burst_for_operation_id(one_op[0])
            result["burst_name"] = burst.name if burst else '-'
            result["count"] = one_op[2]
            result["gid"] = one_op[13]
            if one_op[3] is not None and one_op[3]:
                # Operation belongs to an OperationGroup: collect group display info.
                try:
                    operation_group = dao.get_generic_entity(OperationGroup, one_op[3])[0]
                    result["group"] = operation_group.name
                    result["group"] = result["group"].replace("_", " ")
                    result["operation_group_id"] = operation_group.id
                    datatype_group = dao.get_datatypegroup_by_op_group_id(one_op[3])
                    result["datatype_group_gid"] = datatype_group.gid
                    result["gid"] = operation_group.gid
                    ## Filter only viewers for current DataTypeGroup entity:
                    result["view_groups"] = FlowService().get_visualizers_for_group(datatype_group.gid)
                except Exception:
                    # Best-effort: a broken group must not hide the operation row itself.
                    self.logger.exception("We will ignore group on entity:" + str(one_op))
                    result["datatype_group_gid"] = None
            else:
                result['group'] = None
                result['datatype_group_gid'] = None
            result["algorithm"] = dao.get_algorithm_by_id(one_op[4])
            result["user"] = dao.get_user_by_id(one_op[5])
            result["create"] = _to_datetime(one_op[6])
            result["start"] = _to_datetime(one_op[7])
            result["complete"] = _to_datetime(one_op[8])
            if result["complete"] is not None and result["start"] is not None:
                result["duration"] = format_timedelta(result["complete"] - result["start"])
            result["status"] = one_op[9]
            result["additional"] = one_op[10]
            result["visible"] = one_op[11] > 0
            result['operation_tag'] = one_op[12]
            result['figures'] = None
            if not result['group']:
                # Non-grouped operation: load its concrete result entities and figures.
                datatype_results = dao.get_results_for_operation(result['id'])
                result['results'] = []
                for dt in datatype_results:
                    dt_loaded = ABCAdapter.load_entity_by_gid(dt.gid)
                    if dt_loaded:
                        result['results'].append(dt_loaded)
                    else:
                        self.logger.warning("Could not retrieve datatype %s" % str(dt))
                operation_figures = dao.get_figures_for_operation(result['id'])
                # Compute the full path to the figure / image on disk
                for figure in operation_figures:
                    figures_folder = self.structure_helper.get_images_folder(figure.project.name)
                    figure_full_path = os.path.join(figures_folder, figure.file_path)
                    # Compute the path available from browser
                    figure.figure_path = utils.path2url_part(figure_full_path)
                result['figures'] = operation_figures
            else:
                result['results'] = None
            operations.append(result)
        except Exception:
            ## We got an exception when processing one Operation Row. We will continue with the rest of the rows.
            self.logger.exception("Could not prepare operation for display:" + str(one_op))
    return selected_project, total_ops_nr, operations, pages_no
def plot(self, figure, operation_group, metric, range1_name, range2_name):
    """
    Do the plot for the given figure. Also need operation group, metric and ranges
    in order to compute the data to be plotted.

    :returns: 2D list of DataType GIDs (one per grid cell; None where no result),
              later used to launch the overlay for a DataType.
    """
    # NOTE(review): Python 2 only -- uses `xrange` and relies on `/` being integer
    # division for the granularity computations below.
    operations = dao.get_operations_in_group(operation_group.id)
    # Data from which to interpolate larger 2-D space
    apriori_x = numpy.array(self.range1)
    apriori_y = numpy.array(self.range2)
    apriori_data = numpy.zeros((apriori_x.size, apriori_y.size))
    # An 2D array of GIDs which is used later to launch overlay for a DataType
    datatypes_gids = [[None for _ in self.range2] for _ in self.range1]
    for operation_ in operations:
        # SECURITY NOTE(review): eval() of a DB-stored string -- confirm the stored
        # format; ast.literal_eval would be safer if it is a plain literal dict.
        range_values = eval(operation_.range_values)
        # Locate this operation's cell in the (range1, range2) grid.
        key_1 = range_values[range1_name]
        index_x = self.range1.index(key_1)
        key_2 = range_values[range2_name]
        index_y = self.range2.index(key_2)
        if operation_.status == model.STATUS_STARTED:
            raise LaunchException("Not all operations from this range are complete. Cannot view until then.")
        operation_results = dao.get_results_for_operation(operation_.id)
        if operation_results:
            datatype = operation_results[0]
            datatypes_gids[index_x][index_y] = datatype.gid
            if datatype.type == "DatatypeMeasure":
                # Result IS the measure: load the proper entity class from DB.
                measures = dao.get_generic_entity(DatatypeMeasure, datatype.id)
            else:
                # Otherwise look up the measure computed FOR this datatype.
                measures = dao.get_generic_entity(DatatypeMeasure, datatype.gid, '_analyzed_datatype')
        else:
            datatypes_gids[index_x][index_y] = None
            measures = None
        # Missing measures become NaN and are masked out after interpolation.
        if measures:
            apriori_data[index_x][index_y] = measures[0].metrics[metric]
        else:
            apriori_data[index_x][index_y] = numpy.NaN
    # Convert array to 0 but keep track of nan values so we can replace after interpolation
    # since interpolating with nan values will just break the whole process
    nan_indices = numpy.isnan(apriori_data)
    self.nan_indices[figure.number] = nan_indices
    apriori_data = numpy.nan_to_num(apriori_data)
    # Attempt order-3 interpolation.
    kx = ky = 3
    if len(self.range1) <= 3 or len(self.range2) <= 3:
        # Number of points is too small, just do linear interpolation
        kx = ky = 1
    s = interpolate.RectBivariateSpline(apriori_x, apriori_y, apriori_data, kx=kx, ky=ky)
    # Get data of higher resolution that we'll plot later on
    posteriori_x = numpy.arange(self.range1[0], self.range1[-1],
                                (self.range1[-1] - self.range1[0]) / RESOLUTION[0])
    posteriori_y = numpy.arange(self.range2[0], self.range2[-1],
                                (self.range2[-1] - self.range2[0]) / RESOLUTION[1])
    posteriori_data = s(posteriori_x, posteriori_y)
    # How many fine-grid cells each original grid cell expands into, per axis.
    x_granularity = RESOLUTION[0] / len(self.range1)
    y_granularity = RESOLUTION[1] / len(self.range2)
    for idx, row in enumerate(nan_indices):
        for idy, was_nan in enumerate(row):
            if was_nan:
                # Now we want to set back all the values that were NaN before interpolation
                # and keep track of the change in granularity. For this reason for each nan
                # value we had before, we will now have a matrix of the shape [x_granularity x y_granularity]
                # full of NaN values
                start_x = idx * x_granularity
                end_x = (idx + 1) * x_granularity
                start_y = idy * y_granularity
                end_y = (idy + 1) * y_granularity
                for x_scaled in xrange(start_x, end_x, 1):
                    for y_scaled in xrange(start_y, end_y, 1):
                        posteriori_data[x_scaled, y_scaled] = numpy.NaN
    # Rotate to get good plot
    posteriori_data = numpy.rot90(posteriori_data)
    # Keep the fitted spline so callers can query interpolated values later.
    self.interp_models[figure.number] = s
    # Do actual plot.
    axes = figure.gca()
    img = axes.imshow(posteriori_data, extent=(min(self.range1), max(self.range1),
                                               min(self.range2), max(self.range2)),
                      aspect='auto', interpolation='nearest')
    axes.set_title("Interpolated values for metric %s" % (metric,))
    figure.colorbar(img)
    axes.set_xlabel(range1_name)
    axes.set_ylabel(range2_name)

    def format_coord(x, y):
        # Status-bar coordinate formatter for the interactive plot window.
        return 'x=%1.4f, y=%1.4f' % (x, y)

    axes.format_coord = format_coord
    return datatypes_gids
def retrieve_project_full(self, project_id, applied_filters=None, current_page=1):
    """
    Return a Tuple with Project entity and Operations for current Project.

    :param project_id: Current Project Identifier
    :param applied_filters: Filters to apply on Operations
    :param current_page: Number for current page in operations
    """
    selected_project = self.find_project(project_id)
    total_filtered = self.count_filtered_operations(
        project_id, applied_filters)
    # Total number of pages, rounded up when the last page is partial.
    pages_no = total_filtered // OPERATIONS_PAGE_SIZE + (
        1 if total_filtered % OPERATIONS_PAGE_SIZE else 0)
    total_ops_nr = self.count_filtered_operations(project_id)
    start_idx = OPERATIONS_PAGE_SIZE * (current_page - 1)
    current_ops = dao.get_filtered_operations(project_id, applied_filters,
                                              start_idx, OPERATIONS_PAGE_SIZE)
    if current_ops is None:
        return selected_project, 0, [], 0
    operations = []
    for one_op in current_ops:
        try:
            # `one_op` is a raw DB row; numeric indices map to operation columns.
            result = {}
            # one_op[0]..one_op[1] form an id interval for grouped operations.
            if one_op[0] != one_op[1]:
                result["id"] = str(one_op[0]) + "-" + str(one_op[1])
            else:
                result["id"] = str(one_op[0])
            burst = dao.get_burst_for_operation_id(one_op[0])
            result["burst_name"] = burst.name if burst else '-'
            result["count"] = one_op[2]
            result["gid"] = one_op[13]
            operation_group_id = one_op[3]
            if operation_group_id is not None and operation_group_id:
                # Operation belongs to an OperationGroup: collect group display info.
                try:
                    operation_group = dao.get_generic_entity(
                        OperationGroup, operation_group_id)[0]
                    result["group"] = operation_group.name
                    result["group"] = result["group"].replace("_", " ")
                    result["operation_group_id"] = operation_group.id
                    datatype_group = dao.get_datatypegroup_by_op_group_id(
                        operation_group_id)
                    result[
                        "datatype_group_gid"] = datatype_group.gid if datatype_group is not None else None
                    result["gid"] = operation_group.gid
                    # Filter only viewers for current DataTypeGroup entity:
                    if datatype_group is None:
                        view_groups = None
                    else:
                        view_groups = AlgorithmService(
                        ).get_visualizers_for_group(datatype_group.gid)
                    result["view_groups"] = view_groups
                except Exception:
                    # Best-effort: a broken group must not hide the operation row itself.
                    self.logger.exception(
                        "We will ignore group on entity:" + str(one_op))
                    result["datatype_group_gid"] = None
            else:
                result['group'] = None
                result['datatype_group_gid'] = None
            result["algorithm"] = dao.get_algorithm_by_id(one_op[4])
            result["user"] = dao.get_user_by_id(one_op[5])
            # Date columns may come back either as datetime objects or as strings.
            if type(one_op[6]) is str:
                result["create"] = string2date(str(one_op[6]))
            else:
                result["create"] = one_op[6]
            if type(one_op[7]) is str:
                result["start"] = string2date(str(one_op[7]))
            else:
                result["start"] = one_op[7]
            if type(one_op[8]) is str:
                result["complete"] = string2date(str(one_op[8]))
            else:
                result["complete"] = one_op[8]
            if result["complete"] is not None and result[
                    "start"] is not None:
                result["duration"] = format_timedelta(result["complete"] -
                                                      result["start"])
            result["status"] = one_op[9]
            result["additional"] = one_op[10]
            result["visible"] = True if one_op[11] > 0 else False
            result['operation_tag'] = one_op[12]
            if not result['group']:
                # Non-grouped operation: attach its result entities directly.
                result['results'] = dao.get_results_for_operation(
                    result['id'])
            else:
                result['results'] = None
            operations.append(result)
        except Exception:
            # We got an exception when processing one Operation Row. We will continue with the rest of the rows.
            self.logger.exception(
                "Could not prepare operation for display:" + str(one_op))
    return selected_project, total_ops_nr, operations, pages_no