def stage_out_to_operation_folder(working_dir, operation, simulator_gid):
    # type: (Storage, Operation, typing.Union[uuid.UUID, str]) -> (list, Operation, str)
    encrypted_files = HPCSchedulerClient._stage_out_results(working_dir, simulator_gid)

    # Sort the staged-out files: the metric H5, the metric view-model H5 and the plain simulation results
    simulation_results = list()
    metric_encrypted_file = None
    metric_vm_encrypted_file = None
    for encrypted_file in encrypted_files:
        if os.path.basename(encrypted_file).startswith(DatatypeMeasureH5.file_name_base()):
            metric_encrypted_file = encrypted_file
        elif os.path.basename(encrypted_file).startswith(MEASURE_METRICS_MODEL_CLASS):
            metric_vm_encrypted_file = encrypted_file
        else:
            simulation_results.append(encrypted_file)

    encryption_handler = StorageInterface.get_encryption_handler(simulator_gid)
    metric_op, metric_file = HPCSchedulerClient._handle_metric_results(metric_encrypted_file,
                                                                       metric_vm_encrypted_file, operation,
                                                                       encryption_handler)

    # Decrypt the simulation results into the operation folder, then clean up the encryption handler
    project = dao.get_project_by_id(operation.fk_launched_in)
    operation_dir = HPCSchedulerClient.storage_interface.get_project_folder(project.name, str(operation.id))
    h5_filenames = encryption_handler.decrypt_files_to_dir(simulation_results, operation_dir)
    encryption_handler.cleanup_encryption_handler()
    LOGGER.info("Decrypted h5: {}".format(h5_filenames))
    LOGGER.info("Metric op: {}".format(metric_op))
    LOGGER.info("Metric file: {}".format(metric_file))

    return h5_filenames, metric_op, metric_file
def stage_out_to_operation_folder(working_dir, operation, simulator_gid):
    # type: (Storage, Operation, typing.Union[uuid.UUID, str]) -> (list, Operation, str)
    encrypted_files = HPCSchedulerClient._stage_out_results(working_dir, simulator_gid)
    encryption_handler = EncryptionHandler(simulator_gid)

    # The metric result is decrypted into its own metrics operation; everything else is a plain simulation result
    simulation_results = list()
    metric_op = None
    metric_file = None
    for encrypted_file in encrypted_files:
        if os.path.basename(encrypted_file).startswith(DatatypeMeasureH5.file_name_base()):
            metric_op_dir, metric_op = BurstService.prepare_metrics_operation(operation)
            metric_files = encryption_handler.decrypt_files_to_dir([encrypted_file], metric_op_dir)
            metric_file = metric_files[0]
        else:
            simulation_results.append(encrypted_file)

    project = dao.get_project_by_id(operation.fk_launched_in)
    operation_dir = HPCSchedulerClient.file_handler.get_project_folder(project, str(operation.id))
    h5_filenames = EncryptionHandler(simulator_gid).decrypt_files_to_dir(simulation_results, operation_dir)
    return h5_filenames, metric_op, metric_file
def stage_out_to_operation_folder(working_dir, operation, simulator_gid):
    # type: (Storage, Operation, typing.Union[uuid.UUID, str]) -> (list, Operation, str)
    encrypted_files = HPCSchedulerClient._stage_out_results(working_dir, simulator_gid)
    encryption_handler = EncryptionHandler(simulator_gid)

    # Sort the staged-out files: the metric H5, the metric view-model H5 and the plain simulation results
    simulation_results = list()
    metric_encrypted_file = None
    metric_vm_encrypted_file = None
    for encrypted_file in encrypted_files:
        if os.path.basename(encrypted_file).startswith(DatatypeMeasureH5.file_name_base()):
            metric_encrypted_file = encrypted_file
        elif os.path.basename(encrypted_file).startswith(TimeseriesMetricsAdapterModel.__name__):
            metric_vm_encrypted_file = encrypted_file
        else:
            simulation_results.append(encrypted_file)

    metric_op, metric_file = HPCSchedulerClient._handle_metric_results(metric_encrypted_file,
                                                                       metric_vm_encrypted_file, operation,
                                                                       encryption_handler)
    project = dao.get_project_by_id(operation.fk_launched_in)
    operation_dir = HPCSchedulerClient.file_handler.get_project_folder(project, str(operation.id))
    h5_filenames = EncryptionHandler(simulator_gid).decrypt_files_to_dir(simulation_results, operation_dir)
    return h5_filenames, metric_op, metric_file
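# A minimal standalone sketch (not TVB code) of the classification step used in the three
# stage-out variants above: staged-out encrypted files are routed by basename prefix into a
# metric H5, a metric view-model H5, and plain simulation results. The prefixes below mirror
# the ones checked above; the helper itself and the file names are hypothetical.
import os


def classify_results(encrypted_files, metric_prefix="DatatypeMeasure",
                     metric_vm_prefix="TimeseriesMetricsAdapterModel"):
    metric_file = None
    metric_vm_file = None
    simulation_results = []
    for path in encrypted_files:
        name = os.path.basename(path)
        if name.startswith(metric_prefix):
            metric_file = path
        elif name.startswith(metric_vm_prefix):
            metric_vm_file = path
        else:
            simulation_results.append(path)
    return metric_file, metric_vm_file, simulation_results


# Example: the metric H5 and its view-model are separated from the simulation result H5 files.
files = ["DatatypeMeasure_abc.h5.aes", "TimeseriesMetricsAdapterModel_abc.h5.aes", "TimeSeriesRegion_xyz.h5.aes"]
print(classify_results(files))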
def launch(self, view_model):
    # type: (TimeseriesMetricsAdapterModel) -> [DatatypeMeasureIndex]
    """
    Launch algorithm and build results.

    :param view_model: carries the time series on which the algorithms are run and the list of
        algorithms to apply; each algorithm is a subclass of BaseTimeseriesMetricAlgorithm
        (KuramotoIndex, GlobalVariance, VarianceNodeVariance)
    :rtype: `DatatypeMeasureIndex`
    """
    algorithms = view_model.algorithms
    if algorithms is None or len(algorithms) == 0:
        algorithms = list(ALGORITHMS)

    self.log.debug("time_series shape is %s" % str(self.input_shape))
    dt_timeseries = self.load_traited_by_gid(self.input_time_series_index.gid)

    metrics_results = {}
    for algorithm_name in algorithms:
        algorithm = ALGORITHMS[algorithm_name](time_series=dt_timeseries)
        if view_model.segment is not None:
            algorithm.segment = view_model.segment
        if view_model.start_point is not None:
            algorithm.start_point = view_model.start_point

        # Validate that the current algorithm's filter accepts the input time series.
        algorithm_filter = TimeseriesMetricsAdapterForm.get_extra_algorithm_filters().get(algorithm_name)
        if algorithm_filter is not None \
                and not algorithm_filter.get_python_filter_equivalent(self.input_time_series_index):
            self.log.warning('Measure algorithm will not be computed because of incompatibility on input. '
                             'Filters failed on algo: ' + str(algorithm_name))
            continue
        else:
            self.log.debug("Applying measure: " + str(algorithm_name))

        unstored_result = algorithm.evaluate()
        # ----------------- Prepare Float object(s) for the result -----------------
        if isinstance(unstored_result, dict):
            metrics_results.update(unstored_result)
        else:
            metrics_results[algorithm_name] = unstored_result

    result = DatatypeMeasureIndex()
    result.fk_source_gid = self.input_time_series_index.gid
    result.metrics = json.dumps(metrics_results)

    result_path = h5.path_for(self._get_output_path(), DatatypeMeasureH5, result.gid)
    with DatatypeMeasureH5(result_path) as result_h5:
        result_h5.metrics.store(metrics_results)
        result_h5.analyzed_datatype.store(dt_timeseries)
        result_h5.gid.store(uuid.UUID(result.gid))

    return result
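# Standalone sketch (hypothetical stubs, not the TVB ALGORITHMS registry) of the aggregation
# pattern in launch() above: each algorithm's evaluate() returns either a scalar or a dict of
# named values, and everything is merged into one flat dict that is serialized as the
# `metrics` JSON of the resulting measure.
import json


class GlobalVarianceStub:
    def evaluate(self):
        return 0.42                                  # scalar result -> stored under the algorithm name


class VarianceNodeVarianceStub:
    def evaluate(self):
        return {"var_mean": 0.1, "var_max": 0.9}     # dict result -> merged key by key


algorithms = {"GlobalVariance": GlobalVarianceStub(), "VarianceNodeVariance": VarianceNodeVarianceStub()}
metrics_results = {}
for name, algo in algorithms.items():
    result = algo.evaluate()
    if isinstance(result, dict):
        metrics_results.update(result)
    else:
        metrics_results[name] = result

print(json.dumps(metrics_results))                   # {"GlobalVariance": 0.42, "var_mean": 0.1, "var_max": 0.9}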
def prepare_index_for_metric_result(self, operation, result_filename, burst):
    self.logger.debug("Preparing index for metric result in operation {}...".format(operation.id))
    index = h5.index_for_h5_file(result_filename)()
    with DatatypeMeasureH5(result_filename) as dti_h5:
        index.gid = dti_h5.gid.load().hex
        index.metrics = json.dumps(dti_h5.metrics.load())
        index.fk_source_gid = dti_h5.analyzed_datatype.load().hex
    index.fk_from_operation = operation.id
    index.fk_parent_burst = burst.gid

    datatype_group = dao.get_datatypegroup_by_op_group_id(operation.fk_operation_group)
    self.logger.debug("Found DatatypeGroup with id {} for operation {}".format(datatype_group.id, operation.id))
    index.fk_datatype_group = datatype_group.id

    self.logger.debug("Prepared index {} for results in operation {}...".format(index.summary_info, operation.id))
    return index
def build(analyzed_entity_index, analyzed_entity, operation, datatype_group, metrics='{"v": 3}'):
    measure = DatatypeMeasureIndex()
    measure.metrics = metrics
    measure.source = analyzed_entity_index
    measure.fk_from_operation = operation.id
    measure.fk_datatype_group = datatype_group.id
    measure = dao.store_entity(measure)

    dm = DatatypeMeasure(analyzed_datatype=analyzed_entity, metrics=json.loads(metrics))
    dm_path = h5.path_for_stored_index(measure)
    with DatatypeMeasureH5(dm_path) as dm_h5:
        dm_h5.store(dm)
        dm_h5.store_generic_attributes(GenericAttributes())

    return measure
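# Standalone sketch of the dual representation visible in build() above: the relational index
# keeps the metrics as a JSON string, while the H5/HasTraits side keeps them as a parsed dict.
# The two classes below are simplified stand-ins, not the TVB originals.
import json


class MeasureIndexStub:
    metrics = None                                   # JSON string column on the DB index


class DatatypeMeasureStub:
    def __init__(self, metrics):
        self.metrics = metrics                       # plain dict stored in the H5 file


metrics_json = '{"GlobalVariance": 0.42}'
index = MeasureIndexStub()
index.metrics = metrics_json                                  # string, as persisted via the index
dm = DatatypeMeasureStub(metrics=json.loads(metrics_json))    # dict, as stored in the measure H5

assert json.loads(index.metrics) == dm.metrics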