class SimulatorService(object):
    """Service layer for preparing, storing and launching simulations (single runs and PSE sweeps)."""

    def __init__(self):
        self.logger = get_logger(self.__class__.__module__)
        self.burst_service = BurstService()
        self.operation_service = OperationService()
        self.files_helper = FilesHelper()

    def _reset_model(self, session_stored_simulator):
        """Replace the model with a fresh default instance and recompute monitor variables of interest."""
        session_stored_simulator.model = type(session_stored_simulator.model)()
        vi_indexes = MonitorForm.determine_indexes_for_chosen_vars_of_interest(session_stored_simulator)
        vi_indexes = numpy.array(list(vi_indexes.values()))
        for monitor in session_stored_simulator.monitors:
            monitor.variables_of_interest = vi_indexes

    def reset_at_connectivity_change(self, is_simulator_copy, form, session_stored_simulator):
        """
        In case the user copies a simulation and changes the Connectivity, we want to reset the Model and Noise
        parameters because they might not fit to the new Connectivity's nr of regions.
        """
        if is_simulator_copy and form.connectivity.value != session_stored_simulator.connectivity:
            self._reset_model(session_stored_simulator)
            if issubclass(type(session_stored_simulator.integrator), IntegratorStochastic):
                # Stochastic integrators carry noise arrays sized to the old connectivity; start fresh.
                session_stored_simulator.integrator.noise = type(session_stored_simulator.integrator.noise)()

    def reset_at_surface_change(self, is_simulator_copy, form, session_stored_simulator):
        """
        In case the user copies a surface-simulation and changes the Surface, we want to reset the Model
        parameters because they might not fit to the new Surface's nr of vertices.
        """
        surface_was_added = session_stored_simulator.surface is None and form.surface.value
        surface_was_changed = (session_stored_simulator.surface
                               and form.surface.value != session_stored_simulator.surface.surface_gid)
        if is_simulator_copy and (surface_was_added or surface_was_changed):
            self._reset_model(session_stored_simulator)

    @staticmethod
    def _set_simulator_range_parameter(simulator, range_parameter_name, range_parameter_value):
        """Set a (possibly nested, dot-separated) attribute on the simulator to the given range value."""
        *parents, leaf = range_parameter_name.split('.')
        target = simulator
        for parent_name in parents:
            target = getattr(target, parent_name)
        setattr(target, leaf, range_parameter_value)

    def async_launch_and_prepare_simulation(self, burst_config, user, project, simulator_algo,
                                            session_stored_simulator):
        """Persist the simulator view-model and burst configuration, then launch the operation.

        Returns the launched Operation on success; on failure the burst is marked finished with the error.
        """
        try:
            operation = self.operation_service.prepare_operation(user.id, project.id, simulator_algo,
                                                                 session_stored_simulator.gid)
            ga = self.operation_service._prepare_metadata(simulator_algo.algorithm_category, {}, None,
                                                          burst_config.gid)
            session_stored_simulator.generic_attributes = ga
            storage_path = self.files_helper.get_project_folder(project, str(operation.id))
            h5.store_view_model(session_stored_simulator, storage_path)
            burst_config = self.burst_service.update_simulation_fields(burst_config.id, operation.id,
                                                                       session_stored_simulator.gid)
            self.burst_service.store_burst_configuration(burst_config, storage_path)
            wf_errs = 0
            try:
                OperationService().launch_operation(operation.id, True)
                return operation
            except Exception as excep:
                self.logger.error(excep)
                wf_errs += 1
                if burst_config:
                    self.burst_service.mark_burst_finished(burst_config, error_message=str(excep))
            self.logger.debug("Finished launching workflow. The operation was launched successfully, " +
                              str(wf_errs) + " had error on pre-launch steps")
        except Exception as excep:
            # Top-level guard: log and mark the burst as failed so the UI does not hang on a running state.
            self.logger.error(excep)
            if burst_config:
                self.burst_service.mark_burst_finished(burst_config, error_message=str(excep))

    def prepare_simulation_on_server(self, user_id, project, algorithm, zip_folder_path, simulator_file):
        """Load a simulator view-model from file, prepare its operation and launch it on the server."""
        simulator_vm = h5.load_view_model_from_file(simulator_file)
        operation = self.operation_service.prepare_operation(user_id, project.id, algorithm, simulator_vm.gid)
        storage_operation_path = self.files_helper.get_project_folder(project, str(operation.id))
        self.async_launch_simulation_on_server(operation, zip_folder_path, storage_operation_path)
        return operation

    def async_launch_simulation_on_server(self, operation, zip_folder_path, storage_operation_path):
        """Move staged files into the operation folder and launch; the zip folder is removed on success."""
        try:
            for staged_file in os.listdir(zip_folder_path):
                shutil.move(os.path.join(zip_folder_path, staged_file), storage_operation_path)
            try:
                OperationService().launch_operation(operation.id, True)
                shutil.rmtree(zip_folder_path)
                return operation
            except Exception as excep:
                self.logger.error(excep)
        except Exception as excep:
            self.logger.error(excep)

    @staticmethod
    def _set_range_param_in_dict(param_value):
        """Normalize a range value for JSON storage: first element of an ndarray, hex of a UUID, else as-is."""
        if isinstance(param_value, uuid.UUID):
            return param_value.hex
        if type(param_value) is numpy.ndarray:
            return param_value[0]
        return param_value

    def async_launch_and_prepare_pse(self, burst_config, user, project, simulator_algo, range_param1, range_param2,
                                     session_stored_simulator):
        """Prepare and launch one operation per point of the PSE grid; returns the first operation."""
        try:
            algo_category = simulator_algo.algorithm_category
            operation_group = burst_config.operation_group
            metric_operation_group = burst_config.metric_operation_group
            operations = []
            # A single [None] keeps the inner loop alive when only one range parameter was chosen.
            range_param2_values = [None]
            if range_param2:
                range_param2_values = range_param2.get_range_values()
            first_simulator = None
            ga = self.operation_service._prepare_metadata(simulator_algo.algorithm_category, {}, operation_group,
                                                          burst_config.gid)
            session_stored_simulator.generic_attributes = ga
            for param1_value in range_param1.get_range_values():
                for param2_value in range_param2_values:
                    # Copy, but generate a new GUID for every Simulator in PSE
                    simulator = copy.deepcopy(session_stored_simulator)
                    simulator.gid = uuid.uuid4()
                    self._set_simulator_range_parameter(simulator, range_param1.name, param1_value)
                    ranges = {range_param1.name: self._set_range_param_in_dict(param1_value)}
                    if param2_value is not None:
                        self._set_simulator_range_parameter(simulator, range_param2.name, param2_value)
                        ranges[range_param2.name] = self._set_range_param_in_dict(param2_value)
                    ranges = json.dumps(ranges)
                    operation = self.operation_service.prepare_operation(user.id, project.id, simulator_algo,
                                                                         simulator.gid, operation_group, ranges)
                    storage_path = self.files_helper.get_project_folder(project, str(operation.id))
                    h5.store_view_model(simulator, storage_path)
                    operations.append(operation)
                    if first_simulator is None:
                        first_simulator = simulator
            first_operation = operations[0]
            storage_path = self.files_helper.get_project_folder(project, str(first_operation.id))
            burst_config = self.burst_service.update_simulation_fields(burst_config.id, first_operation.id,
                                                                       first_simulator.gid)
            self.burst_service.store_burst_configuration(burst_config, storage_path)
            datatype_group = DataTypeGroup(operation_group, operation_id=first_operation.id,
                                           fk_parent_burst=burst_config.gid,
                                           state=algo_category.defaultdatastate)
            dao.store_entity(datatype_group)
            metrics_datatype_group = DataTypeGroup(metric_operation_group, fk_parent_burst=burst_config.gid)
            dao.store_entity(metrics_datatype_group)
            wf_errs = 0
            for operation in operations:
                try:
                    OperationService().launch_operation(operation.id, True)
                except Exception as excep:
                    self.logger.error(excep)
                    wf_errs += 1
                    self.burst_service.mark_burst_finished(burst_config, error_message=str(excep))
            self.logger.debug("Finished launching workflows. " + str(len(operations) - wf_errs) +
                              " were launched successfully, " + str(wf_errs) + " had error on pre-launch steps")
            return first_operation
        except Exception as excep:
            self.logger.error(excep)
            self.burst_service.mark_burst_finished(burst_config, error_message=str(excep))

    def load_from_zip(self, zip_file, project):
        """Import a simulator configuration zip; returns (simulator view-model, burst configuration)."""
        import_service = ImportService()
        simulator_folder = import_service.import_simulator_configuration_zip(zip_file)
        simulator_h5_filename = DirLoader(simulator_folder, None).find_file_for_has_traits_type(SimulatorAdapterModel)
        simulator_h5_filepath = os.path.join(simulator_folder, simulator_h5_filename)
        simulator = h5.load_view_model_from_file(simulator_h5_filepath)
        burst_config = self.burst_service.load_burst_configuration_from_folder(simulator_folder, project)
        return simulator, burst_config
class SimulatorService(object):
    """Service layer for preparing Operations and launching simulations (single runs and PSE sweeps)."""

    def __init__(self):
        self.logger = get_logger(self.__class__.__module__)
        self.burst_service = BurstService()
        self.operation_service = OperationService()
        self.files_helper = FilesHelper()

    @transactional
    def _prepare_operation(self, project_id, user_id, simulator_id, simulator_gid, algo_category, op_group,
                           metadata, ranges=None):
        """Build, log and persist an Operation entity for the given simulator GID; returns the stored entity."""
        operation_parameters = json.dumps({'gid': simulator_gid.hex})
        metadata, user_group = self.operation_service._prepare_metadata(metadata, algo_category, op_group, {})
        meta_str = json.dumps(metadata)
        op_group_id = op_group.id if op_group else None
        operation = Operation(user_id, project_id, simulator_id, operation_parameters, op_group_id=op_group_id,
                              meta=meta_str, range_values=ranges)
        self.logger.info("Saving Operation(userId=" + str(user_id) + ", projectId=" + str(project_id) + "," +
                         str(metadata) + ", algorithmId=" + str(simulator_id) + ", ops_group= " +
                         str(op_group_id) + ", params=" + str(operation_parameters) + ")")
        operation = dao.store_entity(operation)
        # TODO: prepare portlets/handle operation groups/no workflows
        return operation

    @staticmethod
    def _set_simulator_range_parameter(simulator, range_parameter_name, range_parameter_value):
        """Set a (possibly nested, dot-separated) attribute on the simulator to the given range value."""
        *parents, leaf = range_parameter_name.split('.')
        target = simulator
        for parent_name in parents:
            target = getattr(target, parent_name)
        setattr(target, leaf, range_parameter_value)

    def async_launch_and_prepare_simulation(self, burst_config, user, project, simulator_algo,
                                            session_stored_simulator, simulation_state_gid):
        """Serialize the simulator, wire it to the burst and launch; errors mark the burst finished."""
        try:
            metadata = {DataTypeMetaData.KEY_BURST: burst_config.id}
            simulator_id = simulator_algo.id
            algo_category = simulator_algo.algorithm_category
            operation = self._prepare_operation(project.id, user.id, simulator_id, session_stored_simulator.gid,
                                                algo_category, None, metadata)
            storage_path = self.files_helper.get_project_folder(project, str(operation.id))
            SimulatorSerializer().serialize_simulator(session_stored_simulator, simulation_state_gid, storage_path)
            burst_config = self.burst_service.update_simulation_fields(burst_config.id, operation.id,
                                                                       session_stored_simulator.gid)
            self.burst_service.store_burst_configuration(burst_config, storage_path)
            wf_errs = 0
            try:
                OperationService().launch_operation(operation.id, True)
                return operation
            except Exception as excep:
                self.logger.error(excep)
                wf_errs += 1
                if burst_config:
                    self.burst_service.mark_burst_finished(burst_config, error_message=str(excep))
            self.logger.debug("Finished launching workflow. The operation was launched successfully, " +
                              str(wf_errs) + " had error on pre-launch steps")
        except Exception as excep:
            self.logger.error(excep)
            if burst_config:
                self.burst_service.mark_burst_finished(burst_config, error_message=str(excep))

    def prepare_simulation_on_server(self, user_id, project, algorithm, zip_folder_path, simulator_file):
        """Read the simulator GID from its H5 file, prepare the operation and launch it on the server."""
        with SimulatorH5(simulator_file) as simulator_h5:
            simulator_gid = simulator_h5.gid.load()
        metadata = {}
        simulator_id = algorithm.id
        algo_category = algorithm.algorithm_category
        operation = self._prepare_operation(project.id, user_id, simulator_id, simulator_gid, algo_category,
                                            None, metadata)
        storage_operation_path = self.files_helper.get_project_folder(project, str(operation.id))
        self.async_launch_simulation_on_server(operation, zip_folder_path, storage_operation_path)
        return operation

    def async_launch_simulation_on_server(self, operation, zip_folder_path, storage_operation_path):
        """Move staged files into the operation folder and launch; the zip folder is removed on success."""
        try:
            for staged_file in os.listdir(zip_folder_path):
                shutil.move(os.path.join(zip_folder_path, staged_file), storage_operation_path)
            try:
                OperationService().launch_operation(operation.id, True)
                shutil.rmtree(zip_folder_path)
                return operation
            except Exception as excep:
                self.logger.error(excep)
        except Exception as excep:
            self.logger.error(excep)

    @staticmethod
    def _set_range_param_in_dict(param_value):
        """Normalize a range value for JSON storage: first element of an ndarray, hex of a UUID, else as-is."""
        if isinstance(param_value, uuid.UUID):
            return param_value.hex
        if type(param_value) is numpy.ndarray:
            return param_value[0]
        return param_value

    def async_launch_and_prepare_pse(self, burst_config, user, project, simulator_algo, range_param1, range_param2,
                                     session_stored_simulator):
        """Prepare and launch one operation per point of the PSE grid over one or two range parameters."""
        try:
            simulator_id = simulator_algo.id
            algo_category = simulator_algo.algorithm_category
            operation_group = burst_config.operation_group
            metric_operation_group = burst_config.metric_operation_group
            operations = []
            # A single [None] keeps the inner loop alive when only one range parameter was chosen.
            range_param2_values = [None]
            if range_param2:
                range_param2_values = range_param2.get_range_values()
            first_simulator = None
            for param1_value in range_param1.get_range_values():
                for param2_value in range_param2_values:
                    # Copy, but generate a new GUID for every Simulator in PSE
                    simulator = copy.deepcopy(session_stored_simulator)
                    simulator.gid = uuid.uuid4()
                    self._set_simulator_range_parameter(simulator, range_param1.name, param1_value)
                    ranges = {range_param1.name: self._set_range_param_in_dict(param1_value)}
                    if param2_value is not None:
                        self._set_simulator_range_parameter(simulator, range_param2.name, param2_value)
                        ranges[range_param2.name] = self._set_range_param_in_dict(param2_value)
                    ranges = json.dumps(ranges)
                    operation = self._prepare_operation(project.id, user.id, simulator_id, simulator.gid,
                                                        algo_category, operation_group,
                                                        {DataTypeMetaData.KEY_BURST: burst_config.id}, ranges)
                    storage_path = self.files_helper.get_project_folder(project, str(operation.id))
                    SimulatorSerializer().serialize_simulator(simulator, None, storage_path)
                    operations.append(operation)
                    if first_simulator is None:
                        first_simulator = simulator
            first_operation = operations[0]
            storage_path = self.files_helper.get_project_folder(project, str(first_operation.id))
            burst_config = self.burst_service.update_simulation_fields(burst_config.id, first_operation.id,
                                                                       first_simulator.gid)
            self.burst_service.store_burst_configuration(burst_config, storage_path)
            datatype_group = DataTypeGroup(operation_group, operation_id=first_operation.id,
                                           fk_parent_burst=burst_config.id,
                                           state=json.loads(first_operation.meta_data)[DataTypeMetaData.KEY_STATE])
            dao.store_entity(datatype_group)
            metrics_datatype_group = DataTypeGroup(metric_operation_group, fk_parent_burst=burst_config.id)
            dao.store_entity(metrics_datatype_group)
            wf_errs = 0
            for operation in operations:
                try:
                    OperationService().launch_operation(operation.id, True)
                except Exception as excep:
                    self.logger.error(excep)
                    wf_errs += 1
                    self.burst_service.mark_burst_finished(burst_config, error_message=str(excep))
            self.logger.debug("Finished launching workflows. " + str(len(operations) - wf_errs) +
                              " were launched successfully, " + str(wf_errs) + " had error on pre-launch steps")
        except Exception as excep:
            self.logger.error(excep)
            self.burst_service.mark_burst_finished(burst_config, error_message=str(excep))

    def load_from_zip(self, zip_file, project):
        """Import a simulator configuration zip; returns (simulator, burst configuration)."""
        import_service = ImportService()
        simulator_folder = import_service.import_simulator_configuration_zip(zip_file)
        simulator_h5_filename = DirLoader(simulator_folder, None).find_file_for_has_traits_type(Simulator)
        with SimulatorH5(os.path.join(simulator_folder, simulator_h5_filename)) as sim_h5:
            simulator_gid = sim_h5.gid.load()
        simulator = SimulatorSerializer.deserialize_simulator(simulator_gid, simulator_folder)
        burst_config = self.burst_service.load_burst_configuration_from_folder(simulator_folder, project)
        return simulator, burst_config
class SimulatorService(object):
    """Service layer for serializing simulators to/from H5 and launching simulation operations.

    FIX: async_launch_and_prepare_pse previously initialized range_param2_values to [], so a PSE
    over a single range parameter executed zero loop iterations and then crashed with IndexError
    on operations[0]; it also dereferenced range_param2.name unconditionally (AttributeError when
    range_param2 is None). The loop now defaults to [None] and only applies the second parameter
    when it is present.
    """

    MAX_BURSTS_DISPLAYED = 50
    LAUNCH_NEW = 'new'
    LAUNCH_BRANCH = 'branch'

    def __init__(self):
        self.logger = get_logger(self.__class__.__module__)
        self.operation_service = OperationService()
        self.files_helper = FilesHelper()

    @staticmethod
    def serialize_simulator(simulator, simulator_gid, simulation_state_gid, storage_path):
        """Write the simulator (and optional stimulus/state references) into its H5 file; returns the GID."""
        simulator_path = h5.path_for(storage_path, SimulatorH5, simulator_gid)
        with SimulatorH5(simulator_path) as simulator_h5:
            simulator_h5.gid.store(uuid.UUID(simulator_gid))
            simulator_h5.store(simulator)
            simulator_h5.connectivity.store(simulator.connectivity.gid)
            if simulator.stimulus:
                simulator_h5.stimulus.store(uuid.UUID(simulator.stimulus.gid))
            if simulation_state_gid:
                simulator_h5.simulation_state.store(uuid.UUID(simulation_state_gid))
        return simulator_gid

    @staticmethod
    def deserialize_simulator(simulator_gid, storage_path):
        """Rebuild a Simulator from H5 storage, resolving connectivity, surface and stimulus datatypes.

        Returns (simulator, connectivity_gid, simulation_state_gid).
        NOTE(review): with-block extents reconstructed from collapsed source — only the H5 reads are
        kept inside each context manager; confirm against the original file layout.
        """
        simulator_in_path = h5.path_for(storage_path, SimulatorH5, simulator_gid)
        simulator_in = Simulator()
        with SimulatorH5(simulator_in_path) as simulator_in_h5:
            simulator_in_h5.load_into(simulator_in)
            connectivity_gid = simulator_in_h5.connectivity.load()
            stimulus_gid = simulator_in_h5.stimulus.load()
            simulation_state_gid = simulator_in_h5.simulation_state.load()
        conn_index = dao.get_datatype_by_gid(connectivity_gid.hex)
        conn = h5.load_from_index(conn_index)
        simulator_in.connectivity = conn
        if simulator_in.surface:
            cortex_path = h5.path_for(storage_path, CortexH5, simulator_in.surface.gid)
            with CortexH5(cortex_path) as cortex_h5:
                local_conn_gid = cortex_h5.local_connectivity.load()
                region_mapping_gid = cortex_h5.region_mapping_data.load()
            region_mapping_index = dao.get_datatype_by_gid(region_mapping_gid.hex)
            region_mapping_path = h5.path_for_stored_index(region_mapping_index)
            region_mapping = RegionMapping()
            with RegionMappingH5(region_mapping_path) as region_mapping_h5:
                region_mapping_h5.load_into(region_mapping)
                region_mapping.gid = region_mapping_h5.gid.load()
                surf_gid = region_mapping_h5.surface.load()
            surf_index = dao.get_datatype_by_gid(surf_gid.hex)
            surf_h5 = h5.h5_file_for_index(surf_index)
            surf = CorticalSurface()
            surf_h5.load_into(surf)
            surf_h5.close()
            region_mapping.surface = surf
            simulator_in.surface.region_mapping_data = region_mapping
            if local_conn_gid:
                local_conn_index = dao.get_datatype_by_gid(local_conn_gid.hex)
                local_conn = h5.load_from_index(local_conn_index)
                simulator_in.surface.local_connectivity = local_conn
        if stimulus_gid:
            stimulus_index = dao.get_datatype_by_gid(stimulus_gid.hex)
            stimulus = h5.load_from_index(stimulus_index)
            simulator_in.stimulus = stimulus
        return simulator_in, connectivity_gid, simulation_state_gid

    @transactional
    def _prepare_operation(self, project_id, user_id, simulator_id, simulator_index, algo_category, op_group,
                           metadata, ranges=None):
        """Build, log and persist an Operation entity for the given simulator index; returns the stored entity."""
        operation_parameters = json.dumps({'simulator_gid': simulator_index.gid})
        metadata, user_group = self.operation_service._prepare_metadata(metadata, algo_category, op_group, {})
        meta_str = json.dumps(metadata)
        op_group_id = None
        if op_group:
            op_group_id = op_group.id
        operation = Operation(user_id, project_id, simulator_id, operation_parameters, op_group_id=op_group_id,
                              meta=meta_str, range_values=ranges)
        self.logger.debug("Saving Operation(userId=" + str(user_id) + ",projectId=" + str(project_id) + "," +
                          str(metadata) + ",algorithmId=" + str(simulator_id) + ", ops_group= " +
                          str(op_group_id) + ")")
        # visible_operation = visible and category.display is False
        operation = dao.store_entity(operation)
        # operation.visible = visible_operation
        # TODO: prepare portlets/handle operation groups/no workflows
        return operation

    @staticmethod
    def _set_simulator_range_parameter(simulator, range_parameter_name, range_parameter_value):
        """Set a (possibly nested, dot-separated) attribute on the simulator to the given range value."""
        range_param_name_list = range_parameter_name.split('.')
        current_attr = simulator
        for param_name in range_param_name_list[:-1]:
            current_attr = getattr(current_attr, param_name)
        setattr(current_attr, range_param_name_list[-1], range_parameter_value)

    def async_launch_and_prepare_simulation(self, burst_config, user, project, simulator_algo,
                                            session_stored_simulator, simulation_state_gid):
        """Create the SimulatorIndex, serialize the simulator and launch; errors mark the burst finished."""
        try:
            simulator_index = SimulatorIndex()
            metadata = {}
            if burst_config:
                simulator_index.fk_parent_burst = burst_config.id
                metadata.update({DataTypeMetaData.KEY_BURST: burst_config.id})
            dao.store_entity(simulator_index)
            simulator_id = simulator_algo.id
            algo_category = simulator_algo.algorithm_category
            operation = self._prepare_operation(project.id, user.id, simulator_id, simulator_index,
                                                algo_category, None, metadata)
            # Link the index back to its operation once the operation id exists.
            simulator_index.fk_from_operation = operation.id
            dao.store_entity(simulator_index)
            storage_path = self.files_helper.get_project_folder(project, str(operation.id))
            self.serialize_simulator(session_stored_simulator, simulator_index.gid, simulation_state_gid,
                                     storage_path)
            wf_errs = 0
            try:
                OperationService().launch_operation(operation.id, True)
                return operation
            except Exception as excep:
                self.logger.error(excep)
                wf_errs += 1
                if burst_config:
                    BurstService2().mark_burst_finished(burst_config, error_message=str(excep))
            self.logger.debug("Finished launching workflow. The operation was launched successfully, " +
                              str(wf_errs) + " had error on pre-launch steps")
        except Exception as excep:
            self.logger.error(excep)
            if burst_config:
                BurstService2().mark_burst_finished(burst_config, error_message=str(excep))

    def async_launch_and_prepare_pse(self, burst_config, user, project, simulator_algo, range_param1, range_param2,
                                     session_stored_simulator):
        """Prepare and launch one operation per point of the PSE grid over one or two range parameters."""
        try:
            simulator_id = simulator_algo.id
            algo_category = simulator_algo.algorithm_category
            operation_group = burst_config.operation_group
            metric_operation_group = burst_config.metric_operation_group
            operations = []
            # FIX: default to [None] so a PSE over a single range parameter still iterates;
            # previously [] produced no operations and operations[0] raised IndexError.
            range_param2_values = [None]
            if range_param2:
                range_param2_values = range_param2.get_range_values()
            for param1_value in range_param1.get_range_values():
                for param2_value in range_param2_values:
                    simulator = copy.deepcopy(session_stored_simulator)
                    self._set_simulator_range_parameter(simulator, range_param1.name, param1_value)
                    ranges = {range_param1.name: param1_value[0]}
                    # Only touch the second parameter when it was actually provided.
                    if param2_value is not None:
                        self._set_simulator_range_parameter(simulator, range_param2.name, param2_value)
                        ranges[range_param2.name] = param2_value[0]
                    simulator_index = SimulatorIndex()
                    simulator_index.fk_parent_burst = burst_config.id
                    simulator_index = dao.store_entity(simulator_index)
                    ranges = json.dumps(ranges)
                    operation = self._prepare_operation(project.id, user.id, simulator_id, simulator_index,
                                                        algo_category, operation_group,
                                                        {DataTypeMetaData.KEY_BURST: burst_config.id}, ranges)
                    simulator_index.fk_from_operation = operation.id
                    dao.store_entity(simulator_index)
                    storage_path = self.files_helper.get_project_folder(project, str(operation.id))
                    self.serialize_simulator(simulator, simulator_index.gid, None, storage_path)
                    operations.append(operation)
            first_operation = operations[0]
            datatype_group = DataTypeGroup(operation_group, operation_id=first_operation.id,
                                           fk_parent_burst=burst_config.id,
                                           state=json.loads(first_operation.meta_data)[DataTypeMetaData.KEY_STATE])
            dao.store_entity(datatype_group)
            metrics_datatype_group = DataTypeGroup(metric_operation_group, fk_parent_burst=burst_config.id)
            dao.store_entity(metrics_datatype_group)
            wf_errs = 0
            for operation in operations:
                try:
                    OperationService().launch_operation(operation.id, True)
                except Exception as excep:
                    self.logger.error(excep)
                    wf_errs += 1
                    BurstService2().mark_burst_finished(burst_config, error_message=str(excep))
            self.logger.debug("Finished launching workflows. " + str(len(operations) - wf_errs) +
                              " were launched successfully, " + str(wf_errs) + " had error on pre-launch steps")
        except Exception as excep:
            self.logger.error(excep)
            BurstService2().mark_burst_finished(burst_config, error_message=str(excep))
class SimulatorService(object):
    """Service layer for preparing Operations around SimulatorIndex entities and launching simulations.

    FIX: async_launch_and_prepare_pse previously initialized range_param2_values to [], so a PSE
    over a single range parameter executed zero loop iterations and then crashed with IndexError
    on operations[0]; it also dereferenced range_param2.name unconditionally (AttributeError when
    range_param2 is None). The loop now defaults to [None] and only applies the second parameter
    when it is present.
    """

    MAX_BURSTS_DISPLAYED = 50
    LAUNCH_NEW = 'new'
    LAUNCH_BRANCH = 'branch'

    def __init__(self):
        self.logger = get_logger(self.__class__.__module__)
        self.operation_service = OperationService()
        self.files_helper = FilesHelper()

    @transactional
    def _prepare_operation(self, project_id, user_id, simulator_id, simulator_index, algo_category, op_group,
                           metadata, ranges=None):
        """Build, log and persist an Operation entity for the given simulator index; returns the stored entity."""
        operation_parameters = json.dumps({'gid': simulator_index.gid})
        metadata, user_group = self.operation_service._prepare_metadata(metadata, algo_category, op_group, {})
        meta_str = json.dumps(metadata)
        op_group_id = None
        if op_group:
            op_group_id = op_group.id
        operation = Operation(user_id, project_id, simulator_id, operation_parameters, op_group_id=op_group_id,
                              meta=meta_str, range_values=ranges)
        self.logger.debug("Saving Operation(userId=" + str(user_id) + ",projectId=" + str(project_id) + "," +
                          str(metadata) + ",algorithmId=" + str(simulator_id) + ", ops_group= " +
                          str(op_group_id) + ")")
        # visible_operation = visible and category.display is False
        operation = dao.store_entity(operation)
        # operation.visible = visible_operation
        # TODO: prepare portlets/handle operation groups/no workflows
        return operation

    @staticmethod
    def _set_simulator_range_parameter(simulator, range_parameter_name, range_parameter_value):
        """Set a (possibly nested, dot-separated) attribute on the simulator to the given range value."""
        range_param_name_list = range_parameter_name.split('.')
        current_attr = simulator
        for param_name in range_param_name_list[:-1]:
            current_attr = getattr(current_attr, param_name)
        setattr(current_attr, range_param_name_list[-1], range_parameter_value)

    def async_launch_and_prepare_simulation(self, burst_config, user, project, simulator_algo,
                                            session_stored_simulator, simulation_state_gid):
        """Create the SimulatorIndex, serialize the simulator and launch; errors mark the burst finished."""
        try:
            simulator_index = SimulatorIndex()
            metadata = {}
            if burst_config:
                simulator_index.fk_parent_burst = burst_config.id
                metadata.update({DataTypeMetaData.KEY_BURST: burst_config.id})
            dao.store_entity(simulator_index)
            simulator_id = simulator_algo.id
            algo_category = simulator_algo.algorithm_category
            operation = self._prepare_operation(project.id, user.id, simulator_id, simulator_index,
                                                algo_category, None, metadata)
            # Link the index back to its operation once the operation id exists.
            simulator_index.fk_from_operation = operation.id
            dao.store_entity(simulator_index)
            storage_path = self.files_helper.get_project_folder(project, str(operation.id))
            SimulatorSerializer().serialize_simulator(session_stored_simulator, simulator_index.gid,
                                                      simulation_state_gid, storage_path)
            wf_errs = 0
            try:
                OperationService().launch_operation(operation.id, True)
                return operation
            except Exception as excep:
                self.logger.error(excep)
                wf_errs += 1
                if burst_config:
                    BurstService().mark_burst_finished(burst_config, error_message=str(excep))
            self.logger.debug("Finished launching workflow. The operation was launched successfully, " +
                              str(wf_errs) + " had error on pre-launch steps")
        except Exception as excep:
            self.logger.error(excep)
            if burst_config:
                BurstService().mark_burst_finished(burst_config, error_message=str(excep))

    def prepare_simulation_on_server(self, user_id, project, algorithm, zip_folder_path, simulator_file):
        """Read the simulator GID from its H5 file, prepare the operation and launch it on the server."""
        with SimulatorH5(simulator_file) as simulator_h5:
            simulator_gid = simulator_h5.gid.load()
        simulator_index = SimulatorIndex()
        simulator_index.gid = simulator_gid.hex
        metadata = {}
        simulator_id = algorithm.id
        algo_category = algorithm.algorithm_category
        operation = self._prepare_operation(project.id, user_id, simulator_id, simulator_index, algo_category,
                                            None, metadata)
        storage_operation_path = self.files_helper.get_project_folder(project, str(operation.id))
        self.async_launch_simulation_on_server(operation, zip_folder_path, storage_operation_path)
        return operation

    def async_launch_simulation_on_server(self, operation, zip_folder_path, storage_operation_path):
        """Move staged files into the operation folder and launch; the zip folder is removed on success."""
        try:
            for file in os.listdir(zip_folder_path):
                shutil.move(os.path.join(zip_folder_path, file), storage_operation_path)
            try:
                OperationService().launch_operation(operation.id, True)
                shutil.rmtree(zip_folder_path)
                return operation
            except Exception as excep:
                self.logger.error(excep)
        except Exception as excep:
            self.logger.error(excep)

    def async_launch_and_prepare_pse(self, burst_config, user, project, simulator_algo, range_param1, range_param2,
                                     session_stored_simulator):
        """Prepare and launch one operation per point of the PSE grid over one or two range parameters."""
        try:
            simulator_id = simulator_algo.id
            algo_category = simulator_algo.algorithm_category
            operation_group = burst_config.operation_group
            metric_operation_group = burst_config.metric_operation_group
            operations = []
            # FIX: default to [None] so a PSE over a single range parameter still iterates;
            # previously [] produced no operations and operations[0] raised IndexError.
            range_param2_values = [None]
            if range_param2:
                range_param2_values = range_param2.get_range_values()
            for param1_value in range_param1.get_range_values():
                for param2_value in range_param2_values:
                    simulator = copy.deepcopy(session_stored_simulator)
                    self._set_simulator_range_parameter(simulator, range_param1.name, param1_value)
                    ranges = {range_param1.name: param1_value[0]}
                    # Only touch the second parameter when it was actually provided.
                    if param2_value is not None:
                        self._set_simulator_range_parameter(simulator, range_param2.name, param2_value)
                        ranges[range_param2.name] = param2_value[0]
                    simulator_index = SimulatorIndex()
                    simulator_index.fk_parent_burst = burst_config.id
                    simulator_index = dao.store_entity(simulator_index)
                    ranges = json.dumps(ranges)
                    operation = self._prepare_operation(project.id, user.id, simulator_id, simulator_index,
                                                        algo_category, operation_group,
                                                        {DataTypeMetaData.KEY_BURST: burst_config.id}, ranges)
                    simulator_index.fk_from_operation = operation.id
                    dao.store_entity(simulator_index)
                    storage_path = self.files_helper.get_project_folder(project, str(operation.id))
                    SimulatorSerializer().serialize_simulator(simulator, simulator_index.gid, None, storage_path)
                    operations.append(operation)
            first_operation = operations[0]
            datatype_group = DataTypeGroup(operation_group, operation_id=first_operation.id,
                                           fk_parent_burst=burst_config.id,
                                           state=json.loads(first_operation.meta_data)[DataTypeMetaData.KEY_STATE])
            dao.store_entity(datatype_group)
            metrics_datatype_group = DataTypeGroup(metric_operation_group, fk_parent_burst=burst_config.id)
            dao.store_entity(metrics_datatype_group)
            wf_errs = 0
            for operation in operations:
                try:
                    OperationService().launch_operation(operation.id, True)
                except Exception as excep:
                    self.logger.error(excep)
                    wf_errs += 1
                    BurstService().mark_burst_finished(burst_config, error_message=str(excep))
            self.logger.debug("Finished launching workflows. " + str(len(operations) - wf_errs) +
                              " were launched successfully, " + str(wf_errs) + " had error on pre-launch steps")
        except Exception as excep:
            self.logger.error(excep)
            BurstService().mark_burst_finished(burst_config, error_message=str(excep))