def create_operation(test_user=None, test_project=None, operation_status=STATUS_FINISHED):
    """
    Create and persist an Operation for the TVB importer algorithm.

    Missing user/project arguments are replaced with freshly created test
    entities, and a minimal importer ViewModel is written next to the
    operation so the entity is fully usable.

    :return: Operation entity re-read from the DB after persistence.
    """
    # Fall back to freshly created test entities when none were supplied.
    if test_user is None:
        test_user = TestFactory.create_user()
    if test_project is None:
        test_project = TestFactory.create_project(test_user)

    # Build the importer adapter and a minimal view model for it.
    importer_algo = dao.get_algorithm_by_module(TVB_IMPORTER_MODULE, TVB_IMPORTER_CLASS)
    importer_adapter = ABCAdapter.build_adapter(importer_algo)
    vm = importer_adapter.get_view_model_class()()
    vm.data_file = "."

    operation = Operation(vm.gid.hex, test_user.id, test_project.id, importer_algo.id,
                          status=operation_status)
    dao.store_entity(operation)

    # Persist the view model H5 inside the operation's folder.
    operation_folder = StorageInterface().get_project_folder(test_project.name, str(operation.id))
    h5.store_view_model(vm, operation_folder)

    # Re-read so all DB-populated attributes are loaded.
    return dao.get_operation_by_id(operation.id)
def build(user=None, project=None, op=None, nr_regions=76, monitor=None, with_surface=False, conn_gid=None):
    """
    Build a SimulatorAdapterModel, resolve its connectivity (and optionally a
    cortical surface), store it as H5 in the operation folder and return the
    storage path plus the model GID.

    :param user: optional user for the implicitly created operation
    :param project: optional project for the implicitly created operation
    :param op: operation to store under; created via operation_factory when None
    :param nr_regions: size of the connectivity created when none is supplied
    :param monitor: monitor view model; defaults to a fresh TemporalAverageViewModel
    :param with_surface: when True, attach a CortexViewModel from a region mapping
    :param conn_gid: explicit connectivity GID, skips connectivity creation
    :return: (storage_path, model_gid) tuple
    """
    # FIX: the previous default `monitor=TemporalAverageViewModel()` was a
    # mutable default argument — one shared instance evaluated at definition
    # time and reused (and potentially mutated) across all calls. Create a
    # fresh monitor per call instead.
    if monitor is None:
        monitor = TemporalAverageViewModel()
    model = SimulatorAdapterModel()
    model.monitors = [monitor]
    if not op:
        op = operation_factory(test_user=user, test_project=project)
    if conn_gid:
        model.connectivity = conn_gid
    if not with_surface and not conn_gid:
        # No explicit connectivity: create one of the requested size.
        model.connectivity = connectivity_index_factory(nr_regions, op).gid
    model.simulation_length = 100
    if with_surface:
        # Surface simulations reuse the region mapping's connectivity/surface
        # and run much shorter to keep test time down.
        rm_idx = region_mapping_index_factory()
        model.connectivity = rm_idx.fk_connectivity_gid
        model.surface = CortexViewModel()
        model.surface.surface_gid = rm_idx.fk_surface_gid
        model.surface.region_mapping_data = rm_idx.gid
        model.simulation_length = 10
    storage_path = FilesHelper().get_project_folder(op.project, str(op.id))
    h5.store_view_model(model, storage_path)
    return storage_path, model.gid
def test_store_simulator_view_model_eeg(connectivity_index_factory, surface_index_factory, region_mapping_factory,
                                        sensors_index_factory, operation_factory):
    """
    Store a SimulatorAdapterModel carrying an EEG monitor to H5 and load it
    back, asserting the monitor's projection GID survives the round-trip.
    """
    # Build the datatypes the EEG monitor references.
    conn = connectivity_index_factory()
    surface_idx, surface = surface_index_factory(cortical=True)
    region_mapping = region_mapping_factory()
    sensors_idx, sensors = sensors_index_factory()
    proj = ProjectionSurfaceEEG(sensors=sensors, sources=surface, projection_data=numpy.ones(3))
    op = operation_factory()
    storage_path = FilesHelper().get_project_folder(op.project, str(op.id))
    # Persist the projection so the view model reference can resolve at load time.
    prj_db_db = h5.store_complete(proj, storage_path)
    prj_db_db.fk_from_operation = op.id
    dao.store_entity(prj_db_db)
    seeg_monitor = EEGViewModel(projection=proj.gid, sensors=sensors.gid)
    seeg_monitor.region_mapping = region_mapping.gid.hex
    sim_view_model = SimulatorAdapterModel()
    sim_view_model.connectivity = conn.gid
    sim_view_model.monitors = [seeg_monitor]
    # A second operation provides a fresh folder for the simulator view model.
    op = operation_factory()
    storage_path = FilesHelper().get_project_folder(op.project, str(op.id))
    h5.store_view_model(sim_view_model, storage_path)
    loaded_sim_view_model = h5.load_view_model(sim_view_model.gid, storage_path)
    assert isinstance(sim_view_model, SimulatorAdapterModel)
    assert isinstance(loaded_sim_view_model, SimulatorAdapterModel)
    assert sim_view_model.monitors[0].projection == loaded_sim_view_model.monitors[0].projection
def launch_operation(self, project_gid, algorithm_class, view_model, temp_folder):
    """
    Store the view model H5 in temp_folder and POST it (plus any extra upload
    files declared by an ABCUploader form) to the launch-operation REST endpoint.

    :param project_gid: GID of the destination project
    :param algorithm_class: adapter class identifying the algorithm to launch
    :param view_model: view model instance to serialize and send
    :param temp_folder: folder where the view model H5 is written
    :return: the server response of the POST request
    """
    # Local import: only this method needs ExitStack.
    from contextlib import ExitStack

    h5_file_path = h5.path_by_dir(temp_folder, ViewModelH5, view_model.gid)
    h5.store_view_model(view_model, temp_folder)

    # FIX: all file objects used to be opened and never closed (handle leak).
    # Keep them on an ExitStack so they stay open for the duration of the POST
    # and are closed deterministically afterwards.
    with ExitStack() as stack:
        model_file_obj = stack.enter_context(open(h5_file_path, 'rb'))
        files = {RequestFileKey.LAUNCH_ANALYZERS_MODEL_FILE.value: (os.path.basename(h5_file_path), model_file_obj)}
        if issubclass(algorithm_class, ABCUploader):
            # Uploaders declare additional files; attach each one referenced by the view model.
            for key in algorithm_class().get_form_class().get_upload_information().keys():
                path = getattr(view_model, key)
                data_file_obj = stack.enter_context(open(path, 'rb'))
                files[key] = (os.path.basename(path), data_file_obj)
        return self.secured_request().post(
            self.build_request_url(RestLink.LAUNCH_OPERATION.compute_url(True, {
                LinkPlaceholder.PROJECT_GID.value: project_gid,
                LinkPlaceholder.ALG_MODULE.value: algorithm_class.__module__,
                LinkPlaceholder.ALG_CLASSNAME.value: algorithm_class.__name__
            })), files=files)
def create_view_model(self, operation_entity, operation_data, new_op_folder, generic_attributes=None,
                      add_params=None):
    """
    Build, persist (H5 + DB) and return the ViewModel for an imported operation.

    :param operation_entity: Operation row the view model belongs to
    :param operation_data: import payload holding the legacy XML parameters
    :param new_op_folder: destination folder for the view model H5 file
    :param generic_attributes: optional GenericAttributes to attach to the view model
    :param add_params: optional iterable of (attribute_name, field, value) triples
        used to patch nested view model attributes
    :return: the persisted view model instance
    """
    view_model = self._get_new_form_view_model(operation_entity, operation_data.info_from_xml)
    if add_params is not None:
        for element in add_params:
            # element = (view model attribute name, nested field name, value)
            key_attr = getattr(view_model, element[0])
            setattr(key_attr, element[1], element[2])
    view_model.range_values = operation_entity.range_values
    if operation_entity.operation_group:
        # Carry PSE group metadata onto the view model.
        view_model.operation_group_gid = uuid.UUID(operation_entity.operation_group.gid)
        view_model.ranges = json.dumps(operation_entity.operation_group.range_references)
        view_model.is_metric_operation = 'DatatypeMeasure' in operation_entity.operation_group.name
    if generic_attributes is not None:
        view_model.generic_attributes = generic_attributes
    view_model.generic_attributes.operation_tag = operation_entity.user_group
    h5.store_view_model(view_model, new_op_folder)
    # Record storage size and view model GID back on the operation row.
    view_model_disk_size = FilesHelper.compute_recursive_h5_disk_usage(new_op_folder)
    operation_entity.view_model_disk_size = view_model_disk_size
    operation_entity.view_model_gid = view_model.gid.hex
    dao.store_entity(operation_entity)
    return view_model
def test_adapter_huge_memory_requirement(self, test_adapter_factory): """ Test that an MemoryException is raised in case adapter cant launch due to lack of memory. """ # Prepare adapter test_adapter_factory(adapter_class=TestAdapterHugeMemoryRequired) adapter = TestFactory.create_adapter( "tvb.tests.framework.adapters.testadapter3", "TestAdapterHugeMemoryRequired") # Simulate receiving POST data form = TestAdapterHugeMemoryRequiredForm() adapter.submit_form(form) view_model = form.get_view_model()() view_model.test = 5 # Prepare operation for launch operation = Operation(view_model.gid.hex, self.test_user.id, self.test_project.id, adapter.stored_adapter.id, status=STATUS_STARTED) operation = dao.store_entity(operation) # Store ViewModel in H5 parent_folder = FilesHelper().get_project_folder( self.test_project, str(operation.id)) h5.store_view_model(view_model, parent_folder) # Launch operation with pytest.raises(NoMemoryAvailableException): OperationService().initiate_prelaunch(operation, adapter)
def test_server_fire_simulation(self, mocker, connectivity_factory):
    """
    POST a zipped SimulatorAdapterModel to the simulation REST resource and
    check that a string operation GID plus a 201 status come back
    (the actual server-side launch is mocked out).
    """
    self._mock_user(mocker)
    input_folder = self.storage_interface.get_project_folder(self.test_project.name)
    sim_dir = os.path.join(input_folder, 'test_sim')
    if not os.path.isdir(sim_dir):
        os.makedirs(sim_dir)
    simulator = SimulatorAdapterModel()
    simulator.connectivity = connectivity_factory().gid
    h5.store_view_model(simulator, sim_dir)
    # Zip the folder the way a client submission would.
    zip_filename = os.path.join(input_folder, RequestFileKey.SIMULATION_FILE_NAME.value)
    self.storage_interface.write_zip_folder(zip_filename, sim_dir)

    # Mock flask.request.files to return a dictionary
    request_mock = mocker.patch.object(flask, 'request', spec={})
    fp = open(zip_filename, 'rb')
    request_mock.files = {RequestFileKey.SIMULATION_FILE_KEY.value: FileStorage(fp, os.path.basename(zip_filename))}

    def launch_sim(self, user_id, project, algorithm, zip_folder_path, simulator_file):
        # Dummy stand-in for the real server-side launch.
        return Operation('', '', '', {})

    # Mock simulation launch and current user
    mocker.patch.object(SimulatorService, 'prepare_simulation_on_server', launch_sim)
    operation_gid, status = self.simulation_resource.post(project_gid=self.test_project.gid)
    fp.close()
    assert type(operation_gid) is str
    assert status == 201
def store_view_model(operation, project, view_model):
    """
    Persist view_model as H5 in the operation's folder and record the
    resulting recursive disk usage on the operation entity.
    """
    op_folder = StorageInterface().get_project_folder(project.name, str(operation.id))
    h5.store_view_model(view_model, op_folder)
    operation.view_model_disk_size = StorageInterface.compute_recursive_h5_disk_usage(op_folder)
    dao.store_entity(operation)
def store_view_model(operation, project, view_model):
    """
    Write view_model to H5 under the operation's project folder, then update
    the operation row with the measured on-disk size.
    """
    destination = FilesHelper().get_project_folder(project, str(operation.id))
    h5.store_view_model(view_model, destination)
    operation.view_model_disk_size = FilesHelper.compute_recursive_h5_disk_usage(destination)
    dao.store_entity(operation)
def _compute_metrics_for_pse_launch(self, time_series_index):
    # type: (TimeSeriesIndex) -> [DatatypeMeasureIndex]
    """
    Prepare a metrics view model for the given time series and run the HPC
    metrics adapter pre-launch over it.

    :param time_series_index: index of the time series to compute metrics for
    """
    metric_vm = TimeseriesMetricsAdapterModel()
    metric_vm.time_series = time_series_index.gid
    # `choices` is resolved from module scope — presumably the available
    # metric algorithms; TODO confirm against the defining module.
    metric_vm.algorithms = tuple(choices.values())
    h5.store_view_model(metric_vm, self._get_output_path())
    metric_adapter = HPCTimeseriesMetricsAdapter(self._get_output_path(), time_series_index)
    metric_adapter._prelaunch(None, metric_vm, None, self.available_disk_space)
def build(test_user=None, test_project=None, is_simulation=False, store_vm=False,
          operation_status=STATUS_FINISHED, range_values=None, conn_gid=None):
    """
    Create persisted operation with a ViewModel stored.

    The algorithm is the simulator when is_simulation is True, otherwise the
    TVB importer. When store_vm is True a matching view model is built and
    written as H5 into the operation folder.

    :return: Operation entity after persistence.
    """
    if test_user is None:
        test_user = user_factory()
    if test_project is None:
        test_project = project_factory(test_user)
    # Placeholder GID, replaced by the real view model GID when one is built.
    vm_gid = uuid.uuid4()
    view_model = None
    if is_simulation:
        algorithm = dao.get_algorithm_by_module(SIMULATOR_MODULE, SIMULATOR_CLASS)
        if store_vm:
            adapter = ABCAdapter.build_adapter(algorithm)
            view_model = adapter.get_view_model_class()()
            # Reuse the caller's connectivity when given, else create a small one.
            view_model.connectivity = connectivity_factory(4).gid if conn_gid is None else conn_gid
            vm_gid = view_model.gid
    else:
        algorithm = dao.get_algorithm_by_module(TVB_IMPORTER_MODULE, TVB_IMPORTER_CLASS)
        if store_vm:
            adapter = ABCAdapter.build_adapter(algorithm)
            view_model = adapter.get_view_model_class()()
            view_model.data_file = "."
            vm_gid = view_model.gid
    operation = Operation(vm_gid.hex, test_user.id, test_project.id, algorithm.id,
                          status=operation_status, range_values=range_values)
    dao.store_entity(operation)
    if store_vm:
        op_folder = FilesHelper().get_project_folder(test_project, str(operation.id))
        h5.store_view_model(view_model, op_folder)
    # Make sure lazy attributes are correctly loaded.
    return dao.get_operation_by_id(operation.id)
def fire_simulation(self, project_gid, session_stored_simulator, temp_folder):
    """
    Serialize the simulator view model to a temporary folder, zip it, and POST
    the archive to the fire-simulation REST endpoint.

    :param project_gid: GID of the destination project
    :param session_stored_simulator: simulator view model to send
    :param temp_folder: folder in which the zip archive is created
    :return: the server response of the POST request
    """
    temporary_folder = FilesHelper.create_temp_folder()
    h5.store_view_model(session_stored_simulator, temporary_folder)
    zip_folder_path = os.path.join(temp_folder, RequestFileKey.SIMULATION_FILE_NAME.value)
    FilesHelper().zip_folder(zip_folder_path, temporary_folder)
    # The H5 staging folder is no longer needed once zipped.
    shutil.rmtree(temporary_folder)
    # FIX: the zip handle used to be opened and never closed (resource leak);
    # `with` keeps it open for the POST and closes it afterwards.
    with open(zip_folder_path, 'rb') as file_obj:
        return self.secured_request().post(
            self.build_request_url(RestLink.FIRE_SIMULATION.compute_url(True, {
                LinkPlaceholder.PROJECT_GID.value: project_gid
            })),
            files={RequestFileKey.SIMULATION_FILE_KEY.value: (RequestFileKey.SIMULATION_FILE_NAME.value, file_obj)})
def test_store_simulator_view_model(connectivity_index_factory, operation_factory):
    """Round-trip a plain SimulatorAdapterModel through H5 store/load."""
    connectivity = connectivity_index_factory()
    view_model = SimulatorAdapterModel()
    view_model.connectivity = connectivity.gid
    operation = operation_factory()
    folder = FilesHelper().get_project_folder(operation.project, str(operation.id))
    h5.store_view_model(view_model, folder)
    reloaded = h5.load_view_model(view_model.gid, folder)
    assert isinstance(view_model, SimulatorAdapterModel)
    assert isinstance(reloaded, SimulatorAdapterModel)
def test_server_launch_operation(self, mocker, time_series_index_factory):
    """
    POST an FFT view model H5 to the launch REST resource and check that a
    non-empty string operation GID is returned (actual launch mocked out).
    """
    self._mock_user(mocker)
    algorithm_module = "tvb.adapters.analyzers.fourier_adapter"
    algorithm_class = "FourierAdapter"
    input_ts_index = time_series_index_factory()
    fft_model = FFTAdapterModel()
    fft_model.time_series = UUID(input_ts_index.gid)
    fft_model.window_function = list(SUPPORTED_WINDOWING_FUNCTIONS)[0]
    input_folder = self.storage_interface.get_project_folder(self.test_project.name)
    view_model_h5_path = h5.store_view_model(fft_model, input_folder)

    # Mock flask.request.files to return a dictionary
    request_mock = mocker.patch.object(flask, 'request', spec={})
    fp = open(view_model_h5_path, 'rb')
    request_mock.files = {
        RequestFileKey.LAUNCH_ANALYZERS_MODEL_FILE.value: FileStorage(fp, os.path.basename(view_model_h5_path))}

    # Mock launch_operation() call and current_user
    mocker.patch.object(OperationService, 'launch_operation')
    operation_gid, status = self.launch_resource.post(project_gid=self.test_project.gid,
                                                      algorithm_module=algorithm_module,
                                                      algorithm_classname=algorithm_class)
    fp.close()
    assert type(operation_gid) is str
    assert len(operation_gid) > 0
def test_store_simulator_view_model_noise(connectivity_index_factory, operation_factory):
    """Round-trip a simulator VM with a stochastic integrator; the noise seed must survive."""
    connectivity = connectivity_index_factory()
    view_model = SimulatorAdapterModel()
    view_model.connectivity = connectivity.gid
    view_model.integrator = HeunStochasticViewModel()
    view_model.integrator.noise.noise_seed = 45
    operation = operation_factory()
    folder = FilesHelper().get_project_folder(operation.project, str(operation.id))
    h5.store_view_model(view_model, folder)
    reloaded = h5.load_view_model(view_model.gid, folder)
    assert isinstance(view_model, SimulatorAdapterModel)
    assert isinstance(reloaded, SimulatorAdapterModel)
    assert view_model.integrator.noise.noise_seed == reloaded.integrator.noise.noise_seed == 45
def test_gather_view_model_references(connectivity_index_factory, operation_factory):
    """Gathering references of a stored simulator VM yields 5 VM-only files and 6 total."""
    connectivity = connectivity_index_factory()
    view_model = SimulatorAdapterModel()
    view_model.connectivity = connectivity.gid
    operation = operation_factory()
    folder = FilesHelper().get_project_folder(operation.project, str(operation.id))
    h5.store_view_model(view_model, folder)
    vm_only = h5.gather_view_model_references(view_model.gid, folder, True)
    everything = h5.gather_view_model_references(view_model.gid, folder)
    assert len(vm_only) == 5
    assert len(everything) == 6
def test_gather_view_model_and_datatype_references(connectivity_index_factory, operation_factory):
    """Splitting gathered references must give 5 VM files and 6 files overall."""
    connectivity = connectivity_index_factory()
    view_model = SimulatorAdapterModel()
    view_model.connectivity = connectivity.gid
    operation = operation_factory()
    folder = StorageInterface().get_project_folder(operation.project.name, str(operation.id))
    h5.store_view_model(view_model, folder)
    vm_only, _ = h5.gather_references_of_view_model(view_model.gid, folder, True)
    vm_refs, dt_refs = h5.gather_references_of_view_model(view_model.gid, folder)
    assert len(vm_only) == 5
    assert len(vm_refs) + len(dt_refs) == 6
def test_export_simulator_configuration(self, operation_factory, connectivity_factory):
    """
    Test export of a simulator configuration: the export manager must produce
    an existing, valid ZIP file for a stored burst configuration.
    """
    operation = operation_factory()
    simulator = SimulatorAdapterModel()
    simulator.connectivity = connectivity_factory(4).gid
    # Link a burst configuration to the operation and the stored simulator.
    burst_configuration = BurstConfiguration(self.test_project.id)
    burst_configuration.fk_simulation = operation.id
    burst_configuration.simulator_gid = simulator.gid.hex
    burst_configuration = dao.store_entity(burst_configuration)
    storage_path = FilesHelper().get_project_folder(self.test_project, str(operation.id))
    h5.store_view_model(simulator, storage_path)
    export_file = self.export_manager.export_simulator_configuration(burst_configuration.id)
    assert export_file is not None, "Export process should return path to export file"
    assert os.path.exists(export_file), "Could not find export file: %s on disk." % export_file
    assert zipfile.is_zipfile(export_file), "Generated file is not a valid ZIP file"
def async_launch_and_prepare_simulation(self, burst_config, user, project, simulator_algo,
                                        session_stored_simulator):
    """
    Prepare an Operation for the given simulator, persist its view model and
    burst configuration, then launch it. Any failure marks the burst as
    finished with the error message instead of raising.

    :return: the launched Operation on success, otherwise None.
    """
    try:
        operation = self.operation_service.prepare_operation(user.id, project.id, simulator_algo,
                                                             session_stored_simulator.gid)
        ga = self.operation_service._prepare_metadata(simulator_algo.algorithm_category, {}, None,
                                                      burst_config.gid)
        session_stored_simulator.generic_attributes = ga
        storage_path = self.files_helper.get_project_folder(project, str(operation.id))
        h5.store_view_model(session_stored_simulator, storage_path)
        burst_config = self.burst_service.update_simulation_fields(burst_config.id, operation.id,
                                                                   session_stored_simulator.gid)
        self.burst_service.store_burst_configuration(burst_config, storage_path)
        wf_errs = 0
        try:
            OperationService().launch_operation(operation.id, True)
            return operation
        except Exception as excep:
            # Launch failure: log, count and mark the burst failed.
            self.logger.error(excep)
            wf_errs += 1
            if burst_config:
                self.burst_service.mark_burst_finished(burst_config, error_message=str(excep))
        self.logger.debug("Finished launching workflow. The operation was launched successfully, " +
                          str(wf_errs) + " had error on pre-launch steps")
    except Exception as excep:
        # Preparation failure (before launch): also mark the burst failed.
        self.logger.error(excep)
        if burst_config:
            self.burst_service.mark_burst_finished(burst_config, error_message=str(excep))
def test_gather_view_model_and_datatype_references_multiple_monitors(
        connectivity_index_factory, operation_factory, sensors_index_factory, surface_index_factory,
        region_mapping_index_factory):
    """
    With two monitors (temporal average + EEG) the reference gathering must
    report 7 view-model H5 files and 12 referenced files in total.
    """
    conn = connectivity_index_factory()
    _, surface = surface_index_factory(cortical=True)
    region_mapping_idx = region_mapping_index_factory(conn_gid=conn.gid, surface_gid=surface.gid.hex)
    sensors_idx, sensors = sensors_index_factory()
    proj = ProjectionSurfaceEEG(sensors=sensors, sources=surface, projection_data=numpy.ones(3))
    op = operation_factory()
    storage_path = StorageInterface().get_project_folder(op.project.name, str(op.id))
    # Persist the projection so the EEG monitor reference can be resolved.
    prj_db = h5.store_complete(proj, storage_path)
    prj_db.fk_from_operation = op.id
    dao.store_entity(prj_db)
    seeg_monitor = EEGViewModel(projection=proj.gid, sensors=sensors.gid)
    seeg_monitor.region_mapping = region_mapping_idx.gid
    sim_view_model = SimulatorAdapterModel()
    sim_view_model.connectivity = conn.gid
    sim_view_model.monitors = [TemporalAverageViewModel(), seeg_monitor]
    # Store the simulator VM under a fresh operation folder.
    op = operation_factory()
    storage_path = StorageInterface().get_project_folder(op.project.name, str(op.id))
    h5.store_view_model(sim_view_model, storage_path)
    only_vm_references, _ = h5.gather_references_of_view_model(sim_view_model.gid, storage_path, True)
    assert len(only_vm_references) == 7
    vm_references, dt_references = h5.gather_references_of_view_model(sim_view_model.gid, storage_path)
    assert len(vm_references + dt_references) == 12
def async_launch_and_prepare_pse(self, burst_config, user, project, simulator_algo, range_param1,
                                 range_param2, session_stored_simulator):
    """
    Expand a PSE (parameter space exploration) into one Operation per point of
    the cartesian product of the two range parameters, persist a Simulator
    view model per point, create the result/metric DataTypeGroups, then
    launch every operation.

    :return: the first prepared Operation on success, otherwise None.
    """
    try:
        algo_category = simulator_algo.algorithm_category
        operation_group = burst_config.operation_group
        metric_operation_group = burst_config.metric_operation_group
        operations = []
        # A missing second range collapses the product to a single column.
        range_param2_values = [None]
        if range_param2:
            range_param2_values = range_param2.get_range_values()
        first_simulator = None
        ga = self.operation_service._prepare_metadata(simulator_algo.algorithm_category, {},
                                                     operation_group, burst_config.gid)
        session_stored_simulator.generic_attributes = ga
        for param1_value in range_param1.get_range_values():
            for param2_value in range_param2_values:
                # Copy, but generate a new GUID for every Simulator in PSE
                simulator = copy.deepcopy(session_stored_simulator)
                simulator.gid = uuid.uuid4()
                self._set_simulator_range_parameter(simulator, range_param1.name, param1_value)
                ranges = {range_param1.name: self._set_range_param_in_dict(param1_value)}
                if param2_value is not None:
                    self._set_simulator_range_parameter(simulator, range_param2.name, param2_value)
                    ranges[range_param2.name] = self._set_range_param_in_dict(param2_value)
                ranges = json.dumps(ranges)
                operation = self.operation_service.prepare_operation(user.id, project.id, simulator_algo,
                                                                     simulator.gid, operation_group, ranges)
                # Persist this point's simulator view model next to its operation.
                storage_path = self.files_helper.get_project_folder(project, str(operation.id))
                h5.store_view_model(simulator, storage_path)
                operations.append(operation)
                if first_simulator is None:
                    first_simulator = simulator
        first_operation = operations[0]
        storage_path = self.files_helper.get_project_folder(project, str(first_operation.id))
        burst_config = self.burst_service.update_simulation_fields(burst_config.id, first_operation.id,
                                                                   first_simulator.gid)
        self.burst_service.store_burst_configuration(burst_config, storage_path)
        # Group the simulation results and their metrics separately.
        datatype_group = DataTypeGroup(operation_group, operation_id=first_operation.id,
                                       fk_parent_burst=burst_config.gid,
                                       state=algo_category.defaultdatastate)
        dao.store_entity(datatype_group)
        metrics_datatype_group = DataTypeGroup(metric_operation_group, fk_parent_burst=burst_config.gid)
        dao.store_entity(metrics_datatype_group)
        wf_errs = 0
        for operation in operations:
            try:
                OperationService().launch_operation(operation.id, True)
            except Exception as excep:
                # A single failed launch marks the whole burst failed but the
                # remaining operations are still attempted.
                self.logger.error(excep)
                wf_errs += 1
                self.burst_service.mark_burst_finished(burst_config, error_message=str(excep))
        self.logger.debug("Finished launching workflows. " + str(len(operations) - wf_errs) +
                          " were launched successfully, " + str(wf_errs) + " had error on pre-launch steps")
        return first_operation
    except Exception as excep:
        self.logger.error(excep)
        self.burst_service.mark_burst_finished(burst_config, error_message=str(excep))
def build(project=None, store_vm=False, use_time_series_region=False, status=STATUS_FINISHED):
    """
    Build two parallel DataTypeGroups (simulation results + datatype measures)
    for a 3x2 parameter range, one Operation and one TimeSeries index per
    range point, optionally storing a view model per operation.

    :return: (datatype_group, dt_group_ms) tuple of persisted groups.
    """
    # Here we store the name and the (hi, lo, step) value of the range parameters.
    range_1 = ["row1", [1, 2, 6]]
    range_2 = ["row2", [0.1, 0.3, 0.5]]
    # These are the actual numbers in each interval.
    range_values_1 = [1, 3, 5]
    range_values_2 = [0.1, 0.4]
    user = user_factory()
    if project is None:
        project = project_factory(user)
    connectivity = connectivity_factory(4)
    if use_time_series_region:
        # Region time series need connectivity, surface and region mapping
        # indexed under their own operations.
        operation = operation_factory(test_project=project)
        connectivity_index_factory(op=operation, conn=connectivity)
        operation2 = operation_factory(test_project=project)
        surface = surface_factory()
        surface_index_factory(op=operation2, surface=surface)
        operation3 = operation_factory(test_project=project)
        region_mapping = region_mapping_factory(surface=surface, connectivity=connectivity)
        region_mapping_index_factory(op=operation3, conn_gid=connectivity.gid.hex,
                                     surface_gid=surface.gid.hex, region_mapping=region_mapping)
    algorithm = dao.get_algorithm_by_module(SIMULATOR_MODULE, SIMULATOR_CLASS)
    adapter = ABCAdapter.build_adapter(algorithm)
    if store_vm:
        view_model = adapter.get_view_model_class()()
        view_model.connectivity = connectivity.gid
    else:
        view_model = None
    algorithm_ms = dao.get_algorithm_by_module(MEASURE_METRICS_MODULE, MEASURE_METRICS_CLASS)
    adapter = ABCAdapter.build_adapter(algorithm_ms)
    view_model_ms = adapter.get_view_model_class()()
    # Two operation groups: one for simulations, one for the metric operations.
    op_group = OperationGroup(project.id, ranges=[json.dumps(range_1), json.dumps(range_2)])
    op_group = dao.store_entity(op_group)
    op_group_ms = OperationGroup(project.id, ranges=[json.dumps(range_1), json.dumps(range_2)])
    op_group_ms = dao.store_entity(op_group_ms)
    datatype_group = DataTypeGroup(op_group, state="RAW_DATA")
    datatype_group.no_of_ranges = 2
    datatype_group.count_results = 6
    datatype_group = dao.store_entity(datatype_group)
    dt_group_ms = DataTypeGroup(op_group_ms, state="RAW_DATA")
    dt_group_ms.no_of_ranges = 2
    dt_group_ms.count_results = 6
    dao.store_entity(dt_group_ms)
    # Now create some data types and add them to group
    for range_val1 in range_values_1:
        for range_val2 in range_values_2:
            view_model_gid = uuid.uuid4()
            view_model_ms_gid = uuid.uuid4()
            op = Operation(view_model_gid.hex, user.id, project.id, algorithm.id, status=status,
                           op_group_id=op_group.id,
                           range_values=json.dumps({range_1[0]: range_val1, range_2[0]: range_val2}))
            op = dao.store_entity(op)
            if use_time_series_region:
                ts = time_series_region_factory(connectivity=connectivity, region_mapping=region_mapping)
                ts_index = time_series_region_index_factory(ts=ts, connectivity=connectivity,
                                                            region_mapping=region_mapping, test_user=user,
                                                            test_project=project, op=op)
            else:
                ts = time_series_factory()
                ts_index = time_series_index_factory(ts=ts, op=op)
            ts_index.fk_datatype_group = datatype_group.id
            dao.store_entity(ts_index)
            # Companion metric operation + datatype measure for the same point.
            op_ms = Operation(view_model_ms_gid.hex, user.id, project.id, algorithm.id,
                              status=STATUS_FINISHED, op_group_id=op_group_ms.id,
                              range_values=json.dumps({range_1[0]: range_val1, range_2[0]: range_val2}))
            op_ms = dao.store_entity(op_ms)
            datatype_measure_factory(ts_index, ts, op_ms, dt_group_ms)
            if store_vm:
                # Each point gets its own VM copy with the pre-generated GID.
                view_model = copy.deepcopy(view_model)
                view_model.gid = view_model_gid
                op_path = StorageInterface().get_project_folder(project.name, str(op.id))
                h5.store_view_model(view_model, op_path)
                view_model_ms = copy.deepcopy(view_model_ms)
                view_model_ms.gid = view_model_ms_gid
                view_model_ms.time_series = ts_index.gid
                op_ms_path = StorageInterface().get_project_folder(project.name, str(op_ms.id))
                h5.store_view_model(view_model_ms, op_ms_path)
            if not datatype_group.fk_from_operation:
                # Mark first operation ID
                datatype_group.fk_from_operation = op.id
                dt_group_ms.fk_from_operation = op_ms.id
                datatype_group = dao.store_entity(datatype_group)
                dt_group_ms = dao.store_entity(dt_group_ms)
    return datatype_group, dt_group_ms
def import_project_operations(self, project, import_path):
    """
    This method scans provided folder and identify all operations that needs to be imported.

    Handles both the legacy XML operation format and the newer ViewModel H5
    format; operations whose datatype references are missing are kept but
    marked with STATUS_ERROR.

    :return: list of Operation entities successfully imported.
    """
    imported_operations = []
    ordered_operations = self._retrieve_operations_in_order(project, import_path)
    success_no = 0
    for operation_data in ordered_operations:
        if operation_data.is_old_form:
            # Legacy XML-described operation: rebuild entity + view model.
            operation_entity, datatype_group = self.__import_operation(operation_data.operation)
            new_op_folder = self.files_helper.get_project_folder(project, str(operation_entity.id))
            try:
                operation_datatypes = self._load_datatypes_from_operation_folder(
                    operation_data.operation_folder, operation_entity, datatype_group)
                # Create and store view_model from operation
                view_model = self._get_new_form_view_model(operation_entity, operation_data.info_from_xml)
                h5.store_view_model(view_model, new_op_folder)
                operation_entity.view_model_gid = view_model.gid.hex
                dao.store_entity(operation_entity)
                self._store_imported_datatypes_in_db(project, operation_datatypes)
                imported_operations.append(operation_entity)
                success_no = success_no + 1
            except MissingReferenceException:
                operation_entity.status = STATUS_ERROR
                dao.store_entity(operation_entity)
        elif operation_data.main_view_model is not None:
            operation_entity = dao.store_entity(operation_data.operation)
            dt_group = None  # TODO
            # Store the DataTypes in db
            dts = {}
            for dt_path in operation_data.dt_paths:
                dt = self.load_datatype_from_file(dt_path, operation_entity.id, dt_group, project.id)
                if isinstance(dt, BurstConfiguration):
                    dao.store_entity(dt)
                else:
                    dts[dt_path] = dt
            try:
                stored_dts_count = self._store_imported_datatypes_in_db(project, dts)
                if stored_dts_count > 0 or not operation_data.is_self_generated:
                    imported_operations.append(operation_entity)
                    new_op_folder = self.files_helper.get_project_folder(project, str(operation_entity.id))
                    for h5_file in operation_data.all_view_model_files:
                        shutil.move(h5_file, new_op_folder)
                else:
                    # In case all Dts under the current operation were Links and the ViewModel is dummy,
                    # don't keep the Operation empty in DB
                    dao.remove_entity(Operation, operation_entity.id)
            except MissingReferenceException:
                operation_entity.status = STATUS_ERROR
                dao.store_entity(operation_entity)
        else:
            self.logger.warning("Folder %s will be ignored, as we could not find a serialized "
                                "operation or DTs inside!" % operation_data.operation_folder)
    # NOTE(review): this warning is emitted unconditionally, even when every
    # operation imported fine — consider gating it on success_no < total.
    self.logger.warning(
        "Project has been only partially imported because of some missing dependent datatypes. " +
        "%d files were successfully imported from a total of %d!" % (success_no, len(ordered_operations)))
    return imported_operations
def _retrieve_operations_in_order(self, project, import_path):
    # type: (Project, str) -> list[Operation2ImportData]
    """
    Walk import_path and build one Operation2ImportData per folder: either
    from a legacy OPERATION_XML, from a recognized main ViewModel H5, or as a
    default importer operation when only datatype H5 files are present.

    :return: the collected entries sorted by their order_field.
    """
    retrieved_operations = []
    for root, _, files in os.walk(import_path):
        if OPERATION_XML in files:
            # Previous Operation format for uploading previous versions of projects
            operation_file_path = os.path.join(root, OPERATION_XML)
            operation, operation_xml_parameters = self.__build_operation_from_file(project, operation_file_path)
            operation.import_file = operation_file_path
            self.logger.debug("Found operation in old XML format: " + str(operation))
            retrieved_operations.append(
                Operation2ImportData(operation, root, info_from_xml=operation_xml_parameters))
        else:
            # We strive for the new format with ViewModelH5
            main_view_model = None
            dt_paths = []
            all_view_model_files = []
            for file in files:
                if file.endswith(FilesHelper.TVB_STORAGE_FILE_EXTENSION):
                    h5_file = os.path.join(root, file)
                    try:
                        h5_class = H5File.h5_class_from_file(h5_file)
                        if h5_class is ViewModelH5:
                            all_view_model_files.append(h5_file)
                            # The first VM with a registered adapter wins.
                            if not main_view_model:
                                view_model = h5.load_view_model_from_file(h5_file)
                                if type(view_model) in VIEW_MODEL2ADAPTER.keys():
                                    main_view_model = view_model
                        else:
                            # Datatype H5: upgrade in place, then queue for import.
                            file_update_manager = FilesUpdateManager()
                            file_update_manager.upgrade_file(h5_file)
                            dt_paths.append(h5_file)
                    except Exception:
                        self.logger.warning("Unreadable H5 file will be ignored: %s" % h5_file)
            if main_view_model is not None:
                alg = VIEW_MODEL2ADAPTER[type(main_view_model)]
                operation = Operation(main_view_model.gid.hex, project.fk_admin, project.id, alg.id,
                                      status=STATUS_FINISHED,
                                      user_group=main_view_model.generic_attributes.operation_tag,
                                      start_date=datetime.now(), completion_date=datetime.now())
                operation.create_date = main_view_model.create_date
                self.logger.debug("Found main ViewModel to create operation for it: " + str(operation))
                retrieved_operations.append(
                    Operation2ImportData(operation, root, main_view_model, dt_paths, all_view_model_files))
            elif len(dt_paths) > 0:
                # No main VM: synthesize a default importer operation for the DTs.
                alg = dao.get_algorithm_by_module(TVB_IMPORTER_MODULE, TVB_IMPORTER_CLASS)
                default_adapter = ABCAdapter.build_adapter(alg)
                view_model = default_adapter.get_view_model_class()()
                view_model.data_file = dt_paths[0]
                vm_path = h5.store_view_model(view_model, root)
                all_view_model_files.append(vm_path)
                operation = Operation(view_model.gid.hex, project.fk_admin, project.id, alg.id,
                                      status=STATUS_FINISHED, start_date=datetime.now(),
                                      completion_date=datetime.now())
                self.logger.debug("Found no ViewModel in folder, so we default to " + str(operation))
                retrieved_operations.append(
                    Operation2ImportData(operation, root, view_model, dt_paths, all_view_model_files, True))
    return sorted(retrieved_operations, key=lambda op_data: op_data.order_field)
def _update_vm_generic_operation_tag(view_model, operation):
    """
    Copy the operation's user_group onto the view model as its operation tag,
    then re-store the view model H5 inside the operation folder.
    """
    parent_project = dao.get_project_by_id(operation.fk_launched_in)
    op_folder = FilesHelper().get_project_folder(parent_project, str(operation.id))
    view_model.generic_attributes.operation_tag = operation.user_group
    h5.store_view_model(view_model, op_folder)
def import_project_operations(self, project, import_path):
    """
    This method scans provided folder and identify all operations that needs to be imported.

    Older variant without MissingReferenceException handling: legacy XML
    operations and ViewModel-H5 operations are both supported.

    :return: list of imported Operation entities.
    """
    imported_operations = []
    ordered_operations = self._retrieve_operations_in_order(project, import_path)
    for operation_data in ordered_operations:
        if operation_data.is_old_form:
            # Legacy XML-described operation: rebuild entity + view model.
            operation_entity, datatype_group = self.__import_operation(operation_data.operation)
            new_op_folder = self.files_helper.get_project_folder(project, str(operation_entity.id))
            operation_datatypes = self._load_datatypes_from_operation_folder(
                operation_data.operation_folder, operation_entity, datatype_group)
            # Create and store view_model from operation
            view_model = self._get_new_form_view_model(operation_entity)
            h5.store_view_model(view_model, new_op_folder)
            operation_entity.parameters = self._get_param_from_view_model_gid(view_model)
            dao.store_entity(operation_entity)
            self._store_imported_datatypes_in_db(project, operation_datatypes)
            imported_operations.append(operation_entity)
        elif operation_data.main_view_model is not None:
            operation_entity = dao.store_entity(operation_data.operation)
            dt_group = None  # TODO
            # Store the DataTypes in db
            dts = {}
            for dt_path in operation_data.dt_paths:
                dt = self.load_datatype_from_file(dt_path, operation_entity.id, dt_group, project.id)
                if isinstance(dt, BurstConfiguration):
                    dao.store_entity(dt)
                else:
                    dts[dt_path] = dt
            stored_dts_count = self._store_imported_datatypes_in_db(project, dts)
            if stored_dts_count > 0 or not operation_data.is_self_generated:
                imported_operations.append(operation_entity)
                new_op_folder = self.files_helper.get_project_folder(project, str(operation_entity.id))
                for h5_file in operation_data.all_view_model_files:
                    shutil.move(h5_file, new_op_folder)
            else:
                # In case all Dts under the current operation were Links and the ViewModel is dummy,
                # don't keep the Operation empty in DB
                dao.remove_entity(Operation, operation_entity.id)
        else:
            self.logger.warning("Folder %s will be ignored, as we could not find a serialized "
                                "operation or DTs inside!" % operation_data.operation_folder)
    return imported_operations
def _store_view_model(operation, project, view_model):
    """Persist the view model H5 inside the given operation's project folder."""
    target_dir = FilesHelper().get_project_folder(project, str(operation.id))
    h5.store_view_model(view_model, target_dir)