def test_export_simulator_configuration(self, operation_factory, connectivity_factory):
    """ Test export of a simulator configuration """
    # Build a minimal simulator view-model wired to a 4-region connectivity.
    op = operation_factory()
    view_model = SimulatorAdapterModel()
    view_model.connectivity = connectivity_factory(4).gid

    # Persist a burst configuration pointing at the operation and the view-model.
    burst = BurstConfiguration(self.test_project.id)
    burst.fk_simulation = op.id
    burst.simulator_gid = view_model.gid.hex
    burst = dao.store_entity(burst)

    # Serialize the view-model to H5 inside the operation's project folder.
    project_folder = FilesHelper().get_project_folder(self.test_project, str(op.id))
    with SimulatorH5(h5.path_for(project_folder, SimulatorH5, view_model.gid)) as h5_file:
        h5_file.store(view_model)

    export_file = self.export_manager.export_simulator_configuration(burst.id)

    assert export_file is not None, "Export process should return path to export file"
    assert os.path.exists(export_file), "Could not find export file: %s on disk." % export_file
    assert zipfile.is_zipfile(export_file), "Generated file is not a valid ZIP file"
def test_export_simulator_configuration(self, operation_factory, connectivity_index_factory):
    """ Test export of a simulator configuration """
    # Create a simulation operation that already stores its view-model,
    # linked to a freshly imported connectivity.
    conn_gid = uuid.UUID(connectivity_index_factory().gid)
    sim_operation = operation_factory(is_simulation=True, store_vm=True,
                                      test_project=self.test_project, conn_gid=conn_gid)

    # Persist the burst configuration referencing that operation.
    burst = BurstConfiguration(self.test_project.id)
    burst.fk_simulation = sim_operation.id
    burst.simulator_gid = sim_operation.view_model_gid
    burst.name = "Test_burst"
    burst = dao.store_entity(burst)

    # Write the burst H5 into the operation folder so the export can find it.
    op_folder = StorageInterface().get_project_folder(self.test_project.name, str(sim_operation.id))
    BurstService().store_burst_configuration(burst, op_folder)

    export_file = self.export_manager.export_simulator_configuration(burst.id)

    assert export_file is not None, "Export process should return path to export file"
    assert os.path.exists(export_file), "Could not find export file: %s on disk." % export_file
    assert zipfile.is_zipfile(export_file), "Generated file is not a valid ZIP file"
def test_load_burst_only(self):
    """ Loading a burst read-only should set the load flag and the PSE setup URL in session. """
    # Import a connectivity and create the operation the burst will point to.
    zip_path = path.join(path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_66.zip')
    connectivity = TestFactory.import_zip_connectivity(self.test_user, self.test_project, zip_path, "John")
    op = TestFactory.create_operation(test_user=self.test_user, test_project=self.test_project)

    burst_config = BurstConfiguration(self.test_project.id)
    burst_config.fk_simulation = op.id
    burst_config.simulator_gid = self.session_stored_simulator.gid.hex
    burst_config.name = 'Test_Burst'
    burst_config = dao.store_entity(burst_config)

    # Fill the mocked cherrypy session with the request parameters.
    self.sess_mock['burst_id'] = str(burst_config.id)
    self.sess_mock['connectivity'] = connectivity.gid
    self.sess_mock['conduction_speed'] = "3.0"
    self.sess_mock['coupling'] = "Sigmoidal"

    with patch('cherrypy.session', self.sess_mock, create=True):
        common.add2session(common.KEY_SIMULATOR_CONFIG, self.session_stored_simulator)
        self.simulator_controller.set_connectivity(**self.sess_mock._data)
        self.simulator_controller.set_stimulus(**self.sess_mock._data)

    # Serialize the simulator so load_burst_read_only has something to read back.
    storage_path = FilesHelper().get_project_folder(self.test_project, str(op.id))
    SimulatorSerializer().serialize_simulator(self.session_stored_simulator, None, storage_path)

    with patch('cherrypy.session', self.sess_mock, create=True):
        self.simulator_controller.load_burst_read_only(str(burst_config.id))
        is_simulator_load = common.get_from_session(KEY_IS_SIMULATOR_LOAD)
        is_simulator_copy = common.get_from_session(KEY_IS_SIMULATOR_COPY)
        last_loaded_form_url = common.get_from_session(KEY_LAST_LOADED_FORM_URL)

    assert is_simulator_load, "Simulator Load Flag should be True!"
    assert not is_simulator_copy, "Simulator Copy Flag should be False!"
    assert last_loaded_form_url == '/burst/setup_pse', "Incorrect last form URL!"
def test_export(self):
    """ Exporting a burst through the controller should produce a file on disk. """
    op = TestFactory.create_operation(test_user=self.test_user, test_project=self.test_project)

    burst_config = BurstConfiguration(self.test_project.id)
    burst_config.fk_simulation = op.id
    burst_config.simulator_gid = self.session_stored_simulator.gid.hex
    burst_config = dao.store_entity(burst_config)

    # Serialize the simulator H5 into the operation folder.
    storage_path = FilesHelper().get_project_folder(self.test_project, str(op.id))
    h5_path = h5.path_for(storage_path, SimulatorH5, self.session_stored_simulator.gid)
    with SimulatorH5(h5_path) as h5_file:
        h5_file.store(self.session_stored_simulator)

    burst = dao.get_bursts_for_project(self.test_project.id)
    self.sess_mock['burst_id'] = str(burst[0].id)

    with patch('cherrypy.session', self.sess_mock, create=True):
        # NOTE(review): the first add2session stores the simulator under KEY_BURST_CONFIG
        # and is immediately overwritten by the next line — presumably KEY_SIMULATOR_CONFIG
        # was intended; confirm before changing, behavior kept as-is here.
        common.add2session(common.KEY_BURST_CONFIG, self.session_stored_simulator)
        common.add2session(common.KEY_BURST_CONFIG, burst_config)
        result = self.simulator_controller.export(str(burst[0].id))

    assert path.exists(result.input.name), "Simulation was not exported!"
def store_burst(project_id, operation=None):
    """
    Build and persist BurstConfiguration entity.

    When an operation is given, the burst is fully populated (finished status,
    PSE ranges, simulator gid) and its H5 form is written via BurstService;
    otherwise a bare entity is stored.
    """
    burst = BurstConfiguration(project_id)
    if operation is not None:
        burst.name = 'dummy_burst'
        burst.status = BurstConfiguration.BURST_FINISHED
        burst.start_time = datetime.now()
        burst.range1 = '["conduction_speed", {"lo": 50, "step": 1.0, "hi": 100.0}]'
        burst.range2 = '["connectivity", null]'
        burst.fk_simulation = operation.id
        burst.simulator_gid = uuid.uuid4().hex
        BurstService().store_burst_configuration(burst)
    return dao.store_entity(burst)
def get_burst_for_migration(burst_id, burst_match_dict, date_format, selected_db):
    """
    This method is supposed to only be used when migrating from version 4 to version 5.
    It finds a BurstConfig in the old format (when it did not inherit from HasTraitsIndex)
    and returns its parameters (removal of the old row is handled by the caller).

    :param burst_id: id (as string) of the old-format burst row
    :param burst_match_dict: maps already-migrated old burst ids to new burst ids
    :param date_format: format used by string2date for sqlite-stored datetimes
    :param selected_db: 'sqlite' or another backend name; sqlite stores dates as strings
    :returns: (BurstConfiguration, bool) — the entity and whether it is newly built;
              (None, False) when no row with the given id exists
    """
    session = SA_SESSIONMAKER()
    try:
        # Bind burst_id as a query parameter instead of concatenating it into the
        # SQL text: avoids injection/quoting issues with the raw string query.
        burst_params = session.execute(
            """SELECT * FROM "BURST_CONFIGURATION" WHERE id = :burst_id""",
            {'burst_id': burst_id}).fetchone()
    finally:
        # Always release the session, even if the query raises.
        session.close()

    if burst_params is None:
        return None, False

    burst_params_dict = {'datatypes_number': burst_params['datatypes_number'],
                         'dynamic_ids': burst_params['dynamic_ids'],
                         'range_1': burst_params['range1'],
                         'range_2': burst_params['range2'],
                         'fk_project': burst_params['fk_project'],
                         'name': burst_params['name'],
                         'status': burst_params['status'],
                         'error_message': burst_params['error_message'],
                         'start_time': burst_params['start_time'],
                         'finish_time': burst_params['finish_time'],
                         'fk_simulation': burst_params['fk_simulation'],
                         'fk_operation_group': burst_params['fk_operation_group'],
                         'fk_metric_operation_group': burst_params['fk_metric_operation_group']}

    if selected_db == 'sqlite':
        # SQLite stores datetimes as strings; convert back to datetime objects.
        # Guard against NULL columns (e.g. an unfinished burst has no finish_time) —
        # string2date on None would raise.
        for key in ('start_time', 'finish_time'):
            if burst_params_dict[key] is not None:
                burst_params_dict[key] = string2date(burst_params_dict[key], date_format=date_format)

    if burst_id not in burst_match_dict:
        # First time we see this burst: rebuild a new-format entity from the row.
        burst_config = BurstConfiguration(burst_params_dict['fk_project'])
        # These attributes carry the same name in the dict and on the entity.
        for attr in ('datatypes_number', 'dynamic_ids', 'error_message', 'finish_time',
                     'fk_metric_operation_group', 'fk_operation_group', 'fk_project',
                     'fk_simulation', 'name', 'start_time', 'status'):
            setattr(burst_config, attr, burst_params_dict[attr])
        # Range keys are spelled differently on the entity.
        burst_config.range1 = burst_params_dict['range_1']
        burst_config.range2 = burst_params_dict['range_2']
        new_burst = True
    else:
        # Already migrated earlier in this run: reuse the stored entity.
        burst_config = dao.get_burst_by_id(burst_match_dict[burst_id])
        new_burst = False
    return burst_config, new_burst
def load_datatype_from_file(self, current_file, op_id, datatype_group=None, current_project_id=None):
    # type: (str, int, DataTypeGroup, int) -> HasTraitsIndex
    """
    Creates an instance of datatype from storage / H5 file
    :returns: DatatypeIndex
    """
    self.logger.debug("Loading DataType from file: %s" % current_file)
    h5_class = H5File.h5_class_from_file(current_file)
    if h5_class is BurstConfigurationH5:
        # Burst configurations get a dedicated path: rebuild the entity directly
        # from its H5 file rather than going through the datatype index registry.
        if current_project_id is None:
            # NOTE(review): op_id is resolved through get_operationgroup_by_id here —
            # presumably op_id refers to an operation group in this branch; confirm.
            op_entity = dao.get_operationgroup_by_id(op_id)
            current_project_id = op_entity.fk_launched_in
        h5_file = BurstConfigurationH5(current_file)
        burst = BurstConfiguration(current_project_id)
        burst.fk_simulation = op_id
        h5_file.load_into(burst)
        result = burst
    else:
        # Generic datatype: load the traited object plus its linked GIDs,
        # then build the matching index entity from the registry.
        datatype, generic_attributes = h5.load_with_links(current_file)
        index_class = h5.REGISTRY.get_index_for_datatype(datatype.__class__)
        datatype_index = index_class()
        datatype_index.fill_from_has_traits(datatype)
        datatype_index.fill_from_generic_attributes(generic_attributes)
        # Add all the required attributes
        if datatype_group:
            datatype_index.fk_datatype_group = datatype_group.id
            if len(datatype_group.subject) == 0:
                # First member of the group supplies the group's subject.
                datatype_group.subject = datatype_index.subject
                dao.store_entity(datatype_group)
        datatype_index.fk_from_operation = op_id
        # Record on-disk size only when the backing H5 file actually exists.
        associated_file = h5.path_for_stored_index(datatype_index)
        if os.path.exists(associated_file):
            datatype_index.disk_size = FilesHelper.compute_size_on_disk(associated_file)
        result = datatype_index
    return result
def test_export_simulator_configuration(self, operation_factory):
    """ Test export of a simulator configuration """
    # A simulation operation that stores its view-model is all the export needs.
    sim_operation = operation_factory(is_simulation=True, store_vm=True)

    burst = BurstConfiguration(self.test_project.id)
    burst.fk_simulation = sim_operation.id
    burst.simulator_gid = sim_operation.view_model_gid
    burst = dao.store_entity(burst)

    export_file = self.export_manager.export_simulator_configuration(burst.id)

    assert export_file is not None, "Export process should return path to export file"
    assert os.path.exists(export_file), "Could not find export file: %s on disk." % export_file
    assert zipfile.is_zipfile(export_file), "Generated file is not a valid ZIP file"
def load_datatype_from_file(self, current_file, op_id, datatype_group=None, current_project_id=None):
    # type: (str, int, DataTypeGroup, int) -> HasTraitsIndex
    """
    Creates an instance of datatype from storage / H5 file
    :returns: DatatypeIndex
    """
    self.logger.debug("Loading DataType from file: %s" % current_file)
    h5_class = H5File.h5_class_from_file(current_file)
    if h5_class is BurstConfigurationH5:
        # Burst configurations get a dedicated path: rebuild the entity directly
        # from its H5 file rather than going through the datatype index registry.
        if current_project_id is None:
            # NOTE(review): op_id is resolved through get_operationgroup_by_id here —
            # presumably op_id refers to an operation group in this branch; confirm.
            op_entity = dao.get_operationgroup_by_id(op_id)
            current_project_id = op_entity.fk_launched_in
        h5_file = BurstConfigurationH5(current_file)
        burst = BurstConfiguration(current_project_id)
        burst.fk_simulation = op_id
        h5_file.load_into(burst)
        result = burst
    else:
        datatype, generic_attributes = h5.load_with_links(current_file)
        # Refuse to import a group member whose datatype already exists in storage.
        already_existing_datatype = h5.load_entity_by_gid(datatype.gid)
        if datatype_group is not None and already_existing_datatype is not None:
            raise DatatypeGroupImportException("The datatype group that you are trying to import"
                                               " already exists!")
        index_class = h5.REGISTRY.get_index_for_datatype(datatype.__class__)
        datatype_index = index_class()
        datatype_index.fill_from_has_traits(datatype)
        datatype_index.fill_from_generic_attributes(generic_attributes)
        # A measure-group member points at a source entity (fk_source_gid). If that
        # source is absent, the partially-imported group is rolled back: detach the
        # first operation from the group, delete the group entities, then fail.
        if datatype_group is not None and hasattr(datatype_index, 'fk_source_gid') and \
                datatype_index.fk_source_gid is not None:
            ts = h5.load_entity_by_gid(datatype_index.fk_source_gid)
            if ts is None:
                op = dao.get_operations_in_group(datatype_group.fk_operation_group,
                                                 only_first_operation=True)
                op.fk_operation_group = None
                dao.store_entity(op)
                dao.remove_entity(OperationGroup, datatype_group.fk_operation_group)
                dao.remove_entity(DataTypeGroup, datatype_group.id)
                raise DatatypeGroupImportException("Please import the time series group before importing the"
                                                   " datatype measure group!")
        # Add all the required attributes
        if datatype_group:
            datatype_index.fk_datatype_group = datatype_group.id
            if len(datatype_group.subject) == 0:
                # First member of the group supplies the group's subject.
                datatype_group.subject = datatype_index.subject
                dao.store_entity(datatype_group)
        datatype_index.fk_from_operation = op_id
        # Record on-disk size only when the backing H5 file actually exists.
        associated_file = h5.path_for_stored_index(datatype_index)
        if os.path.exists(associated_file):
            datatype_index.disk_size = StorageInterface.compute_size_on_disk(associated_file)
        result = datatype_index
    return result