def _export_bursts(self, project, project_datatypes, zip_file):
    """Write burst metadata and the datatype->burst mapping as one JSON file into the export ZIP."""
    bursts_dict = {}
    total_bursts = dao.get_bursts_for_project(project.id, count=True)
    for page_start in range(0, total_bursts, BURST_PAGE_SIZE):
        page = dao.get_bursts_for_project(project.id, page_start=page_start,
                                          page_size=BURST_PAGE_SIZE)
        for burst_entity in page:
            # Keep plain dicts so the whole structure can be JSON-serialized below.
            bursts_dict[burst_entity.id] = self._build_burst_export_dict(burst_entity)

    datatype_burst_mapping = {dt[KEY_DT_GID]: dt[KEY_BURST_ID] for dt in project_datatypes}

    project_folder = FilesHelper().get_project_folder(project)
    bursts_file_name = os.path.join(project_folder, BURST_INFO_FILE)
    burst_info = {BURSTS_DICT_KEY: bursts_dict,
                  DT_BURST_MAP: datatype_burst_mapping}
    zip_file.writestr(os.path.basename(bursts_file_name), json.dumps(burst_info))
def export_project(self, project):
    """
    Given a project root and the TVB storage_path, create a ZIP ready for export.

    :param project: project object which identifies project to be exported
    :returns: path of the resulting ZIP archive
    :raises ExportException: when no project is provided
    """
    if project is None:
        raise ExportException("Please provide project to be exported")

    files_helper = FilesHelper()
    project_folder = files_helper.get_project_folder(project)

    # Gather burst metadata page by page, to avoid loading everything at once.
    bursts_dict = {}
    datatype_burst_mapping = {}
    bursts_count = dao.get_bursts_for_project(project.id, count=True)
    for start_idx in range(0, bursts_count, BURST_PAGE_SIZE):
        bursts = dao.get_bursts_for_project(project.id, page_start=start_idx,
                                            page_end=start_idx + BURST_PAGE_SIZE)
        for burst in bursts:
            self._build_burst_export_dict(burst, bursts_dict)

    # Map each datatype GID to its parent burst, also paged.
    datatypes_count = dao.get_datatypes_for_project(project.id, count=True)
    for start_idx in range(0, datatypes_count, DATAYPES_PAGE_SIZE):
        datatypes = dao.get_datatypes_for_project(project.id, page_start=start_idx,
                                                  page_end=start_idx + DATAYPES_PAGE_SIZE)
        for datatype in datatypes:
            datatype_burst_mapping[datatype.gid] = datatype.fk_parent_burst

    # Compute path and name of the zip file
    now = datetime.now()
    date_str = now.strftime("%Y-%m-%d_%H-%M")
    zip_file_name = "%s_%s.%s" % (date_str, project.name, self.ZIP_FILE_EXTENSION)
    export_folder = self._build_data_export_folder(project)
    result_path = os.path.join(export_folder, zip_file_name)

    # Materialize the burst info file inside the project folder so it gets zipped.
    bursts_file_name = os.path.join(project_folder, BURST_INFO_FILE)
    burst_info = {BURSTS_DICT_KEY: bursts_dict,
                  DT_BURST_MAP: datatype_burst_mapping}
    with open(bursts_file_name, 'w') as bursts_file:
        bursts_file.write(json.dumps(burst_info))

    try:
        # pack project content into a ZIP file
        result_zip = files_helper.zip_folder(result_path, project_folder)
    finally:
        # remove this file even when zipping fails, since we only want it in the export archive
        os.remove(bursts_file_name)

    return result_zip
def get_available_bursts(project_id):
    """
    Return all the burst for the current project.
    """
    return dao.get_bursts_for_project(project_id, page_size=MAX_BURSTS_DISPLAYED) or []
def test_export(self):
    # Exercise SimulatorController.export: persist a burst + simulator index,
    # write the simulator H5, then check the exported file exists.
    op = TestFactory.create_operation()
    simulator_index = SimulatorIndex()
    simulator_index.fill_from_has_traits(self.session_stored_simulator)

    # Persist a burst configuration referencing the simulator index.
    burst_config = BurstConfiguration(self.test_project.id, simulator_index.id)
    burst_config = dao.store_entity(burst_config)

    # Link the simulator index first to the operation, then to the burst (two separate stores).
    simulator_index.fk_from_operation = op.id
    simulator_index = dao.store_entity(simulator_index)
    simulator_index.fk_parent_burst = burst_config.id
    simulator_index = dao.store_entity(simulator_index)

    # Write the simulator's H5 file so the export has a payload on disk.
    simulator_h5 = h5.path_for_stored_index(simulator_index)
    with SimulatorH5(simulator_h5) as h5_file:
        h5_file.store(self.session_stored_simulator)

    burst = dao.get_bursts_for_project(self.test_project.id)
    self.sess_mock['burst_id'] = str(burst[0].id)

    with patch('cherrypy.session', self.sess_mock, create=True):
        # NOTE(review): KEY_BURST_CONFIG is set twice; the second add2session overwrites the first.
        common.add2session(common.KEY_BURST_CONFIG, self.session_stored_simulator)
        common.add2session(common.KEY_BURST_CONFIG, burst_config)
        result = self.simulator_controller.export(str(burst[0].id))

    assert path.exists(result.input.name), "Simulation was not exported!"
def remove_project(self, project_id):
    """
    Remove Project from DB and File Storage.
    """
    try:
        project2delete = dao.get_project_by_id(project_id)

        self.logger.debug("Deleting project: id=" + str(project_id) + ' name=' + project2delete.name)
        # Remove burst entities first, then the project's datatypes.
        project_bursts = dao.get_bursts_for_project(project_id)
        for burst in project_bursts:
            dao.remove_entity(burst.__class__, burst.id)

        project_datatypes = dao.get_datatypes_in_project(project_id)
        for one_data in project_datatypes:
            # Third positional argument True — presumably skips per-datatype checks
            # during bulk deletion; confirm against remove_datatype's signature.
            self.remove_datatype(project_id, one_data.gid, True)

        links = dao.get_links_for_project(project_id)
        for one_link in links:
            dao.remove_entity(Links, one_link.id)

        # Drop the on-disk project structure, then the DB record itself.
        self.structure_helper.remove_project_structure(project2delete.name)
        dao.delete_project(project_id)
        self.logger.debug("Deleted project: id=" + str(project_id) + ' name=' + project2delete.name)

    # Each failure mode is logged and re-raised as a ProjectServiceException.
    except RemoveDataTypeException as excep:
        self.logger.exception("Could not execute operation Node Remove!")
        raise ProjectServiceException(str(excep))
    except FileStructureException as excep:
        self.logger.exception("Could not delete because of rights!")
        raise ProjectServiceException(str(excep))
    except Exception as excep:
        self.logger.exception(str(excep))
        raise ProjectServiceException(str(excep))
def test_export(self):
    # Exercise SimulatorController.export with a burst linked to an operation
    # and a simulator H5 stored under the operation folder.
    op = TestFactory.create_operation(test_user=self.test_user, test_project=self.test_project)

    burst_config = BurstConfiguration(self.test_project.id)
    burst_config.fk_simulation = op.id
    burst_config.simulator_gid = self.session_stored_simulator.gid.hex
    burst_config = dao.store_entity(burst_config)

    # Store the simulator H5 under the operation's folder so export has a payload.
    storage_path = FilesHelper().get_project_folder(self.test_project, str(op.id))
    h5_path = h5.path_for(storage_path, SimulatorH5, self.session_stored_simulator.gid)
    with SimulatorH5(h5_path) as h5_file:
        h5_file.store(self.session_stored_simulator)

    burst = dao.get_bursts_for_project(self.test_project.id)
    self.sess_mock['burst_id'] = str(burst[0].id)

    with patch('cherrypy.session', self.sess_mock, create=True):
        # NOTE(review): KEY_BURST_CONFIG is set twice; the second add2session overwrites the first.
        common.add2session(common.KEY_BURST_CONFIG, self.session_stored_simulator)
        common.add2session(common.KEY_BURST_CONFIG, burst_config)
        result = self.simulator_controller.export(str(burst[0].id))

    assert path.exists(result.input.name), "Simulation was not exported!"
def test_export_import_burst(self, user_factory, project_factory, simulation_launch):
    """
    Test that fk_parent_burst is correctly preserved after export/import
    """
    test_user = user_factory()
    test_project = project_factory(test_user, "TestIESim")
    sim_op = simulation_launch(test_user, test_project, simulation_length=10)
    # Poll for simulation completion: up to 5 attempts, 5 seconds apart (~25s budget).
    tries = 5
    while not sim_op.has_finished and tries > 0:
        sleep(5)
        tries = tries - 1
        sim_op = dao.get_operation_by_id(sim_op.id)
    assert sim_op.has_finished, "Simulation did not finish in the given time"

    self.zip_path = ExportManager().export_project(test_project)
    assert self.zip_path is not None, "Exported file is none"
    # Round-trip: delete the project, then restore it from the exported archive.
    self.project_service.remove_project(test_project.id)
    self.import_service.import_project_structure(self.zip_path, test_user.id)

    retrieved_project = self.project_service.retrieve_projects_for_user(test_user.id)[0][0]
    ts = try_get_last_datatype(retrieved_project.id, TimeSeriesRegionIndex)
    bursts = dao.get_bursts_for_project(retrieved_project.id)
    assert 1 == len(bursts)
    # The re-imported time series must still reference the (single) burst's gid.
    assert ts.fk_parent_burst == bursts[0].gid
def remove_project(self, project_id):
    """
    Remove Project from DB and File Storage.

    :param project_id: ID of the project to delete
    :raises ProjectServiceException: wrapping a RemoveDataTypeException raised
        while removing one of the project's datatypes
    """
    try:
        project2delete = dao.get_project_by_id(project_id)

        self.logger.debug("Deleting project: id=" + str(project_id) + ' name=' + project2delete.name)
        # Remove burst entities first, then the project's datatypes.
        project_bursts = dao.get_bursts_for_project(project_id)
        for burst in project_bursts:
            dao.remove_entity(burst.__class__, burst.id)

        project_datatypes = dao.get_datatypes_in_project(project_id)
        for one_data in project_datatypes:
            # Third positional argument True — presumably skips per-datatype checks
            # during bulk deletion; confirm against remove_datatype's signature.
            self.remove_datatype(project_id, one_data.gid, True)

        links = dao.get_links_for_project(project_id)
        for one_link in links:
            dao.remove_entity(model.Links, one_link.id)

        # Drop the on-disk project structure, then the DB record itself.
        self.structure_helper.remove_project_structure(project2delete.name)
        dao.delete_project(project_id)
        self.logger.debug("Deleted project: id=" + str(project_id) + ' name=' + project2delete.name)

    # Python 3-compatible handler (was the legacy "except RemoveDataTypeException, excep",
    # a syntax error on Python 3; "as" is valid since Python 2.6).
    except RemoveDataTypeException as excep:
        self.logger.exception("Could not execute operation Node Remove!")
        raise ProjectServiceException(str(excep))
def test_rename_burst(self):
    # Exercise SimulatorController.rename_burst and verify the DB was updated.
    new_name = "Test Burst Configuration 2"
    operation = TestFactory.create_operation()
    burst_config = TestFactory.store_burst(self.test_project.id, operation)
    burst = dao.get_bursts_for_project(self.test_project.id)
    self.sess_mock['burst_id'] = str(burst[0].id)
    self.sess_mock['burst_name'] = new_name

    with patch('cherrypy.session', self.sess_mock, create=True):
        # NOTE(review): KEY_BURST_CONFIG is set twice; the second add2session overwrites the first.
        common.add2session(common.KEY_BURST_CONFIG, self.session_stored_simulator)
        common.add2session(common.KEY_BURST_CONFIG, burst_config)
        result = self.simulator_controller.rename_burst(burst[0].id, new_name)

    # The controller returns a JSON string on success.
    assert result == '{"success": "Simulation successfully renamed!"}', \
        "Some error happened at renaming, probably because of invalid new name."
    # Re-read from the DB to confirm the rename was actually persisted.
    assert dao.get_bursts_for_project(self.test_project.id)[0].name == new_name, "Name wasn't actually changed."
def export_project(self, project):
    """
    Given a project root and the TVB storage_path, create a ZIP ready for export.

    :param project: project object which identifies project to be exported
    :returns: path of the resulting ZIP archive
    :raises ExportException: when no project is provided
    """
    if project is None:
        raise ExportException("Please provide project to be exported")

    files_helper = FilesHelper()
    project_folder = files_helper.get_project_folder(project)

    # Gather burst metadata page by page, to avoid loading everything at once.
    bursts_dict = {}
    datatype_burst_mapping = {}
    bursts_count = dao.get_bursts_for_project(project.id, count=True)
    for start_idx in range(0, bursts_count, BURST_PAGE_SIZE):
        bursts = dao.get_bursts_for_project(project.id, page_start=start_idx,
                                            page_end=start_idx + BURST_PAGE_SIZE)
        for burst in bursts:
            self._build_burst_export_dict(burst, bursts_dict)

    # Map each datatype GID to its parent burst, also paged.
    datatypes_count = dao.get_datatypes_for_project(project.id, count=True)
    for start_idx in range(0, datatypes_count, DATAYPES_PAGE_SIZE):
        datatypes = dao.get_datatypes_for_project(project.id, page_start=start_idx,
                                                  page_end=start_idx + DATAYPES_PAGE_SIZE)
        for datatype in datatypes:
            datatype_burst_mapping[datatype.gid] = datatype.fk_parent_burst

    # Compute path and name of the zip file
    now = datetime.now()
    date_str = now.strftime("%Y-%m-%d_%H-%M")
    zip_file_name = "%s_%s.%s" % (date_str, project.name, self.ZIP_FILE_EXTENSION)
    export_folder = self._build_data_export_folder(project)
    result_path = os.path.join(export_folder, zip_file_name)

    # Materialize the burst info file inside the project folder so it gets zipped.
    bursts_file_name = os.path.join(project_folder, BURST_INFO_FILE)
    burst_info = {BURSTS_DICT_KEY: bursts_dict, DT_BURST_MAP: datatype_burst_mapping}
    with open(bursts_file_name, 'w') as bursts_file:
        bursts_file.write(json.dumps(burst_info))

    try:
        # pack project content into a ZIP file
        result_zip = files_helper.zip_folder(result_path, project_folder)
    finally:
        # remove this file even when zipping fails, since we only want it in the export archive
        os.remove(bursts_file_name)

    return result_zip
def get_available_bursts(project_id):
    """
    Return all the burst for the current project.
    """
    loaded_bursts = dao.get_bursts_for_project(project_id, page_size=MAX_BURSTS_DISPLAYED) or []
    for one_burst in loaded_bursts:
        one_burst.prepare_after_load()
    return loaded_bursts
def get_available_bursts(project_id):
    """
    Return all the bursts for the current project (at most MAX_BURSTS_DISPLAYED).

    :param project_id: ID of the project whose bursts are listed
    :returns: list of burst entities (possibly empty, never None)
    """
    # Removed the commented-out prepare_after_load loop that was left behind here;
    # dead code should live in version control history, not in the source.
    return dao.get_bursts_for_project(project_id, page_size=MAX_BURSTS_DISPLAYED) or []
def test_load_burst_only(self):
    # Import a connectivity datatype to configure the simulator with.
    zip_path = path.join(path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_66.zip')
    TestFactory.import_zip_connectivity(self.test_user, self.test_project, zip_path, "John")
    connectivity = TestFactory.get_entity(self.test_project, ConnectivityIndex)

    # Persist a simulator index and a burst configuration referencing it.
    simulator_index = SimulatorIndex()
    simulator_index.fill_from_has_traits(self.session_stored_simulator)
    burst_config = BurstConfiguration(self.test_project.id, simulator_index.id)
    burst_config = dao.store_entity(burst_config)
    # NOTE(review): fk_from_operation is assigned the burst id, not an operation id — confirm intended.
    simulator_index.fk_from_operation = burst_config.id
    simulator_index = dao.store_entity(simulator_index)
    simulator_index.fk_parent_burst = burst_config.id
    simulator_index = dao.store_entity(simulator_index)

    burst = dao.get_bursts_for_project(self.test_project.id)
    self.sess_mock['burst_id'] = str(burst[0].id)
    self.sess_mock['_connectivity'] = connectivity.gid
    self.sess_mock['_conduction_speed'] = "3.0"
    self.sess_mock['_coupling'] = "Sigmoidal"

    with patch('cherrypy.session', self.sess_mock, create=True):
        common.add2session(common.KEY_SIMULATOR_CONFIG, self.session_stored_simulator)
        self.simulator_controller.set_connectivity(**self.sess_mock._data)
        self.simulator_controller.set_stimulus(**self.sess_mock._data)

    # Serialize the configured simulator to disk so load_burst_read_only can find it.
    storage_path = FilesHelper().get_project_folder(self.test_project, str(simulator_index.fk_from_operation))
    # NOTE(review): simulator_service is created but never used in this test.
    simulator_service = SimulatorService()
    SimulatorSerializer().serialize_simulator(self.session_stored_simulator, simulator_index.gid, None, storage_path)

    with patch('cherrypy.session', self.sess_mock, create=True):
        self.simulator_controller.load_burst_read_only(str(burst[0].id))
        is_simulator_load = common.get_from_session(KEY_IS_SIMULATOR_LOAD)
        is_simulator_copy = common.get_from_session(KEY_IS_SIMULATOR_COPY)
        last_loaded_form_url = common.get_from_session(KEY_LAST_LOADED_FORM_URL)

    # The stored simulator must be retrievable through its burst foreign key,
    # and the session flags must reflect a read-only (non-copy) load.
    database_simulator = dao.get_generic_entity(SimulatorIndex, burst_config.id, 'fk_parent_burst')[0]
    assert simulator_index.gid == database_simulator.gid, "Simulator was not added correctly!"
    assert is_simulator_load, "Simulator Load Flag should be True!"
    assert not is_simulator_copy, "Simulator Copy Flag should be False!"
    assert last_loaded_form_url == '/burst/setup_pse', "Incorrect last form URL!"
def _export_bursts(self, project, project_datatypes, zip_file):
    """Collect burst metadata and the datatype->burst map, then store them as JSON in the ZIP."""
    all_bursts = {}
    burst_total = dao.get_bursts_for_project(project.id, count=True)
    page_start = 0
    # Walk the bursts one page at a time, to bound memory use.
    while page_start < burst_total:
        current_page = dao.get_bursts_for_project(project.id, page_start=page_start,
                                                  page_size=BURST_PAGE_SIZE)
        for current_burst in current_page:
            self._build_burst_export_dict(current_burst, all_bursts)
        page_start += BURST_PAGE_SIZE

    dt_to_burst = {entry[KEY_DT_GID]: entry[KEY_BURST_ID] for entry in project_datatypes}

    info_path = os.path.join(FilesHelper().get_project_folder(project), BURST_INFO_FILE)
    payload = {BURSTS_DICT_KEY: all_bursts, DT_BURST_MAP: dt_to_burst}
    zip_file.writestr(os.path.basename(info_path), json.dumps(payload))
def _check_burst_removed(self):
    """
    Test that a burst was properly removed. This means checking that the burst entity,
    any workflow steps and any datatypes resulted from the burst are also removed.
    """
    assert 0 == len(dao.get_bursts_for_project(self.test_project.id)), "Burst was not deleted"
    assert 0 == dao.get_operation_numbers(self.test_project.id)[0], "Operations were not deleted."
    assert 0 == len(dao.get_datatypes_in_project(self.test_project.id))
    # No specific datatype rows may survive either.
    checks = ((Datatype1, "Specific datatype entries for DataType1 were not deleted."),
              (Datatype2, "Specific datatype entries for DataType2 were not deleted."))
    for entity_class, failure_message in checks:
        assert 0 == self.count_all_entities(entity_class), failure_message
def test_get_history_status(self):
    # Persist a burst that looks "running" (start_time set, never finished).
    burst_config = BurstConfiguration(self.test_project.id)
    burst_config.start_time = datetime.now()
    dao.store_entity(burst_config)
    burst = dao.get_bursts_for_project(self.test_project.id)
    self.sess_mock['burst_ids'] = '["' + str(burst[0].id) + '"]'

    with patch('cherrypy.session', self.sess_mock, create=True):
        # NOTE(review): KEY_BURST_CONFIG is set twice; the second add2session overwrites the first.
        common.add2session(common.KEY_BURST_CONFIG, self.session_stored_simulator)
        common.add2session(common.KEY_BURST_CONFIG, burst_config)
        result = self.simulator_controller.get_history_status(**self.sess_mock._data).split(',')

    # After splitting on ',', the fields are: [id, status, is_group, message, run_time];
    # the slicing below strips the surrounding JSON punctuation from each field.
    assert int(result[0][2:]) == burst[0].id, "Incorrect burst was used."
    assert result[1] == ' "running"', "Status should be set to running."
    assert result[2] == ' false', "Burst shouldn't be group."
    assert result[3] == ' ""', "Message should be empty, which means that there shouldn't be any errors."
    assert int(result[4][2:-4]) >= 0, "Running time should be greater than or equal to 0."
def _check_burst_removed(self):
    """
    Test that a burst was properly removed. This means checking that the burst entity,
    any workflow steps and any datatypes resulted from the burst are also removed.
    """
    self.assertEqual(0, len(dao.get_bursts_for_project(self.test_project.id)), "Burst was not deleted")
    self.assertEqual(0, dao.get_operation_numbers(self.test_project.id)[0], "Operations were not deleted.")
    self.assertEqual(0, len(dao.get_datatypes_in_project(self.test_project.id)))
    # Workflow steps and the specific datatype rows must be gone as well.
    self.assertEqual(0, self.count_all_entities(model.WorkflowStep), "Workflow steps were not deleted.")
    self.assertEqual(0, self.count_all_entities(Datatype1),
                     "Specific datatype entries for DataType1 were not deleted.")
    self.assertEqual(0, self.count_all_entities(Datatype2),
                     "Specific datatype entries for DataType2 were not deleted.")
def remove_project(self, project_id):
    """
    Remove Project from DB and File Storage.

    :param project_id: ID of the project to delete
    :raises ProjectServiceException: when one of the project's datatypes cannot be removed
    """
    try:
        project2delete = dao.get_project_by_id(project_id)
        # Remove burst entities first, then the project's datatypes.
        project_bursts = dao.get_bursts_for_project(project_id)
        for burst in project_bursts:
            dao.remove_entity(burst.__class__, burst.id)

        project_datatypes = dao.get_datatypes_info_for_project(project_id)
        for one_data in project_datatypes:
            # NOTE(review): index 9 of the info tuple — presumably the datatype GID;
            # confirm against the DAO's get_datatypes_info_for_project result layout.
            self.remove_datatype(project_id, one_data[9], True)

        # Drop the on-disk structure, then the DB record.
        self.structure_helper.remove_project_structure(project2delete.name)
        name = project2delete.name
        dao.delete_project(project_id)
        self.logger.debug("Deleted project: id=" + str(project_id) + ' name=' + name)
    # Python 3-compatible handler (was the legacy "except RemoveDataTypeError, excep");
    # str(excep) replaces the long-deprecated BaseException.message attribute.
    except RemoveDataTypeError as excep:
        self.logger.error("Invalid DataType to remove!")
        self.logger.exception(excep)
        raise ProjectServiceException(str(excep))
def remove_project(self, project_id):
    """
    Remove Project from DB and File Storage.
    """
    try:
        project2delete = dao.get_project_by_id(project_id)

        self.logger.debug("Deleting project: id=" + str(project_id) + ' name=' + project2delete.name)
        project_datatypes = dao.get_datatypes_in_project(project_id)
        # Delete in reverse chronological order — presumably so derived (newer)
        # datatypes are removed before the datatypes they were computed from; confirm.
        project_datatypes.sort(key=lambda dt: dt.create_date, reverse=True)
        for one_data in project_datatypes:
            # Third positional argument True — presumably skips per-datatype checks
            # during bulk deletion; confirm against remove_datatype's signature.
            self.remove_datatype(project_id, one_data.gid, True)

        links = dao.get_links_for_project(project_id)
        for one_link in links:
            dao.remove_entity(Links, one_link.id)

        project_bursts = dao.get_bursts_for_project(project_id)
        for burst in project_bursts:
            dao.remove_entity(burst.__class__, burst.id)

        project_folder = self.structure_helper.get_project_folder(project2delete)
        self.structure_helper.remove_project_structure(project2delete.name)
        # Also clean up the encrypted mirror of the project folder and its key file, when present.
        encrypted_path = encryption_handler.compute_encrypted_folder_path(project_folder)
        if os.path.exists(encrypted_path):
            self.structure_helper.remove_folder(encrypted_path)
        if os.path.exists(encryption_handler.project_key_path(project_id)):
            os.remove(encryption_handler.project_key_path(project_id))
        dao.delete_project(project_id)
        self.logger.debug("Deleted project: id=" + str(project_id) + ' name=' + project2delete.name)

    # Each failure mode is logged and re-raised as a ProjectServiceException.
    except RemoveDataTypeException as excep:
        self.logger.exception("Could not execute operation Node Remove!")
        raise ProjectServiceException(str(excep))
    except FileStructureException as excep:
        self.logger.exception("Could not delete because of rights!")
        raise ProjectServiceException(str(excep))
    except Exception as excep:
        self.logger.exception(str(excep))
        raise ProjectServiceException(str(excep))
def _update_burst_configurations(self, project_id):
    """Recompute and persist datatypes_number for every burst of the given project."""
    for one_burst in dao.get_bursts_for_project(project_id):
        one_burst.datatypes_number = dao.count_datatypes_in_burst(one_burst.gid)
        dao.store_entity(one_burst)