def launch(self, data_file):
    """
    Execute import operations: unpack ZIP, build and store generic DataType objects.
    :param data_file: an archive (ZIP / HDF5) containing the `DataType`
    :raises LaunchException: when data_file is None, nonexistent, or invalid \
                (e.g. incomplete meta-data, not in ZIP / HDF5 format etc. )
    """
    if data_file is None:
        raise LaunchException("Please select file which contains data to import")
    if os.path.exists(data_file):
        if zipfile.is_zipfile(data_file):
            # ZIP branch: treat the archive as a full set of exported operations.
            current_op = dao.get_operation_by_id(self.operation_id)
            # Creates a new TMP folder where to extract data
            tmp_folder = os.path.join(self.storage_path, "tmp_import")
            FilesHelper().unpack_zip(data_file, tmp_folder)
            # NOTE(review): the import scans self.storage_path (the parent of
            # tmp_folder), not tmp_folder itself -- presumably
            # import_project_operations walks sub-folders; confirm intended.
            operations = ImportService().import_project_operations(current_op.project, self.storage_path)
            # Temporary extraction folder is removed once operations are imported.
            shutil.rmtree(tmp_folder)
            self.nr_of_datatypes += len(operations)
        else:
            # HDF5 branch: a single DataType file.
            # upgrade file if necessary
            file_update_manager = FilesUpdateManager()
            file_update_manager.upgrade_file(data_file)
            folder, h5file = os.path.split(data_file)
            manager = HDF5StorageManager(folder, h5file)
            if manager.is_valid_hdf5_file():
                datatype = None
                try:
                    service = ImportService()
                    datatype = service.load_datatype_from_file(folder, h5file, self.operation_id,
                                                               final_storage=self.storage_path)
                    service.store_datatype(datatype)
                    self.nr_of_datatypes += 1
                except Exception as excep:
                    # If import operation failed delete file from disk.
                    if datatype is not None and os.path.exists(datatype.get_storage_file_path()):
                        os.remove(datatype.get_storage_file_path())
                    self.log.exception(excep)
                    raise LaunchException("Invalid file received as input. Most probably incomplete "
                                          "meta-data ... " + str(excep))
            else:
                raise LaunchException("Uploaded file: %s is neither in ZIP or HDF5 format" % data_file)
    else:
        raise LaunchException("File: %s to import does not exists." % data_file)
def setup_method(self):
    """Runs before every test: build fresh service objects and clear state."""
    self.zip_path = None
    self.project_service = ProjectService()
    self.import_service = ImportService()
def load_burst_from_json(self, **data):
    """Upload a Burst configuration from a previously exported JSON file.

    Reads the uploaded file content, parses it as JSON, rebuilds the burst
    entity for the current project and stores it in the web session. Always
    redirects back to the burst page afterwards.

    :param data: CherryPy form parameters; the file is under key 'uploadedfile'.
    """
    self.logger.debug("Uploading ..." + str(data))
    try:
        upload_param = "uploadedfile"
        if upload_param in data and data[upload_param]:
            upload_param = data[upload_param]
            # Accept either a cgi.FieldStorage or a cherrypy Part wrapper.
            if isinstance(upload_param, (FieldStorage, Part)):
                if not upload_param.file:
                    raise BurstServiceException("Please select a valid JSON file.")
                upload_param = upload_param.file.read()
            upload_param = json.loads(upload_param)
            prj_id = common.get_current_project().id
            importer = ImportService()
            burst_entity = importer.load_burst_entity(upload_param, prj_id)
            common.add2session(common.KEY_BURST_CONFIG, burst_entity)
    except Exception as excep:
        # BUG FIX: Python 3 exceptions have no `.message` attribute, so the
        # previous `excep.message` raised AttributeError and hid the real error.
        self.logger.warning(str(excep))
        common.set_error_message(str(excep))
    raise cherrypy.HTTPRedirect('/burst/')
def launch_simulation_workflow(json_path, prj_id):
    """
    Launch a simulation workflow from a configuration previously exported in the GUI.

    :param json_path: Path towards a local JSON file exported from GUI
    :param prj_id: This ID of a project needs to exists in DB, and it can be taken from the WebInterface
    """
    project = dao.get_project_by_id(prj_id)

    with open(json_path, 'rb') as input_file:
        simulation_json = input_file.read()
        simulation_json = json.loads(simulation_json)
        LOG.info("Simulation JSON loaded from file '%s': \n %s", json_path, simulation_json)

    # Rebuild a burst/simulation configuration entity from the raw JSON dict.
    importer = ImportService()
    simulation_config = importer.load_burst_entity(simulation_json, prj_id)
    LOG.info("Simulation Workflow configuration object loaded: \n %s", simulation_config)

    # Locate the simulator adapter registered in the local DB.
    flow_service = FlowService()
    stored_adapter = flow_service.get_algorithm_by_module_and_class(SIMULATOR_MODULE, SIMULATOR_CLASS)
    LOG.info("Found Simulation algorithm in local DB: \n %s", stored_adapter)

    # Launch asynchronously on behalf of the project administrator.
    # NOTE(review): the literal 0 is passed positionally -- presumably a
    # simulation index within the burst; confirm against launch_burst signature.
    burst_service = BurstService()
    burst_service.launch_burst(simulation_config, 0, stored_adapter.id, project.administrator.id, LAUNCH_NEW)
    LOG.info("Check in the web GUI for your operation. It should be starting now ...")
def load_simulator_configuration_from_zip(self, **data):
    """Upload Simulator from previously exported ZIP file.

    Rebuilds the simulator view-model and burst configuration from the archive,
    imports any datatypes bundled with it, seeds the wizard session, and always
    redirects back to the burst page.
    """
    self.logger.debug("Uploading ..." + str(data))
    # Default landing page in the wizard; switched below for PSE bursts.
    last_loaded_form_url = SimulatorWizzardURLs.SETUP_PSE_URL
    try:
        upload_param = "uploadedfile"
        if upload_param in data and data[upload_param]:
            simulator, burst_config, sim_folder = self.burst_service.load_simulation_from_zip(
                data[upload_param], self.context.project)
            # Import the datatypes that were exported alongside the simulation.
            dts_folder = os.path.join(sim_folder, StorageInterface.EXPORTED_SIMULATION_DTS_DIR)
            ImportService().import_project_operations(self.context.project, dts_folder, False, None)
            self.monitors_handler.build_list_of_monitors_from_view_models(simulator)
            if burst_config.is_pse_burst():
                last_loaded_form_url = SimulatorWizzardURLs.LAUNCH_PSE_URL
            self.context.init_session_at_sim_config_from_zip(burst_config, simulator, last_loaded_form_url)
    except IOError as ioexcep:
        self.logger.exception(ioexcep)
        self.context.set_warning_message("This ZIP does not contain a complete simulator configuration")
    except ServicesBaseException as excep:
        # NOTE(review): relies on `excep.message` -- verify ServicesBaseException
        # defines a `message` attribute (plain Python 3 exceptions do not).
        self.logger.warning(excep.message)
        self.context.set_warning_message(excep.message)
    raise cherrypy.HTTPRedirect('/burst/')
def create_user(self, username=None, password=None, password2=None, role=None, email=None, comment=None, email_msg=None, validated=False): """ Service Layer for creating a new user. """ #Basic fields validation. if (username is None) or len(username) < 1: raise UsernameException("Empty UserName!") if (password is None) or len(password) < 1: raise UsernameException("Empty password!") if password2 is None: password2 = password if password != password2: raise UsernameException("Passwords do not match!") try: user_validated = (role == 'ADMINISTRATOR') or validated user = model.User(username, password, email, user_validated, role) if email_msg is None: email_msg = 'Hello ' + username + TEXT_CREATE admin_msg = (TEXT_CREATE_TO_ADMIN + username + ' :\n ' + TvbProfile.current.web.BASE_URL + 'user/validate/' + username + '\n\n"' + str(comment) + '"') self.logger.info("Registering user " + username + " !") if role != 'ADMINISTRATOR' and email is not None: admins = UserService.get_administrators() admin = admins[randint(0, len(admins) - 1)] if admin.email is not None and ( admin.email != TvbProfile.current.web.admin.DEFAULT_ADMIN_EMAIL): # Do not send validation email in case default admin email remained unchanged email_sender.send(FROM_ADDRESS, admin.email, SUBJECT_REGISTER, admin_msg) self.logger.debug("Email sent to:" + admin.email + " for validating user:"******" !") email_sender.send(FROM_ADDRESS, email, SUBJECT_REGISTER, email_msg) self.logger.debug("Email sent to:" + email + " for notifying new user:"******" !") user = dao.store_entity(user) if role == model.ROLE_ADMINISTRATOR: uploaded = os.path.join(os.path.dirname(tvb_data.__file__), "Default_Project.zip") ImportService().import_project_structure(uploaded, user.id) else: handle_event( ".".join([self.__class__.__name__, stack()[0][3]]), user) return TEXT_DISPLAY except Exception, excep: self.logger.exception("Could not create user!") raise UsernameException(str(excep))
def update(input_file, burst_match_dict=None):
    """
    Upgrade one H5 file from storage version 3 to 4.

    :param input_file: the file that needs to be converted to a newer file storage version.
    :param burst_match_dict: NOTE(review) -- accepted but never read in this body;
        presumably part of a shared updater signature.
    :raises IncompatibleFileManagerException: if the path is not a file or the
        class-name metadata entry is missing.
    """
    if not os.path.isfile(input_file):
        raise IncompatibleFileManagerException("The input path %s received for upgrading from 3 -> 4 is not a "
                                               "valid file on the disk." % input_file)

    folder, file_name = os.path.split(input_file)
    storage_manager = HDF5StorageManager(folder, file_name)

    root_metadata = storage_manager.get_metadata()
    if DataTypeMetaData.KEY_CLASS_NAME not in root_metadata:
        raise IncompatibleFileManagerException("File %s received for upgrading 3 -> 4 is not valid, due to missing "
                                               "metadata: %s" % (input_file, DataTypeMetaData.KEY_CLASS_NAME))
    class_name = root_metadata[DataTypeMetaData.KEY_CLASS_NAME]
    # Metadata is stored as bytes; decode before string matching.
    class_name = str(class_name, 'utf-8')

    if "ProjectionSurface" in class_name and FIELD_PROJECTION_TYPE not in root_metadata:
        # Projection matrices gained an explicit type field in v4; infer it
        # from the class name (SEEG/MEG, defaulting to EEG).
        LOGGER.info("Updating ProjectionSurface %s from %s" % (file_name, folder))
        projection_type = ProjectionsType.EEG.value
        if "SEEG" in class_name:
            projection_type = ProjectionsType.SEEG.value
        elif "MEG" in class_name:
            projection_type = ProjectionsType.MEG.value
        root_metadata[FIELD_PROJECTION_TYPE] = json.dumps(projection_type)
        LOGGER.debug("Setting %s = %s" % (FIELD_PROJECTION_TYPE, projection_type))

    elif "TimeSeries" in class_name:
        LOGGER.info("Updating TS %s from %s" % (file_name, folder))
        service = ImportService()
        try:
            # Operation id is, by storage convention, the containing folder name.
            operation_id = int(os.path.split(folder)[1])
            dt = service.load_datatype_from_file(os.path.join(folder, file_name), operation_id)
            dt_db = dao.get_datatype_by_gid(dt.gid)
        except ValueError:
            # Folder name was not numeric -> no operation context available.
            dt_db = None

        if dt_db is not None:
            # DT already in DB (update of own storage, by making sure all fields are being correctly populated)
            dt_db.configure()
            dt_db.persist_full_metadata()
            try:
                # restore in DB, in case TVB 1.4 had wrongly imported flags
                dao.store_entity(dt_db)
            except Exception:
                LOGGER.exception("Could not update flags in DB, but we continue with the update!")
        elif FIELD_SURFACE_MAPPING not in root_metadata:
            # Have default values, to avoid the full project not being imported
            root_metadata[FIELD_SURFACE_MAPPING] = json.dumps(False)
            root_metadata[FIELD_VOLUME_MAPPING] = json.dumps(False)

    # Stamp the file with the new data version and flush all metadata changes.
    root_metadata[TvbProfile.current.version.DATA_VERSION_ATTRIBUTE] = TvbProfile.current.version.DATA_VERSION
    storage_manager.set_metadata(root_metadata)
def import_default_project(admin_user=None):
    """Import the bundled 'Default_Project.zip' and return the created project.

    A fresh test user is created as owner when none is supplied.
    """
    admin_user = admin_user or TestFactory.create_user()
    data_root = os.path.dirname(os.path.dirname(cff_dataset.__file__))
    archive = os.path.join(data_root, 'Default_Project.zip')
    service = ImportService()
    service.import_project_structure(archive, admin_user.id)
    return service.created_projects[0]
def create_user(self, username=None, display_name=None, password=None, password2=None, role=None, email=None,
                comment=None, email_msg=None, validated=False, skip_import=False, gid=None,
                skip_sending_email=False):
    """ Service Layer for creating a new user.

    Validates fields, stores the user, optionally emails a random administrator
    for validation, and either imports the Default_Project (for admins) or
    links the new user as a member of the existing default project.
    """
    if (username is None) or len(username) < 1:
        raise UsernameException("Empty UserName!")
    if (display_name is None) or len(display_name) < 1:
        raise UsernameException("Empty display name!")
    if (password is None) or len(password) < 1:
        raise UsernameException("Empty password!")
    if password2 is None:
        password2 = password
    if password != password2:
        raise UsernameException("Passwords do not match!")

    try:
        # Administrators are auto-validated; others use the caller's flag.
        user_validated = (role == ROLE_ADMINISTRATOR) or validated
        user = User(username, display_name, password, email, user_validated, role, gid)
        if email_msg is None:
            email_msg = 'Hello ' + username + TEXT_CREATE
        admin_msg = (TEXT_CREATE_TO_ADMIN + username + ' :\n ' + TvbProfile.current.web.BASE_URL +
                     '/user/validate/' + username + '\n\n"' + str(comment) + '"')
        self.logger.info("Registering user " + username + " !")

        if role != ROLE_ADMINISTRATOR and email is not None and not skip_sending_email:
            # Pick a random administrator to receive the validation request.
            admins = UserService.get_administrators()
            admin = admins[random.randint(0, len(admins) - 1)]
            if admin.email is not None and (admin.email != TvbProfile.current.web.admin.DEFAULT_ADMIN_EMAIL):
                # Do not send validation email in case default admin email remained unchanged
                email_sender.send(FROM_ADDRESS, admin.email, SUBJECT_REGISTER, admin_msg)
                # NOTE(review): `"******"` below is secret-scrubbing residue; the
                # redacted argument (likely `+ username +`) makes this statement
                # syntactically invalid as-is.
                self.logger.debug("Email sent to:" + admin.email + " for validating user:"******" !")
            email_sender.send(FROM_ADDRESS, email, SUBJECT_REGISTER, email_msg)
            # NOTE(review): same `"******"` redaction residue here.
            self.logger.debug("Email sent to:" + email + " for notifying new user:"******" !")

        user = dao.store_entity(user)

        if role == ROLE_ADMINISTRATOR and not skip_import:
            # Admins get their own copy of the bundled default project.
            to_upload = os.path.join(os.path.dirname(tvb_data.__file__), "Default_Project.zip")
            if not os.path.exists(to_upload):
                self.logger.warning("Could not find DEFAULT PROJECT at path %s. You might want to import it "
                                    "yourself. See TVB documentation about where to find it!" % to_upload)
                return TEXT_DISPLAY
            ImportService().import_project_structure(to_upload, user.id)
        else:
            # Non-admins (or skip_import) are linked to the shared default project,
            # best-effort: a failure only logs a warning.
            try:
                default_prj_id = dao.get_project_by_gid(DEFAULT_PROJECT_GID).id
                dao.add_members_to_project(default_prj_id, [user.id])
            except Exception:
                self.logger.warning(
                    "Could not link user_id: %d with project_gid: %s " % (user.id, DEFAULT_PROJECT_GID))

        return TEXT_DISPLAY
    except Exception as excep:
        self.logger.exception("Could not create user!")
        raise UsernameException(str(excep))
def launch(self, view_model):
    # type: (TVBImporterModel) -> []
    """
    Execute import operations: unpack ZIP, build and store generic DataType objects.
    :raises LaunchException: when data_file is None, nonexistent, or invalid \
                (e.g. incomplete meta-data, not in ZIP / HDF5 format etc. )
    """
    if view_model.data_file is None:
        raise LaunchException("Please select file which contains data to import")
    service = ImportService()
    if os.path.exists(view_model.data_file):
        if zipfile.is_zipfile(view_model.data_file):
            # ZIP branch: the archive holds full exported operations.
            current_op = dao.get_operation_by_id(self.operation_id)
            # Creates a new TMP folder where to extract data
            tmp_folder = os.path.join(self.storage_path, "tmp_import")
            FilesHelper().unpack_zip(view_model.data_file, tmp_folder)
            operations = service.import_project_operations(current_op.project, tmp_folder)
            shutil.rmtree(tmp_folder)
            self.nr_of_datatypes += len(operations)
        else:
            # HDF5 branch: a single DataType file.
            # upgrade file if necessary
            file_update_manager = FilesUpdateManager()
            file_update_manager.upgrade_file(view_model.data_file)
            folder, h5file = os.path.split(view_model.data_file)
            manager = HDF5StorageManager(folder, h5file)
            if manager.is_valid_hdf5_file():
                datatype = None
                try:
                    datatype = service.load_datatype_from_file(view_model.data_file, self.operation_id)
                    # Validate referenced GIDs before persisting the index.
                    service.check_import_references(view_model.data_file, datatype)
                    service.store_datatype(datatype, view_model.data_file)
                    self.nr_of_datatypes += 1
                except ImportException as excep:
                    self.log.exception(excep)
                    # On failure, remove the partially stored H5 from disk.
                    if datatype is not None:
                        target_path = h5.path_for_stored_index(datatype)
                        if os.path.exists(target_path):
                            os.remove(target_path)
                    raise LaunchException("Invalid file received as input. " + str(excep))
            else:
                raise LaunchException("Uploaded file: %s is neither in ZIP or HDF5 format" % view_model.data_file)
    else:
        raise LaunchException("File: %s to import does not exists." % view_model.data_file)
def projectupload(self, **data):
    """Upload Project from TVB ZIP.

    Imports the uploaded archive as a full project owned by the logged user.
    Service-layer errors are logged and surfaced as a UI error message.
    NOTE: Python 2 syntax (`except X, e`).
    """
    self.logger.debug("Uploading ..." + str(data))
    try:
        upload_param = "uploadedfile"
        if upload_param in data and data[upload_param]:
            import_service = ImportService()
            import_service.import_project_structure(data[upload_param], common.get_logged_user().id)
    except ServicesBaseException, excep:
        self.logger.warning(excep.message)
        common.set_error_message(excep.message)
def _update_localconnectivity_metadata(folder, file_name):
    """Recompute and persist sparse-matrix metadata for a LocalConnectivity H5 file."""
    # By storage convention the containing folder is named after the operation id.
    op_id = int(os.path.split(folder)[1])
    importer = ImportService()
    datatype = importer.load_datatype_from_file(folder, file_name, op_id, move=False)
    matrix_meta = SparseMatrix.extract_sparse_matrix_metadata(datatype.matrix)
    datatype.set_metadata(matrix_meta, '', True, SparseMatrix.ROOT_PATH + 'matrix')
def update():
    """
    Try to import Default_Project, so that new users created with the latest code can share this project.

    Best-effort: any failure is logged (with traceback) and swallowed, so the
    surrounding upgrade process continues.
    """
    try:
        admins = dao.get_administrators()
        service = ImportService()
        service.import_project_structure(DATA_FILE, admins[0].id)
    except Exception:
        # BUG FIX: log message previously read "Could import DefaultProject!",
        # the opposite of what happened.
        LOGGER.exception("Could not import DefaultProject!")
def update_localconnectivity_metadata(folder, file_name):
    """Regenerate summary metadata for a LocalConnectivity matrix and write it back."""
    importer = ImportService()
    # The containing folder is named after the operation id.
    op_id = int(os.path.split(folder)[1])
    datatype = importer.load_datatype_from_file(os.path.join(folder, file_name), op_id)
    matrix = datatype.matrix
    meta = {"dtype": matrix.dtype.str,
            "format": matrix.format,
            "Shape": str(matrix.shape),
            "Maximum": matrix.data.max(),
            "Minimum": matrix.data.min(),
            "Mean": matrix.mean()}
    datatype.set_metadata(meta, '', True, '/matrix')
def load_simulation_from_zip(self, zip_file, project):
    """Unpack an exported simulation ZIP and rebuild its simulator and burst objects.

    Returns a tuple (simulator view-model, cloned burst configuration,
    extraction folder); the simulator is re-parented onto the burst clone.
    """
    folder = ImportService().import_simulator_configuration_zip(zip_file)
    h5_name = DirLoader(folder, None).find_file_for_has_traits_type(SimulatorAdapterModel)
    simulator = h5.load_view_model_from_file(os.path.join(folder, h5_name))
    stored_burst = self.load_burst_configuration_from_folder(folder, project)
    burst_copy = stored_burst.clone()
    # Link the loaded simulator to the fresh clone, not the stored burst.
    simulator.generic_attributes.parent_burst = burst_copy.gid
    return simulator, burst_copy, folder
def update(input_file):
    """
    Upgrade one H5 file from storage version 3 to 4 (older updater variant).

    :param input_file: the file that needs to be converted to a newer file storage version.
    :raises FileVersioningException: if the path does not point to a file on disk.
    """
    if not os.path.isfile(input_file):
        raise FileVersioningException("The input path %s received for upgrading from 3 -> 4 is not a "
                                      "valid file on the disk." % input_file)

    folder, file_name = os.path.split(input_file)
    storage_manager = HDF5StorageManager(folder, file_name)

    root_metadata = storage_manager.get_metadata()
    class_name = root_metadata[DataTypeMetaData.KEY_CLASS_NAME]

    if "ProjectionSurface" in class_name and FIELD_PROJECTION_TYPE not in root_metadata:
        # Projection matrices gained an explicit type field in v4; infer it
        # from the class name (SEEG/MEG, defaulting to EEG).
        LOGGER.info("Updating ProjectionSurface %s from %s" % (file_name, folder))
        projection_type = projections_data.EEG_POLYMORPHIC_IDENTITY
        if "SEEG" in class_name:
            projection_type = projections_data.SEEG_POLYMORPHIC_IDENTITY
        elif "MEG" in class_name:
            projection_type = projections_data.MEG_POLYMORPHIC_IDENTITY
        root_metadata[FIELD_PROJECTION_TYPE] = json.dumps(projection_type)
        LOGGER.debug("Setting %s = %s" % (FIELD_PROJECTION_TYPE, projection_type))

    elif "TimeSeries" in class_name:
        LOGGER.info("Updating TS %s from %s" % (file_name, folder))
        service = ImportService()
        # Operation id is, by storage convention, the containing folder name.
        operation_id = int(os.path.split(folder)[1])
        dt = service.load_datatype_from_file(folder, file_name, operation_id, move=False)
        dt_db = dao.get_datatype_by_gid(dt.gid)

        if dt_db is not None:
            # DT already in DB (update of own storage, by making sure all fields are being correctly populated)
            dt_db.configure()
            dt_db.persist_full_metadata()
            # restore in DB, in case TVB 1.4 had wrongly imported flags
            dao.store_entity(dt_db)
        elif FIELD_SURFACE_MAPPING not in root_metadata:
            # Have default values, to avoid the full project not being imported
            root_metadata[FIELD_SURFACE_MAPPING] = json.dumps(False)
            root_metadata[FIELD_VOLUME_MAPPING] = json.dumps(False)

    # Stamp the file with the new data version and flush all metadata changes.
    root_metadata[TvbProfile.current.version.DATA_VERSION_ATTRIBUTE] = TvbProfile.current.version.DATA_VERSION
    storage_manager.set_metadata(root_metadata)
def load_from_zip(self, zip_file, project):
    """Restore a simulator and its burst configuration from an exported ZIP.

    The archive is unpacked, the simulator H5 located and deserialized, and
    the burst configuration rebuilt from the same folder.
    """
    folder = ImportService().import_simulator_configuration_zip(zip_file)
    h5_name = DirLoader(folder, None).find_file_for_has_traits_type(Simulator)
    # Open the H5 only long enough to read the simulator GID.
    with SimulatorH5(os.path.join(folder, h5_name)) as sim_h5:
        sim_gid = sim_h5.gid.load()
    simulator = SimulatorSerializer.deserialize_simulator(sim_gid, folder)
    burst_config = self.burst_service.load_burst_configuration_from_folder(folder, project)
    return simulator, burst_config
def transactional_setup_method(self):
    """Runs before each test: fresh services, a generated project and an operation."""
    self.zip_path = None
    self.project_service = ProjectService()
    self.flow_service = FlowService()
    self.import_service = ImportService()
    # A user must exist before a project can be created for it.
    self.test_user = TestFactory.create_user()
    self.test_project = TestFactory.create_project(self.test_user, name="GeneratedProject",
                                                   description="test_desc")
    self.operation = TestFactory.create_operation(test_user=self.test_user, test_project=self.test_project)
    self.adapter_instance = TestFactory.create_adapter()
    TestFactory.import_cff(test_user=self.test_user, test_project=self.test_project)
def transactional_setup_method(self):
    """Runs before each test: verify a clean DB, then import the default project."""
    self.project_service = ProjectService()
    self.flow_service = FlowService()
    self.import_service = ImportService()
    self.test_user = TestFactory.create_user()
    self.delete_project_folders()
    # The DB must be empty before importing, otherwise counts below are meaningless.
    dt_count = self.count_all_entities(DataType)
    assert dt_count == 0, "There should be no data type in DB"
    prj_count = self.count_all_entities(Project)
    assert prj_count == 0
    self.test_project = TestFactory.import_default_project(self.test_user)
    self.operation = TestFactory.create_operation(test_user=self.test_user, test_project=self.test_project)
def setUp(self):
    """Runs before each test: verify a clean DB, import the default project, build fixtures."""
    self.project_service = ProjectService()
    self.flow_service = FlowService()
    self.import_service = ImportService()
    self.test_user = TestFactory.create_user()
    self.delete_project_folders()
    # The DB must be empty before importing, otherwise the import is not isolated.
    dt_count = self.count_all_entities(DataType)
    self.assertEqual(0, dt_count, "There should be no data type in DB")
    prj_count = self.count_all_entities(Project)
    self.assertEqual(0, prj_count)
    self.test_project = TestFactory.import_default_project(self.test_user)
    self.operation = TestFactory.create_operation(test_user=self.test_user, test_project=self.test_project)
    self.adapter_instance = TestFactory.create_adapter(test_project=self.test_project)
def read_h5(full_paths):
    """Load each H5 file in `full_paths` as a DataType and return the list.

    Loading requires an operation/project context, so the first operation of
    the first project in the DB is borrowed for every file.
    """
    # We need to load the DataType in the context of an operation, and a project
    projects = dao.get_all_projects()
    ops = dao.get_generic_entity(Operation, projects[0].id, "fk_launched_in")
    op_id = ops[0].id
    importer = ImportService()
    loaded = []
    for path in full_paths:
        # The actual read of H5:
        datatype = importer.load_datatype_from_file(path, op_id)
        print("We've build DataType: [%s]" % datatype.__class__.__name__, datatype)
        loaded.append(datatype)
    return loaded
def transactional_setup_method(self):
    """Runs before each test: fresh services, a generated project, an operation and a connectivity."""
    self.zip_path = None
    self.project_service = ProjectService()
    self.flow_service = FlowService()
    self.import_service = ImportService()
    # A user must exist before a project can be created for it.
    self.test_user = TestFactory.create_user()
    self.test_project = TestFactory.create_project(self.test_user, name="GeneratedProject",
                                                   description="test_desc")
    self.operation = TestFactory.create_operation(test_user=self.test_user, test_project=self.test_project)
    self.adapter_instance = TestFactory.create_adapter()
    # Seed the project with a 66-region connectivity from the bundled data.
    conn_zip = os.path.join(os.path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_66.zip')
    TestFactory.import_zip_connectivity(self.test_user, self.test_project, conn_zip)
def run_import(project_path):
    """Import a project ZIP into TVB, creating an admin user first if none exists."""
    ## If we would know a UserID to have as admin, next step would not be necessary.
    ## Make sure at least one user exists in TVB DB:
    user_service = UserService()
    admins = user_service.get_administrators()
    if not admins:
        ## No Admin user was found, we will create one
        user_service.create_user("admin", "pass", role=model.ROLE_ADMINISTRATOR,
                                 email="*****@*****.**", validated=True, skip_import=True)
        admins = user_service.get_administrators()
    admin = admins[0]
    ## Do the actual import of a project from ZIP:
    ImportService().import_project_structure(project_path, admin.id)
    print("Project imported successfully. Check the Web UI!")
def _import(self, export_file, user_id):
    """Import a project ZIP on behalf of `user_id` and return the new project's id.

    A fresh ImportService is built per call, because the service is stateful.
    """
    service = ImportService()
    service.import_project_structure(export_file, user_id)
    return service.created_projects[0].id
def launch(self, view_model):
    # type: (TVBImporterModel) -> []
    """
    Execute import operations: unpack ZIP, build and store generic DataType objects.
    :raises LaunchException: when data_file is None, nonexistent, or invalid \
                (e.g. incomplete meta-data, not in ZIP / HDF5 format etc. )
    """
    if view_model.data_file is None:
        raise LaunchException("Please select file which contains data to import")
    service = ImportService()
    if os.path.exists(view_model.data_file):
        current_op = dao.get_operation_by_id(self.operation_id)
        if zipfile.is_zipfile(view_model.data_file):
            # ZIP branch: archive may hold single datatypes or a DatatypeGroup.
            # Creates a new TMP folder where to extract data
            tmp_folder = os.path.join(self.storage_path, "tmp_import")
            self.storage_interface.unpack_zip(view_model.data_file, tmp_folder)
            is_group = False
            current_op_id = current_op.id
            for file in os.listdir(tmp_folder):
                # In case we import a DatatypeGroup, we want the default import flow
                if os.path.isdir(os.path.join(tmp_folder, file)):
                    current_op_id = None
                    is_group = True
                    break
            try:
                operations, all_dts, stored_dts_count = service.import_project_operations(
                    current_op.project, tmp_folder, is_group, current_op_id)
                self.nr_of_datatypes += stored_dts_count
                # Surface partial/no-op imports to the user via operation info.
                if stored_dts_count == 0:
                    current_op.additional_info = 'All chosen datatypes already exist!'
                    dao.store_entity(current_op)
                elif stored_dts_count < all_dts:
                    current_op.additional_info = 'Part of the chosen datatypes already exist!'
                    dao.store_entity(current_op)
            except ImportException as excep:
                self.log.exception(excep)
                current_op.additional_info = excep.message
                current_op.status = STATUS_ERROR
                raise LaunchException("Invalid file received as input. " + str(excep))
            finally:
                # Temporary extraction folder is always removed.
                shutil.rmtree(tmp_folder)
        else:
            # HDF5 branch: a single DataType file.
            # upgrade file if necessary
            file_update_manager = FilesUpdateManager()
            file_update_manager.upgrade_file(view_model.data_file)
            if self.storage_interface.get_storage_manager(view_model.data_file).is_valid_tvb_file():
                datatype = None
                try:
                    datatype = service.load_datatype_from_file(view_model.data_file, self.operation_id)
                    # Returns 0 when an identical datatype already exists and is linked.
                    stored_new_dt = service.store_or_link_datatype(datatype, view_model.data_file,
                                                                   current_op.project.id)
                    if stored_new_dt == 0:
                        current_op.additional_info = 'The chosen datatype already exists!'
                        dao.store_entity(current_op)
                    self.nr_of_datatypes += stored_new_dt
                except ImportException as excep:
                    self.log.exception(excep)
                    # On failure, remove the partially stored H5 from disk.
                    if datatype is not None:
                        target_path = h5.path_for_stored_index(datatype)
                        if os.path.exists(target_path):
                            os.remove(target_path)
                    raise LaunchException("Invalid file received as input. " + str(excep))
            else:
                raise LaunchException("Uploaded file: %s is neither in ZIP or HDF5 format" % view_model.data_file)
    else:
        raise LaunchException("File: %s to import does not exists." % view_model.data_file)