def upgrade_project(self, project: Project) -> None:
    # Upgrade a legacy project in place to the current .nsproj index format.
    # NOTE(review): the leading `assert False` makes everything below
    # unreachable — calling this method always raises AssertionError (unless
    # running under -O, which strips asserts entirely). This looks like a
    # deliberate disable of the upgrade path; confirm whether the assert
    # should be removed or the dead code deleted.
    assert False
    assert project in self.projects
    if project.needs_upgrade:
        # the legacy storage path's parent directory names the project; the
        # new project file and its " Data" folder are derived from it
        legacy_path = project.storage_system_path.parent
        target_project_path = legacy_path.with_suffix(".nsproj")
        target_data_path = legacy_path.parent / (str(legacy_path.stem) + " Data")
        logging.getLogger("loader").info(
            f"Created new project {target_project_path} {target_data_path}"
        )
        # write a minimal .nsproj index file pointing at the new data folder
        target_project_uuid = uuid.uuid4()
        target_project_data_json = json.dumps({
            "version": FileStorageSystem.PROJECT_VERSION,
            "uuid": str(target_project_uuid),
            "project_data_folders": [str(target_data_path.stem)]
        })
        target_project_path.write_text(target_project_data_json, "utf-8")
        # migrate the legacy storage contents into the new storage system,
        # then swap the project reference over to the new index file
        with contextlib.closing(
                FileStorageSystem.FileProjectStorageSystem(
                    target_project_path)) as new_storage_system:
            new_storage_system.load_properties()
            FileStorageSystem.migrate_to_latest(
                project.project_storage_system, new_storage_system)
        self.remove_project_reference(project)
        self.read_project(self.add_project_index(target_project_path))
def _upgrade_project_storage_system(
        self, project_storage_system: FileStorageSystem.ProjectStorageSystem
) -> ProjectReference:
    """Migrate a legacy project's storage into a new .nsproj index project.

    Creates the new project file and its " Data" folder next to the legacy
    location, migrates the legacy contents into it, and returns an
    IndexProjectReference describing the new project.

    Raises FileExistsError if either target already exists.
    """
    source_path = pathlib.Path(project_storage_system.get_identifier())
    target_project_path = source_path.parent.with_suffix(".nsproj")
    target_data_path = target_project_path.with_name(
        target_project_path.stem + " Data")
    # refuse to clobber an existing upgrade target
    if target_project_path.exists() or target_data_path.exists():
        raise FileExistsError()
    logging.getLogger("loader").info(
        f"Created new project {target_project_path} {target_data_path}")
    # write the minimal .nsproj index that points at the new data folder
    new_uuid = uuid.uuid4()
    index_json = json.dumps({
        "version": FileStorageSystem.PROJECT_VERSION,
        "uuid": str(new_uuid),
        "project_data_folders": [str(target_data_path.stem)]
    })
    target_project_path.write_text(index_json, "utf-8")
    # copy the legacy storage contents into the freshly created storage system
    with contextlib.closing(
            FileStorageSystem.FileProjectStorageSystem(
                target_project_path)) as destination:
        destination.load_properties()
        FileStorageSystem.migrate_to_latest(project_storage_system, destination)
    reference = IndexProjectReference()
    reference.project_path = target_project_path
    reference.project_uuid = new_uuid
    return reference
def __establish_profile(
        self, profile_path: pathlib.Path
) -> typing.Tuple[typing.Optional[Profile.Profile], bool]:
    """Load the profile stored at profile_path, creating it first if absent.

    Returns the profile and a flag indicating whether it was newly created.
    """
    # relative paths would let tests drop temporary files into the test directory
    assert profile_path.is_absolute()
    is_new = not profile_path.exists()
    if is_new:
        logging.getLogger("loader").info(
            f"Creating new profile {profile_path}")
        profile_json = json.dumps({
            "version": FileStorageSystem.PROFILE_VERSION,
            "uuid": str(uuid.uuid4())
        })
        profile_path.write_text(profile_json, "utf-8")
    else:
        logging.getLogger("loader").info(
            f"Using existing profile {profile_path}")
    file_storage = FileStorageSystem.FilePersistentStorageSystem(profile_path)
    file_storage.load_properties()
    # the cache lives next to the profile file as "<stem> Cache.nscache"
    cache_path = profile_path.parent / pathlib.Path(
        profile_path.stem + " Cache").with_suffix(".nscache")
    logging.getLogger("loader").info(f"Using cache {cache_path}")
    db_cache = Cache.DbStorageCache(cache_path)
    profile = Profile.Profile(storage_system=file_storage, storage_cache=db_cache)
    profile.read_profile()
    return profile, is_new
def __init__(self, storage_system=None, storage_cache=None, *, auto_project: bool = True):
    # Profile root object: defines its persistent schema (relationships and
    # properties), attaches storage/cache, builds the projects observer, and
    # optionally auto-creates an in-memory project.
    super().__init__()
    self.define_root_context()
    self.define_type("profile")
    self.define_relationship("workspaces", WorkspaceLayout.factory)
    self.define_relationship("data_groups", DataGroup.data_group_factory)
    self.define_relationship("project_references", project_reference_factory,
                             insert=self.__insert_project_reference,
                             remove=self.__remove_project_reference)
    self.define_property("workspace_uuid",
                         converter=Converter.UuidToStringConverter())
    self.define_property(
        "data_item_references", dict(), hidden=True
    )  # map string key to data item, used for data acquisition channels
    self.define_property(
        "data_item_variables", dict(), hidden=True
    )  # map string key to data item, used for reference in scripts
    self.define_property("target_project_reference_uuid",
                         converter=Converter.UuidToStringConverter(),
                         changed=self.__property_changed)
    self.define_property("work_project_reference_uuid",
                         converter=Converter.UuidToStringConverter(),
                         changed=self.__property_changed)
    self.define_property("closed_items", list())
    # attach storage; fall back to in-memory storage when none is supplied
    self.storage_system = storage_system or FileStorageSystem.MemoryPersistentStorageSystem()
    self.storage_system.load_properties()
    self.__work_project_reference: typing.Optional[ProjectReference] = None
    self.__target_project_reference: typing.Optional[ProjectReference] = None
    self.storage_cache = storage_cache or Cache.DictStorageCache()  # need to deallocate
    self.set_storage_system(self.storage_system)
    self.__document_model = None
    self.profile_context = None
    # helper object to produce the projects sequence
    oo = Observer.ObserverBuilder()
    oo.source(self).ordered_sequence_from_array("project_references").map(
        oo.x.prop("project")).filter(lambda x: x is not None).trampoline(
            self, "projects")
    self.__projects_observer = oo.make_observable()
    self.__is_read = False
    if auto_project:
        # create a default in-memory project and make it both the work
        # project and the target project
        self.profile_context = MemoryProfileContext()
        project_reference = self.add_project_memory()
        self.work_project_reference_uuid = project_reference.uuid
        self.target_project_reference_uuid = project_reference.uuid
def make_storage( self, profile_context: typing.Optional[ProfileContext] ) -> typing.Optional[FileStorageSystem.ProjectStorageSystem]: if self.project_folder_path: return FileStorageSystem.make_folder_project_storage_system( self.project_folder_path) return None
def make_storage(
        self, profile_context: typing.Optional[MemoryProfileContext]
) -> typing.Optional[FileStorageSystem.ProjectStorageSystem]:
    """Return an in-memory project storage system for this reference.

    Raises Exception("make_storage_error") when the private error flag is
    set (presumably a test hook — confirm against callers).
    """
    should_fail = self.__make_storage_error
    if should_fail:
        raise Exception("make_storage_error")
    return FileStorageSystem.make_memory_project_storage_system(
        profile_context, self.project_uuid, self.__d)
def __init__(self) -> None:
    """Initialize in-memory storage used to back profiles and projects (for testing)."""
    self.storage_cache = Cache.DictStorageCache()
    self.profile_properties = {}
    self.__storage_system = FileStorageSystem.MemoryPersistentStorageSystem(
        library_properties=self.profile_properties)
    self.__storage_system.load_properties()
    # per-project storage: one entry per project, keyed by project
    self.x_project_properties = {}
    self.x_data_properties_map = {}
    self.x_data_map = {}
    self.x_trash_map = {}
    # storage for the first created project; these also facilitate legacy
    # project testing
    self.project_uuid = None
    self.project_properties = None
    self.data_properties_map = None
    self.data_map = None
    self.trash_map = None
    self._test_data_read_event = Event.Event()
    self.__profile = None
    self.__items_exit = []
def register_data_item(self, item, item_uuid, storage_handler, properties: dict) -> None:
    """Create and register a storage adapter for a data item.

    The item_uuid must not already be registered. If the item is currently
    write-delayed, the new adapter is marked write-delayed as well.
    """
    assert item_uuid not in self.__data_item_storage_adapters
    adapter = FileStorageSystem.DataItemStorageAdapter(
        self, storage_handler, properties)
    self.__data_item_storage_adapters[item_uuid] = adapter
    if item and self.is_write_delayed(item):
        adapter.set_write_delayed(item, True)
def make_storage( self, profile_context: typing.Optional[ProfileContext] ) -> typing.Optional[FileStorageSystem.ProjectStorageSystem]: project_path = self.project_path if project_path: return FileStorageSystem.make_index_project_storage_system( project_path) return None
def __init__(
        self,
        storage_system: typing.Optional[
            FileStorageSystem.PersistentStorageSystem] = None,
        storage_cache: typing.Optional[Cache.CacheLike] = None,
        *,
        profile_context: typing.Optional[ProfileContext] = None) -> None:
    # Profile root object: defines the persistent schema (hidden properties
    # and relationships), attaches storage/cache, builds the projects
    # observer, and optionally binds a profile context for memory projects.
    super().__init__()
    self.__class__.count += 1  # class-level instance counter, presumably for leak checking in tests — confirm
    self.define_root_context()
    self.define_type("profile")
    self.define_property("last_project_reference",
                         converter=Converter.UuidToStringConverter(),
                         hidden=True)
    self.define_property("work_project_reference_uuid",
                         converter=Converter.UuidToStringConverter(),
                         hidden=True)
    self.define_property("closed_items", list(), hidden=True)
    self.define_property("script_items_updated", False,
                         changed=self.__property_changed, hidden=True)
    self.define_relationship("project_references", project_reference_factory,
                             insert=self.__insert_project_reference,
                             remove=self.__remove_project_reference,
                             hidden=True)
    self.define_relationship(
        "script_items",
        typing.cast(
            typing.Callable[[typing.Callable[[str], str]],
                            typing.Optional[Persistence.PersistentObject]],
            script_item_factory),
        hidden=True)
    # attach storage; fall back to in-memory storage when none is supplied
    self.storage_system = storage_system or FileStorageSystem.MemoryPersistentStorageSystem()
    self.storage_system.load_properties()
    self.storage_cache: typing.Any = storage_cache or Cache.DictStorageCache()  # need to deallocate
    self.set_storage_system(self.storage_system)
    self.profile_context = None
    # helper object to produce the projects sequence
    oo = Observer.ObserverBuilder()
    oo.source(typing.cast(
        Observer.ItemValue,
        self)).ordered_sequence_from_array("project_references").map(
            oo.x.prop("project")).filter(
                lambda x: x is not None).trampoline(self, "projects")
    self.__projects_observer = oo.make_observable()
    if profile_context:
        # bind the context and create a default in-memory project
        self.profile_context = profile_context
        self.add_project_memory()
def create_profile(workspace_dir: pathlib.Path, do_logging: bool, force_create: bool) -> typing.Tuple[typing.Optional[Profile], bool]:
    """Create or open the profile stored in workspace_dir.

    Returns (profile, create_new_document). Returns (None, False) when the
    library does not exist and force_create is false.

    Improvements over the previous version: the library-existence check is
    performed once instead of twice, and the auto-migration list is built
    from a declarative table instead of four copy-pasted append calls.
    """
    library_path = _migrate_library(workspace_dir, do_logging)
    # check existence once; the result drives both the early-out below and
    # the new-vs-existing decision
    library_exists = os.path.exists(library_path)
    if not force_create and not library_exists:
        return None, False
    create_new_document = not library_exists
    if do_logging:
        if create_new_document:
            logging.info(f"Creating new document: {library_path}")
        else:
            logging.info(f"Using existing document {library_path}")
    workspace_path = pathlib.Path(workspace_dir)
    # each entry is one legacy library layout that can be auto-migrated.
    # NOTE: when adding an AutoMigration here, also add the corresponding
    # file copy in _migrate_library
    auto_migrations = [
        AutoMigration(workspace_path / library_name, [workspace_path / data_name])
        for library_name, data_name in (
            ("Nion Swift Workspace.nslib", "Nion Swift Data"),
            ("Nion Swift Workspace.nslib", "Nion Swift Data 10"),
            ("Nion Swift Workspace.nslib", "Nion Swift Data 11"),
            ("Nion Swift Library 12.nslib", "Nion Swift Data 12"),
        )
    ]
    storage_system = FileStorageSystem.FileStorageSystem(
        library_path,
        [workspace_path / f"Nion Swift Data {DataItem.DataItem.storage_version}"],
        auto_migrations=auto_migrations)
    cache_filename = f"Nion Swift Cache {DataItem.DataItem.storage_version}.nscache"
    cache_path = workspace_dir / cache_filename
    storage_cache = Cache.DbStorageCache(cache_path)
    return Profile(storage_system=storage_system, storage_cache=storage_cache, ignore_older_files=False), create_new_document
def __init__(self, storage_system=None, storage_cache=None, *, profile_context: typing.Optional[ProfileContext] = None):
    # Profile root object: defines its persistent schema, attaches
    # storage/cache, builds the projects observer, and optionally binds a
    # profile context used for in-memory projects.
    super().__init__()
    self.__class__.count += 1  # class-level instance counter, presumably for leak checking in tests — confirm
    self.define_root_context()
    self.define_type("profile")
    self.define_relationship("project_references", project_reference_factory,
                             insert=self.__insert_project_reference,
                             remove=self.__remove_project_reference)
    self.define_property("last_project_reference",
                         converter=Converter.UuidToStringConverter())
    self.define_property("work_project_reference_uuid",
                         converter=Converter.UuidToStringConverter())
    self.define_property("closed_items", list())
    # attach storage; fall back to in-memory storage when none is supplied
    self.storage_system = storage_system or FileStorageSystem.MemoryPersistentStorageSystem()
    self.storage_system.load_properties()
    self.storage_cache = storage_cache or Cache.DictStorageCache()  # need to deallocate
    self.set_storage_system(self.storage_system)
    self.profile_context = None
    # helper object to produce the projects sequence
    oo = Observer.ObserverBuilder()
    oo.source(self).ordered_sequence_from_array("project_references").map(
        oo.x.prop("project")).filter(lambda x: x is not None).trampoline(
            self, "projects")
    self.__projects_observer = oo.make_observable()
    if profile_context:
        # bind the context and create a default in-memory project
        self.profile_context = profile_context
        self.add_project_memory()
def get_storage_system(workspace_dir):
    """Build a FileStorageSystem for workspace_dir, including legacy auto-migrations.

    This function is adapted from Swift's profile.
    """
    workspace_dir = pathlib.Path(workspace_dir)
    library_path = Profile._migrate_library(workspace_dir, do_logging=True)
    current_version = DataItem.DataItem.storage_version
    # legacy workspace layouts that may need migrating
    auto_migrations = [
        Profile.AutoMigration(
            workspace_dir / "Nion Swift Workspace.nslib",
            [workspace_dir / data_folder])
        for data_folder in ("Nion Swift Data", "Nion Swift Data 10",
                            "Nion Swift Data 11")
    ]
    # attempt at being future proof: cover every library version from 12 up
    # to (but not including) the current storage version
    if current_version > 12:
        for version in range(12, current_version):
            auto_migrations.append(
                Profile.AutoMigration(
                    workspace_dir / f"Nion Swift Library {version}.nslib",
                    [workspace_dir / f"Nion Swift Data {version}"]))
    # NOTE: when adding an AutoMigration here, also add the corresponding
    # file copy in _migrate_library
    return FileStorageSystem.FileStorageSystem(
        library_path,
        [workspace_dir / f"Nion Swift Data {current_version}"],
        auto_migrations=auto_migrations)
def read_data_items_version_stats(self):
    """Delegate to FileStorageSystem to gather version statistics for this storage system's data items."""
    stats = FileStorageSystem.read_data_items_version_stats(self)
    return stats
def make_storage(
        self, profile_context: typing.Optional["MemoryProfileContext"]
) -> typing.Optional[FileStorageSystem.ProjectStorageSystem]:
    """Return an index-file project storage system for this reference's project path.

    NOTE(review): unlike sibling make_storage implementations, this one does
    not guard against a missing project_path — presumably project_path is
    always set for this reference type; confirm against callers.
    """
    path = self.project_path
    return FileStorageSystem.make_index_project_storage_system(path)
def read_library(self, ignore_older_files) -> typing.Dict:
    """Delegate reading of the library properties to FileStorageSystem.

    ignore_older_files is forwarded unchanged.
    """
    library_dict = FileStorageSystem.read_library(self, ignore_older_files)
    return library_dict
def make_storage(
        self, profile_context: typing.Optional["MemoryProfileContext"]
) -> typing.Optional[FileStorageSystem.ProjectStorageSystem]:
    """Create a memory-backed project storage system for this reference within profile_context."""
    storage = FileStorageSystem.make_memory_project_storage_system(
        profile_context, self.project_uuid, self.__d)
    return storage