def test_ndata_handler_basic_functionality(self):
    now = datetime.datetime.now()
    current_working_directory = os.getcwd()
    data_dir = os.path.join(current_working_directory, "__Test")
    Cache.db_make_directory_if_needed(data_dir)
    try:
        h = NDataHandler.NDataHandler(os.path.join(data_dir, "abc.ndata"))
        with contextlib.closing(h):
            p = {u"abc": 1, u"def": u"bcd", u"uuid": str(uuid.uuid4())}
            # write properties
            h.write_properties(p, now)
            self.assertEqual(h.read_properties(), p)
            self.assertIsNone(h.read_data())
            # write data
            h.write_data(numpy.zeros((4, 4), dtype=numpy.float64), now)
            self.assertEqual(h.read_properties(), p)
            d = h.read_data()
            self.assertEqual(d.shape, (4, 4))
            self.assertEqual(d.dtype, numpy.float64)
            # rewrite data
            h.write_data(numpy.zeros((12, 12), dtype=numpy.float32), now)
            self.assertEqual(h.read_properties(), p)
            d = h.read_data()
            self.assertEqual(d.shape, (12, 12))
            self.assertEqual(d.dtype, numpy.float32)
            # rewrite properties
            h.write_properties(p, now)
            self.assertEqual(h.read_properties(), p)
            d = h.read_data()
            self.assertEqual(d.shape, (12, 12))
            self.assertEqual(d.dtype, numpy.float32)
    finally:
        #logging.debug("rmtree %s", data_dir)
        shutil.rmtree(data_dir)
def test_get_cached_value_works_while_suspended(self):
    suspendable_cache = Cache.SuspendableCache(Cache.DictStorageCache())
    suspendable_cache.uuid = uuid.uuid4()
    suspendable_cache.set_cached_value(suspendable_cache, "key", 999, False)
    suspendable_cache.suspend_cache()
    self.assertEqual(suspendable_cache.get_cached_value(suspendable_cache, "key", None), 999)
    suspendable_cache.spill_cache()
def test_is_cached_value_dirty_works_while_suspended(self):
    suspendable_cache = Cache.SuspendableCache(Cache.DictStorageCache())
    suspendable_cache.uuid = uuid.uuid4()
    suspendable_cache.set_cached_value(suspendable_cache, "key", 999, False)
    suspendable_cache.set_cached_value_dirty(suspendable_cache, "key", False)
    suspendable_cache.suspend_cache()
    self.assertFalse(suspendable_cache.is_cached_value_dirty(suspendable_cache, "key"))
    suspendable_cache.spill_cache()
def test_spill_does_not_remove_value_that_has_been_set(self):
    suspendable_cache = Cache.SuspendableCache(Cache.DictStorageCache())
    suspendable_cache.uuid = uuid.uuid4()
    suspendable_cache.remove_cached_value(suspendable_cache, "key")
    suspendable_cache.set_cached_value(suspendable_cache, "key", True, False)
    suspendable_cache.suspend_cache()
    suspendable_cache.spill_cache()
    self.assertTrue(suspendable_cache.get_cached_value(suspendable_cache, "key", False))
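# A minimal lifecycle sketch (not part of the test suite) tying the three tests above
# together; it only uses calls those tests already exercise. The method name is
# hypothetical, and any object with a uuid attribute can serve as the cache target.
def _example_suspendable_cache_lifecycle(self):
    target = Cache.SuspendableCache(Cache.DictStorageCache())
    target.uuid = uuid.uuid4()
    target.set_cached_value(target, "key", 999, False)   # write before suspending
    target.suspend_cache()                                # reads keep working while suspended
    value = target.get_cached_value(target, "key", None)  # -> 999
    target.spill_cache()                                  # flush any held writes to the DictStorageCache
    return value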
def test_ndata_handler_rewrites_reversed_zip_file(self):
    now = datetime.datetime.now()
    current_working_directory = os.getcwd()
    data_dir = os.path.join(current_working_directory, "__Test")
    Cache.db_make_directory_if_needed(data_dir)
    try:
        p = {u"abc": 1, u"def": u"bcd", u"uuid": str(uuid.uuid4())}
        d = numpy.zeros((12, 12), dtype=numpy.float32)
        # write zip file where metadata is first
        with open(os.path.join(data_dir, "file.ndata"), "w+b") as fp:
            dir_data_list = list()
            dt = now
            properties = p
            data = d
            if properties is not None:
                json_io = io.StringIO()
                json.dump(properties, json_io)
                json_str = json_io.getvalue()

                def write_json(fp):
                    json_bytes = bytes(json_str, 'ISO-8859-1')
                    fp.write(json_bytes)
                    return binascii.crc32(json_bytes) & 0xFFFFFFFF

                offset_json = fp.tell()
                json_len, json_crc32 = NDataHandler.write_local_file(fp, b"metadata.json", write_json, dt)
                dir_data_list.append((offset_json, b"metadata.json", json_len, json_crc32))
            if data is not None:
                offset_data = fp.tell()

                def write_data(fp):
                    numpy_start_pos = fp.tell()
                    numpy.save(fp, data)
                    numpy_end_pos = fp.tell()
                    fp.seek(numpy_start_pos)
                    header_data = fp.read((numpy_end_pos - numpy_start_pos) - data.nbytes)  # read the header
                    data_crc32 = binascii.crc32(data.data, binascii.crc32(header_data)) & 0xFFFFFFFF
                    fp.seek(numpy_end_pos)
                    return data_crc32

                data_len, crc32 = NDataHandler.write_local_file(fp, b"data.npy", write_data, dt)
                dir_data_list.append((offset_data, b"data.npy", data_len, crc32))
            dir_offset = fp.tell()
            for offset, name_bytes, data_len, crc32 in dir_data_list:
                NDataHandler.write_directory_data(fp, offset, name_bytes, data_len, crc32, dt)
            dir_size = fp.tell() - dir_offset
            NDataHandler.write_end_of_directory(fp, dir_size, dir_offset, len(dir_data_list))
            fp.truncate()
        # make sure read works
        h = NDataHandler.NDataHandler(os.path.join(data_dir, "file.ndata"))
        with contextlib.closing(h):
            self.assertEqual(h.read_properties(), p)
            dd = h.read_data()
            self.assertEqual(dd.shape, d.shape)
            self.assertEqual(dd.dtype, d.dtype)
            # now rewrite
            h.write_properties(p, now)
    finally:
        #logging.debug("rmtree %s", data_dir)
        shutil.rmtree(data_dir)
def handle_new():
    self.library_name = self.__library_name_field.text
    workspace_dir = os.path.join(self.directory, self.library_name)
    Cache.db_make_directory_if_needed(workspace_dir)
    path = os.path.join(workspace_dir, "Nion Swift Workspace.nslib")
    if not os.path.exists(path):
        with open(path, "w") as fp:
            json.dump({}, fp)
    if os.path.exists(path):
        app.switch_library(workspace_dir)
        return True
    return False
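# Standalone sketch of the bootstrap step used by handle_new above; the helper name is
# hypothetical. It creates the workspace directory and an empty "Nion Swift Workspace.nslib"
# file so the directory can subsequently be opened as a library.
def _ensure_empty_library(workspace_dir):
    Cache.db_make_directory_if_needed(workspace_dir)
    path = os.path.join(workspace_dir, "Nion Swift Workspace.nslib")
    if not os.path.exists(path):
        with open(path, "w") as fp:
            json.dump({}, fp)
    return path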
def __init__(self) -> None:
    self.storage_cache = Cache.DictStorageCache()
    self.profile_properties = dict()
    self.__storage_system = FileStorageSystem.MemoryPersistentStorageSystem(library_properties=self.profile_properties)
    self.__storage_system.load_properties()
    # these contain the data for each project.
    self.x_project_properties = dict()
    self.x_data_properties_map = dict()
    self.x_data_map = dict()
    self.x_trash_map = dict()
    # these contain the data for the first created project. they also facilitate legacy project testing.
    self.project_uuid = None
    self.project_properties = None
    self.data_properties_map = None
    self.data_map = None
    self.trash_map = None
    self._test_data_read_event = Event.Event()
    self.__profile = None
    self.__items_exit = list()
def __establish_profile(self, profile_path: pathlib.Path) -> typing.Tuple[typing.Optional[Profile.Profile], bool]:
    assert profile_path.is_absolute()  # prevents tests from creating temporary files in test directory
    create_new_profile = not profile_path.exists()
    if create_new_profile:
        logging.getLogger("loader").info(f"Creating new profile {profile_path}")
        profile_json = json.dumps({"version": FileStorageSystem.PROFILE_VERSION, "uuid": str(uuid.uuid4())})
        profile_path.write_text(profile_json, "utf-8")
    else:
        logging.getLogger("loader").info(f"Using existing profile {profile_path}")
    storage_system = FileStorageSystem.FilePersistentStorageSystem(profile_path)
    storage_system.load_properties()
    cache_path = profile_path.parent / pathlib.Path(profile_path.stem + " Cache").with_suffix(".nscache")
    logging.getLogger("loader").info(f"Using cache {cache_path}")
    storage_cache = Cache.DbStorageCache(cache_path)
    profile = Profile.Profile(storage_system=storage_system, storage_cache=storage_cache)
    profile.read_profile()
    return profile, create_new_profile
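# Illustration of the cache path derivation above (the paths and extension are hypothetical):
# for a profile stored at "/profiles/MyProfile.nsproj", profile_path.stem is "MyProfile",
# so the cache database is written alongside it at "/profiles/MyProfile Cache.nscache".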
def __init__(self, storage_system=None, storage_cache=None, ignore_older_files=False):
    self.__storage_system = storage_system if storage_system else MemoryStorageSystem.MemoryStorageSystem()
    self.__ignore_older_files = ignore_older_files
    self.storage_cache = storage_cache if storage_cache else Cache.DictStorageCache()
    # the persistent object context allows reading/writing of objects to the persistent storage specific to them.
    # there is a single shared object context per profile.
    self.persistent_object_context = Persistence.PersistentObjectContext()
def test_ndata_handles_corrupt_data(self):
    logging.getLogger().setLevel(logging.DEBUG)
    now = datetime.datetime.now()
    current_working_directory = os.getcwd()
    data_dir = os.path.join(current_working_directory, "__Test")
    Cache.db_make_directory_if_needed(data_dir)
    try:
        zero_path = os.path.join(data_dir, "zeros.ndata")
        with open(zero_path, 'wb') as f:
            f.write(bytearray(1024))
        with self.assertRaises(IOError):
            with open(zero_path, "rb") as fp:
                NDataHandler.parse_zip(fp)
    finally:
        #logging.debug("rmtree %s", data_dir)
        shutil.rmtree(data_dir)
def __init__(self, storage_system=None, storage_cache=None, *, auto_project: bool = True):
    super().__init__()
    self.define_root_context()
    self.define_type("profile")
    self.define_relationship("workspaces", WorkspaceLayout.factory)
    self.define_relationship("data_groups", DataGroup.data_group_factory)
    self.define_relationship("project_references", project_reference_factory,
                             insert=self.__insert_project_reference, remove=self.__remove_project_reference)
    self.define_property("workspace_uuid", converter=Converter.UuidToStringConverter())
    self.define_property("data_item_references", dict(), hidden=True)  # map string key to data item, used for data acquisition channels
    self.define_property("data_item_variables", dict(), hidden=True)  # map string key to data item, used for reference in scripts
    self.define_property("target_project_reference_uuid", converter=Converter.UuidToStringConverter(), changed=self.__property_changed)
    self.define_property("work_project_reference_uuid", converter=Converter.UuidToStringConverter(), changed=self.__property_changed)
    self.define_property("closed_items", list())
    self.storage_system = storage_system or FileStorageSystem.MemoryPersistentStorageSystem()
    self.storage_system.load_properties()
    self.__work_project_reference: typing.Optional[ProjectReference] = None
    self.__target_project_reference: typing.Optional[ProjectReference] = None
    self.storage_cache = storage_cache or Cache.DictStorageCache()  # need to deallocate
    self.set_storage_system(self.storage_system)
    self.__document_model = None
    self.profile_context = None
    # helper object to produce the projects sequence
    oo = Observer.ObserverBuilder()
    oo.source(self).ordered_sequence_from_array("project_references").map(oo.x.prop("project")).filter(lambda x: x is not None).trampoline(self, "projects")
    self.__projects_observer = oo.make_observable()
    self.__is_read = False
    if auto_project:
        self.profile_context = MemoryProfileContext()
        project_reference = self.add_project_memory()
        self.work_project_reference_uuid = project_reference.uuid
        self.target_project_reference_uuid = project_reference.uuid
def __init__(self, storage_system: typing.Optional[FileStorageSystem.PersistentStorageSystem] = None,
             storage_cache: typing.Optional[Cache.CacheLike] = None, *,
             profile_context: typing.Optional[ProfileContext] = None) -> None:
    super().__init__()
    self.__class__.count += 1
    self.define_root_context()
    self.define_type("profile")
    self.define_property("last_project_reference", converter=Converter.UuidToStringConverter(), hidden=True)
    self.define_property("work_project_reference_uuid", converter=Converter.UuidToStringConverter(), hidden=True)
    self.define_property("closed_items", list(), hidden=True)
    self.define_property("script_items_updated", False, changed=self.__property_changed, hidden=True)
    self.define_relationship("project_references", project_reference_factory,
                             insert=self.__insert_project_reference, remove=self.__remove_project_reference, hidden=True)
    self.define_relationship("script_items",
                             typing.cast(typing.Callable[[typing.Callable[[str], str]], typing.Optional[Persistence.PersistentObject]], script_item_factory),
                             hidden=True)
    self.storage_system = storage_system or FileStorageSystem.MemoryPersistentStorageSystem()
    self.storage_system.load_properties()
    self.storage_cache: typing.Any = storage_cache or Cache.DictStorageCache()  # need to deallocate
    self.set_storage_system(self.storage_system)
    self.profile_context = None
    # helper object to produce the projects sequence
    oo = Observer.ObserverBuilder()
    oo.source(typing.cast(Observer.ItemValue, self)).ordered_sequence_from_array("project_references").map(oo.x.prop("project")).filter(lambda x: x is not None).trampoline(self, "projects")
    self.__projects_observer = oo.make_observable()
    if profile_context:
        self.profile_context = profile_context
        self.add_project_memory()
def test_hdf5_handler_basic_functionality(self):
    now = datetime.datetime.now()
    current_working_directory = pathlib.Path.cwd()
    data_dir = current_working_directory / "__Test"
    if data_dir.exists():
        shutil.rmtree(data_dir)
    Cache.db_make_directory_if_needed(data_dir)
    try:
        h = HDF5Handler.HDF5Handler(os.path.join(data_dir, "abc.h5"))
        with contextlib.closing(h):
            p = {u"abc": 1, u"def": u"bcd", u"uuid": str(uuid.uuid4())}
            # write properties
            h.write_properties(p, now)
            self.assertEqual(h.read_properties(), p)
            self.assertIsNone(h.read_data())
            # write data
            h.write_data(numpy.zeros((4, 4), dtype=numpy.float64), now)
            self.assertEqual(h.read_properties(), p)
            d = h.read_data()
            self.assertEqual(d.shape, (4, 4))
            self.assertEqual(d.dtype, numpy.float64)
            # rewrite data
            h.write_data(numpy.zeros((12, 12), dtype=numpy.float32), now)
            self.assertEqual(h.read_properties(), p)
            d = h.read_data()
            self.assertEqual(d.shape, (12, 12))
            self.assertEqual(d.dtype, numpy.float32)
            # rewrite properties
            h.write_properties(p, now)
            self.assertEqual(h.read_properties(), p)
            d = h.read_data()
            self.assertEqual(d.shape, (12, 12))
            self.assertEqual(d.dtype, numpy.float32)
            # reserve data
            h.reserve_data((3, 15), numpy.float32, now)
            self.assertEqual(h.read_properties(), p)
            d = h.read_data()
            self.assertEqual(d.shape, (3, 15))
            self.assertEqual(d.dtype, numpy.float32)
            self.assertTrue(numpy.allclose(d, 0))
    finally:
        #logging.debug("rmtree %s", data_dir)
        shutil.rmtree(data_dir)
def test_switching_library_closes_document_only_once(self):
    current_working_directory = os.getcwd()
    workspace1_dir = os.path.join(current_working_directory, "__Test1")
    workspace2_dir = os.path.join(current_working_directory, "__Test2")
    Cache.db_make_directory_if_needed(workspace1_dir)
    Cache.db_make_directory_if_needed(workspace2_dir)
    try:
        app = Application.Application(TestUI.UserInterface(), set_global=False)
        app.initialize(load_plug_ins=False)
        app.start(True, fixed_workspace_dir=workspace1_dir)
        app.switch_library(workspace2_dir, skip_choose=True, fixed_workspace_dir=workspace2_dir)
        app.exit()
        app.deinitialize()
    finally:
        #logging.debug("rmtree %s", workspace_dir)
        shutil.rmtree(workspace1_dir)
        shutil.rmtree(workspace2_dir)
def test_ndata_handles_discontiguous_data(self):
    logging.getLogger().setLevel(logging.DEBUG)
    now = datetime.datetime.now()
    current_working_directory = os.getcwd()
    data_dir = os.path.join(current_working_directory, "__Test")
    Cache.db_make_directory_if_needed(data_dir)
    try:
        h = NDataHandler.NDataHandler(os.path.join(data_dir, "abc.ndata"))
        with contextlib.closing(h):
            data = numpy.random.randint(0, 10, size=(10, 10))[:, 3]  # discontiguous data
            self.assertFalse(data.flags['C_CONTIGUOUS'])
            p = {u"uuid": str(uuid.uuid4())}
            # write properties
            h.write_properties(p, now)
            # write data
            h.write_data(data, now)
            d = h.read_data()
            self.assertEqual(d.shape, data.shape)
            self.assertEqual(d.dtype, data.dtype)
    finally:
        #logging.debug("rmtree %s", data_dir)
        shutil.rmtree(data_dir)
def create_profile(workspace_dir: pathlib.Path, do_logging: bool, force_create: bool) -> typing.Tuple[typing.Optional[Profile], bool]:
    library_path = _migrate_library(workspace_dir, do_logging)
    if not force_create and not os.path.exists(library_path):
        return None, False
    create_new_document = not os.path.exists(library_path)
    if do_logging:
        if create_new_document:
            logging.info(f"Creating new document: {library_path}")
        else:
            logging.info(f"Using existing document {library_path}")
    auto_migrations = list()
    auto_migrations.append(AutoMigration(pathlib.Path(workspace_dir) / "Nion Swift Workspace.nslib", [pathlib.Path(workspace_dir) / "Nion Swift Data"]))
    auto_migrations.append(AutoMigration(pathlib.Path(workspace_dir) / "Nion Swift Workspace.nslib", [pathlib.Path(workspace_dir) / "Nion Swift Data 10"]))
    auto_migrations.append(AutoMigration(pathlib.Path(workspace_dir) / "Nion Swift Workspace.nslib", [pathlib.Path(workspace_dir) / "Nion Swift Data 11"]))
    auto_migrations.append(AutoMigration(pathlib.Path(workspace_dir) / "Nion Swift Library 12.nslib", [pathlib.Path(workspace_dir) / "Nion Swift Data 12"]))
    # NOTE: when adding an AutoMigration here, also add the corresponding file copy in _migrate_library
    storage_system = FileStorageSystem.FileStorageSystem(library_path, [pathlib.Path(workspace_dir) / f"Nion Swift Data {DataItem.DataItem.storage_version}"], auto_migrations=auto_migrations)
    cache_filename = f"Nion Swift Cache {DataItem.DataItem.storage_version}.nscache"
    cache_path = workspace_dir / cache_filename
    storage_cache = Cache.DbStorageCache(cache_path)
    return Profile(storage_system=storage_system, storage_cache=storage_cache, ignore_older_files=False), create_new_document
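# Hedged usage sketch (not called anywhere; the wrapper name is hypothetical): open an
# existing library in workspace_dir, or create one when forced, using create_profile above.
def _example_open_or_create_profile(workspace_dir: pathlib.Path) -> typing.Optional[Profile]:
    profile, created_new = create_profile(workspace_dir, do_logging=True, force_create=True)
    if profile is not None and created_new:
        logging.info(f"created a new library in {workspace_dir}")
    return profile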
def __init__(self, storage_system=None, storage_cache=None, *, profile_context: typing.Optional[ProfileContext] = None):
    super().__init__()
    self.__class__.count += 1
    self.define_root_context()
    self.define_type("profile")
    self.define_relationship("project_references", project_reference_factory,
                             insert=self.__insert_project_reference, remove=self.__remove_project_reference)
    self.define_property("last_project_reference", converter=Converter.UuidToStringConverter())
    self.define_property("work_project_reference_uuid", converter=Converter.UuidToStringConverter())
    self.define_property("closed_items", list())
    self.storage_system = storage_system or FileStorageSystem.MemoryPersistentStorageSystem()
    self.storage_system.load_properties()
    self.storage_cache = storage_cache or Cache.DictStorageCache()  # need to deallocate
    self.set_storage_system(self.storage_system)
    self.profile_context = None
    # helper object to produce the projects sequence
    oo = Observer.ObserverBuilder()
    oo.source(self).ordered_sequence_from_array("project_references").map(oo.x.prop("project")).filter(lambda x: x is not None).trampoline(self, "projects")
    self.__projects_observer = oo.make_observable()
    if profile_context:
        self.profile_context = profile_context
        self.add_project_memory()
def choose_library(self):
    pose_open_library_dialog_fn = self.pose_open_library_dialog

    workspace_history = self.ui.get_persistent_object("workspace_history", list())
    nslib_paths = [os.path.join(file_path, "Nion Swift Workspace.nslib") for file_path in workspace_history]
    items = [(path, datetime.datetime.fromtimestamp(os.path.getmtime(path))) for path in nslib_paths if os.path.exists(path)]

    class ChooseLibraryDialog(Dialog.ActionDialog):

        def __init__(self, ui, app):
            super().__init__(ui, _("Choose Library"))

            current_item_ref = [None]

            def handle_choose():
                current_item = current_item_ref[0]
                if current_item:
                    app.switch_library(current_item)
                    return True
                return False

            def handle_new():
                workspace_dir = pose_open_library_dialog_fn()
                if workspace_dir:
                    items.insert(0, (workspace_dir, datetime.datetime.now()))
                    list_widget.items = items
                    list_widget.set_selected_index(0)
                    app.switch_library(workspace_dir)
                    return True
                return False

            self.add_button(_("New..."), handle_new)
            self.add_button(_("Other..."), handle_new)
            self.add_button(_("Cancel"), lambda: True)
            self.add_button(_("Choose"), handle_choose)

            path_label = ui.create_label_widget()

            prompt_row = ui.create_row_widget()
            prompt_row.add_spacing(13)
            prompt_row.add(ui.create_label_widget(_("Which library do you want Nion Swift to use?"), properties={"stylesheet": "font-weight: bold"}))
            prompt_row.add_spacing(13)
            prompt_row.add_stretch()

            explanation1_row = ui.create_row_widget()
            explanation1_row.add_spacing(13)
            explanation1_row.add(ui.create_label_widget(_("You can select a library from the list, find another library, or create a new library.")))
            explanation1_row.add_spacing(13)
            explanation1_row.add_stretch()

            explanation2_row = ui.create_row_widget()
            explanation2_row.add_spacing(13)
            explanation2_row.add(ui.create_label_widget(_("The same library will be used the next time you open Nion Swift.")))
            explanation2_row.add_spacing(13)
            explanation2_row.add_stretch()

            def selection_changed(indexes):
                if len(indexes) == 1:
                    item = items[list(indexes)[0]]
                    current_item_ref[0] = os.path.dirname(item[0])
                    path_label.text = os.path.dirname(item[0])
                else:
                    current_item_ref[0] = None
                    path_label.text = None

            def stringify_item(item):
                date_utc = item[1]
                tz_minutes = Utility.local_utcoffset_minutes(date_utc)
                date_local = date_utc + datetime.timedelta(minutes=tz_minutes)
                return str(os.path.basename(os.path.dirname(item[0]))) + " (" + date_local.strftime("%c") + ")"

            def item_selected(index):
                item = items[index]
                current_item_ref[0] = os.path.dirname(item[0])
                path_label.text = os.path.dirname(item[0])
                handle_choose()
                self.request_close()

            list_widget = Widgets.StringListWidget(ui, items=items, selection_style=Selection.Style.single_or_none, item_getter=stringify_item, border_color="#888", properties={"min-height": 200, "min-width": 560})
            list_widget.on_selection_changed = selection_changed
            list_widget.on_item_selected = item_selected
            list_widget.on_cancel = self.request_close
            if len(items) > 0:
                list_widget.set_selected_index(0)

            items_row = ui.create_row_widget()
            items_row.add_spacing(13)
            items_row.add(list_widget)
            items_row.add_spacing(13)
            items_row.add_stretch()

            path_row = ui.create_row_widget()
            path_row.add_spacing(13)
            path_row.add(path_label)
            path_row.add_spacing(13)
            path_row.add_stretch()

            column = ui.create_column_widget()
            column.add_spacing(18)
            column.add(prompt_row)
            column.add_spacing(6)
            column.add(explanation1_row)
            column.add(explanation2_row)
            column.add_spacing(12)
            column.add(items_row)
            column.add_spacing(6)
            column.add(path_row)
            column.add_spacing(6)
            column.add_stretch()

            self.content.add(column)

            self.__list_widget = list_widget

        def show(self):
            super().show()
            self.__list_widget.focused = True

    if len(items) == 0:
        # for initial startup (or with no preferences) try to use a default location
        # to avoid the user having to go through the horrendous choose dialog immediately.
        try:
            documents_dir = self.ui.get_document_location()
            workspace_dir = os.path.join(documents_dir, "Nion Swift Library")
            Cache.db_make_directory_if_needed(workspace_dir)
            path = os.path.join(workspace_dir, "Nion Swift Workspace.nslib")
            if not os.path.exists(path):
                with open(path, "w") as fp:
                    json.dump({}, fp)
            if os.path.exists(path):
                self.switch_library(workspace_dir)
                return
        except Exception as e:
            pass

    choose_library_dialog = ChooseLibraryDialog(self.ui, self)
    choose_library_dialog.show()
def __init__(self):
    self.__storage_system = MemoryStorageSystem.MemoryStorageSystem()
    self.storage_cache = Cache.DictStorageCache()