class AppletSerializer(object):
    """Base class for all AppletSerializers.

    Manages a list of SerialSlots and drives their (de)serialization to and
    from this applet's top-level group of an ilastik project (HDF5) file.
    """
    # Force subclasses to override abstract methods and properties
    __metaclass__ = ABCMeta
    base_initialized = False

    # override if necessary
    version = "0.1"

    #########################
    # Semi-abstract methods #
    #########################

    def _serializeToHdf5(self, topGroup, hdf5File, projectFilePath):
        """Child classes should override this function, if necessary."""
        pass

    def _deserializeFromHdf5(self, topGroup, groupVersion, hdf5File,
                             projectFilePath, headless=False):
        """Child classes should override this function, if necessary."""
        pass

    #############################
    # Base class implementation #
    #############################

    def __init__(self, topGroupName, slots=None, operator=None):
        """Constructor. Subclasses must call this method in their own
        __init__ functions. If they fail to do so, the shell raises an
        exception.

        :param topGroupName: name of this applet's data group in the file.
            Defaults to the name of the operator.
        :param slots: a list of SerialSlots
        :param operator: the operator this serializer belongs to, if any
        """
        self.progressSignal = SimpleSignal()  # Signature: emit(percentComplete)
        self.base_initialized = True
        self.topGroupName = topGroupName
        self.serialSlots = maybe(slots, [])
        self.operator = operator
        # Whether _deserializeFromHdf5 should be called with the headless
        # argument.
        self.caresOfHeadless = False
        self._ignoreDirty = False

    def isDirty(self):
        """Returns true if the current state of this item (in memory) does
        not match the state of the HDF5 group on disk.

        Subclasses only need override this method if ORing the flags is not
        enough.
        """
        # Fixed: feed the generator to any() directly; no need to
        # materialize an intermediate list.
        return any(ss.dirty for ss in self.serialSlots)

    @property
    def ignoreDirty(self):
        """Whether dirty-state tracking is suppressed for this serializer."""
        return self._ignoreDirty

    @ignoreDirty.setter
    def ignoreDirty(self, value):
        self._ignoreDirty = value
        # Keep every serial slot in sync with the serializer-level flag.
        for ss in self.serialSlots:
            ss.ignoreDirty = value

    def unload(self):
        """Called if either (1) the user closed the project or (2) the
        project opening process needs to be aborted for some reason (e.g.
        not all items could be deserialized properly due to a corrupted
        ilp). This way we can avoid invalid state due to a partially loaded
        project.
        """
        for ss in self.serialSlots:
            ss.unload()

    def progressIncrement(self, group=None):
        """Get the percentage progress for each slot.

        :param group: If None, all slots are assumed to be processed.
            Otherwise, decides for each slot by calling
            slot.shouldSerialize(group).
        :return: integer percent-per-slot; 0 if no slot will be processed.
        """
        if group is None:
            nslots = len(self.serialSlots)
        else:
            nslots = sum(ss.shouldSerialize(group) for ss in self.serialSlots)
        if nslots == 0:
            return 0
        # Integer division; the final emit(100) covers any rounding loss.
        return divmod(100, nslots)[0]

    def serializeToHdf5(self, hdf5File, projectFilePath):
        """Serialize the current applet state to the given hdf5 file.

        Subclasses should **not** override this method. Instead, subclasses
        override the 'private' version, *_serializeToHdf5*.

        :param hdf5File: An h5py.File handle to the project file, which
            should already be open
        :param projectFilePath: The path to the given file handle. (Most
            serializers do not use this parameter.)
        """
        # Check the overall file version
        fileVersion = hdf5File["ilastikVersion"].value

        # Make sure we can find our way around the project tree
        if not isVersionCompatible(fileVersion):
            return

        topGroup = getOrCreateGroup(hdf5File, self.topGroupName)

        progress = 0
        self.progressSignal.emit(progress)

        # Set the version
        key = 'StorageVersion'
        deleteIfPresent(topGroup, key)
        topGroup.create_dataset(key, data=self.version)

        try:
            inc = self.progressIncrement(topGroup)
            for ss in self.serialSlots:
                ss.serialize(topGroup)
                progress += inc
                self.progressSignal.emit(progress)

            # Call the subclass to do remaining work, if any
            self._serializeToHdf5(topGroup, hdf5File, projectFilePath)
        finally:
            # Always report completion, even on error.
            self.progressSignal.emit(100)

    def deserializeFromHdf5(self, hdf5File, projectFilePath, headless=False):
        """Read the current applet state from the given hdf5File handle,
        which should already be open.

        Subclasses should **not** override this method. Instead, subclasses
        override the 'private' version, *_deserializeFromHdf5*.

        :param hdf5File: An h5py.File handle to the project file, which
            should already be open
        :param projectFilePath: The path to the given file handle. (Most
            serializers do not use this parameter.)
        :param headless: Are we called in headless mode? (in headless mode
            corrupted files cannot be fixed via the GUI)
        """
        # Check the overall file version
        fileVersion = hdf5File["ilastikVersion"].value

        # Make sure we can find our way around the project tree
        if not isVersionCompatible(fileVersion):
            return

        progress = 0
        self.progressSignal.emit(progress)

        # If the top group isn't there, call initWithoutTopGroup
        try:
            topGroup = hdf5File[self.topGroupName]
            groupVersion = topGroup['StorageVersion'][()]
        except KeyError:
            topGroup = None
            groupVersion = None

        try:
            if topGroup is not None:
                inc = self.progressIncrement()
                for ss in self.serialSlots:
                    ss.deserialize(topGroup)
                    # Fixed: accumulate progress instead of repeatedly
                    # emitting the same per-slot increment (previously the
                    # progress bar never advanced past `inc`; compare
                    # serializeToHdf5, which accumulates correctly).
                    progress += inc
                    self.progressSignal.emit(progress)

                # Call the subclass to do remaining work
                if self.caresOfHeadless:
                    self._deserializeFromHdf5(topGroup, groupVersion,
                                              hdf5File, projectFilePath,
                                              headless)
                else:
                    self._deserializeFromHdf5(topGroup, groupVersion,
                                              hdf5File, projectFilePath)
            else:
                self.initWithoutTopGroup(hdf5File, projectFilePath)
        finally:
            # Always report completion, even on error.
            self.progressSignal.emit(100)

    def repairFile(self, path, filt=None):
        """Ask the user for a new path to a lost external file.

        :param path: the stale path that could no longer be found
        :param filt: an optional file-name filter for the dialog
        :return: the user-selected replacement path (str)
        :raises RuntimeError: if the user cancels or selects nothing
        """
        # Imported here so headless runs never pull in Qt.
        from PyQt4.QtGui import QFileDialog, QMessageBox

        text = "The file at {} could not be found any more. Do you want to search for it at another directory?".format(path)
        c = QMessageBox.critical(None, "update external data", text,
                                 QMessageBox.Ok | QMessageBox.Cancel)
        if c == QMessageBox.Cancel:
            raise RuntimeError("Could not find external data: " + path)

        options = QFileDialog.Options()
        if ilastik_config.getboolean("ilastik", "debug"):
            options |= QFileDialog.DontUseNativeDialog
        fileName = QFileDialog.getOpenFileName(None, "repair files", path,
                                               filt, options=options)
        if fileName.isEmpty():
            raise RuntimeError("Could not find external data: " + path)
        else:
            return str(fileName)

    #######################
    # Optional methods    #
    #######################

    def initWithoutTopGroup(self, hdf5File, projectFilePath):
        """Optional override for subclasses. Called when there is no top
        group to deserialize.
        """
        pass

    def updateWorkingDirectory(self, newdir, olddir):
        """Optional override for subclasses. Called when the working
        directory is changed and relative paths have to be updated. Child
        classes should overwrite this method if they store relative paths.
        """
        pass
class AppletSerializer(object):
    """Abstract base class for applet serializers.

    Subclasses implement the abstract hdf5 read/write hooks; this base
    class drives the top-level (de)serialization protocol: version check,
    progress signalling, and top-group lookup/creation.
    """
    __metaclass__ = ABCMeta  # Force subclasses to override abstract methods and properties

    # Set to True by __init__; exposed via the base_initialized property so
    # the shell can verify subclasses called our __init__.
    _base_initialized = False

    ####################
    # Abstract methods #
    ####################

    @abstractmethod
    def _serializeToHdf5(self, topGroup, hdf5File, projectFilePath):
        """Write the applet's data to hdf5.

        Args:
            topGroup -- The hdf5Group object this serializer is responsible for
            hdf5File -- An hdf5File object (already open)
            projectFilePath -- The path to the project file (a string)
                               (Most serializers don't need to use this parameter)
        """
        raise NotImplementedError

    @abstractmethod
    def _deserializeFromHdf5(self, topGroup, groupVersion, hdf5File, projectFilePath):
        """Read the applet's data from hdf5.

        Args:
            topGroup -- The hdf5Group object this serializer is responsible for
            groupVersion -- The 'StorageVersion' value recorded in the group
            hdf5File -- An hdf5File object (already open)
            projectFilePath -- The path to the project file (a string)
                               (Most serializers don't need to use this parameter)
        """
        raise NotImplementedError

    @abstractmethod
    def isDirty(self):
        """Return true if the current state of this item (in memory) does
        not match the state of the HDF5 group on disk.

        SerializableItems are responsible for tracking their own
        dirty/notdirty state.
        """
        raise NotImplementedError

    @abstractmethod
    def unload(self):
        """Called if either (1) the user closed the project or (2) the
        project opening process needs to be aborted for some reason (e.g.
        not all items could be deserialized properly due to a corrupted
        ilp). This way we can avoid invalid state due to a partially loaded
        project.
        """
        raise NotImplementedError

    ####################
    # Optional methods #
    ####################

    def initWithoutTopGroup(self, hdf5File, projectFilePath):
        """Optional override for subclasses. Called when there is no top
        group to deserialize.

        Gives the applet a chance to inspect the hdf5File or project path,
        even though no top group is present in the file.
        """
        pass

    #######################
    # Convenience methods #
    #######################

    @classmethod
    def getOrCreateGroup(cls, parentGroup, groupName):
        """Convenience helper. Returns parentGroup[groupName], creating it
        first if necessary.
        """
        try:
            return parentGroup[groupName]
        except KeyError:
            return parentGroup.create_group(groupName)

    @classmethod
    def deleteIfPresent(cls, parentGroup, name):
        """Convenience helper. Deletes parentGroup[name], if it exists."""
        try:
            del parentGroup[name]
        except KeyError:
            pass

    @property
    def version(self):
        """Return the version of the serializer itself."""
        return self._version

    @property
    def topGroupName(self):
        # Name of this applet's top-level group in the project file.
        return self._topGroupName

    #############################
    # Base class implementation #
    #############################

    def __init__(self, topGroupName, version):
        """Store group name and version. Subclasses must call this from
        their own __init__.
        """
        self._version = version
        self._topGroupName = topGroupName
        self.progressSignal = SimpleSignal()  # Signature: emit(percentComplete)
        self._base_initialized = True

    def serializeToHdf5(self, hdf5File, projectFilePath):
        """Write this applet's top group and version marker to the (open)
        project file, then delegate the actual data writing to
        _serializeToHdf5.
        """
        # Check the overall file version
        ilastikVersion = hdf5File["ilastikVersion"].value

        # Make sure we can find our way around the project tree
        if not VersionManager.isProjectFileVersionCompatible(ilastikVersion):
            return

        self.progressSignal.emit(0)

        topGroup = self.getOrCreateGroup(hdf5File, self.topGroupName)

        # Set the version
        if 'StorageVersion' not in topGroup.keys():
            topGroup.create_dataset('StorageVersion', data=self._version)
        else:
            topGroup['StorageVersion'][()] = self._version

        try:
            # Call the subclass to do the actual work
            self._serializeToHdf5(topGroup, hdf5File, projectFilePath)
        finally:
            # Always report completion, even if serialization raised.
            self.progressSignal.emit(100)

    def deserializeFromHdf5(self, hdf5File, projectFilePath):
        """Read this applet's state back from the (open) project file,
        delegating the actual data reading to _deserializeFromHdf5.
        """
        # Check the overall file version
        ilastikVersion = hdf5File["ilastikVersion"].value

        # Make sure we can find our way around the project tree
        if not VersionManager.isProjectFileVersionCompatible(ilastikVersion):
            return

        self.progressSignal.emit(0)

        # If the top group isn't there, call initWithoutTopGroup
        try:
            topGroup = hdf5File[self.topGroupName]
            groupVersion = topGroup['StorageVersion'][()]
        except KeyError:
            topGroup = None
            groupVersion = None

        try:
            if topGroup is not None:
                # Call the subclass to do the actual work
                self._deserializeFromHdf5(topGroup, groupVersion, hdf5File, projectFilePath)
            else:
                self.initWithoutTopGroup(hdf5File, projectFilePath)
        finally:
            # Always report completion, even if deserialization raised.
            self.progressSignal.emit(100)

    @property
    def base_initialized(self):
        """Do not override this property.

        Used by the shell to ensure that AppletSerializer.__init__ was
        called by your subclass.
        """
        return self._base_initialized
class SerialPredictionSlot(SerialSlot):
    """Serializes an operator's prediction volumes to the project file,
    with progress reporting and user cancellation while the (potentially
    large) volumes are written.
    """

    def __init__(self, slot, operator, inslot=None, name=None, subname=None,
                 default=None, depends=None, selfdepends=True):
        """
        :param slot: the (multi-image) slot whose contents are serialized
        :param operator: the owning operator; must expose
            PredictionsFromDisk, FreezePredictions, graph and parent.
        The remaining parameters are forwarded unchanged to SerialSlot.
        """
        super(SerialPredictionSlot, self).__init__(
            slot, inslot, name, subname, default, depends, selfdepends
        )
        self.operator = operator
        self.progressSignal = SimpleSignal()  # Signature: emit(percentComplete)
        self._predictionStorageEnabled = False
        self._predictionStorageRequest = None  # in-flight write request, if any
        self._predictionsPresent = False       # predictions exist in the file

    def setDirty(self, *args, **kwargs):
        # Any upstream change invalidates whatever predictions are on disk.
        self.dirty = True
        self._predictionsPresent = False

    @property
    def predictionStorageEnabled(self):
        """Whether full prediction volumes should be written to the file."""
        return self._predictionStorageEnabled

    @predictionStorageEnabled.setter
    def predictionStorageEnabled(self, value):
        self._predictionStorageEnabled = value
        # If no predictions are stored yet, enabling storage means there is
        # something new to write.
        if not self._predictionsPresent:
            self.dirty = True

    def cancel(self):
        """Abort an in-flight prediction write, if any."""
        if self._predictionStorageRequest is not None:
            self.predictionStorageEnabled = False
            self._predictionStorageRequest.cancel()

    def shouldSerialize(self, group):
        # Serialize only if the base class agrees AND storage is enabled.
        result = super(SerialPredictionSlot, self).shouldSerialize(group)
        result &= self.predictionStorageEnabled
        return result

    def _disconnect(self):
        # Detach the disk-backed prediction inputs so the old data is no
        # longer referenced while it is being overwritten.
        for i, slot in enumerate(self.operator.PredictionsFromDisk):
            slot.disconnect()

    def serialize(self, group):
        if not self.shouldSerialize(group):
            return
        self._disconnect()
        super(SerialPredictionSlot, self).serialize(group)
        # Re-wire the freshly written data back into the operator.
        self.deserialize(group)

    def _serialize(self, group, name, slot):
        """Called when the currently stored predictions are dirty. If
        prediction storage is currently enabled, store them to the file.
        Otherwise, just delete them. (Avoid inconsistent project states,
        e.g. don't allow old predictions to be stored with a new
        classifier.)
        """
        predictionDir = group.create_group(self.name)
        # Disconnect the operators that might be using the old data.
        self.deserialize(group)

        failedToSave = False
        opWriter = None
        try:
            num = len(slot)
            if num > 0:
                increment = 100 / float(num)  # percent of total per image
                progress = 0
                for imageIndex in range(num):
                    # Have we been cancelled?
                    if not self.predictionStorageEnabled:
                        break

                    datasetName = self.subname.format(imageIndex)

                    # Use a big dataset writer to do this in chunks
                    opWriter = OpH5WriterBigDataset(graph=self.operator.graph,
                                                    parent=self.operator.parent)
                    opWriter.hdf5File.setValue(predictionDir)
                    opWriter.hdf5Path.setValue(datasetName)
                    opWriter.Image.connect(slot[imageIndex])

                    def handleProgress(percent):
                        # Stop sending progress if we were cancelled.
                        # NOTE(review): `progress` is captured by closure;
                        # this relies on the writer finishing before
                        # `progress` is incremented below — confirm.
                        if self.predictionStorageEnabled:
                            curprogress = progress + percent * (increment / 100.0)
                            self.progressSignal.emit(curprogress)
                    opWriter.progressSignal.subscribe(handleProgress)

                    # Create the request
                    self._predictionStorageRequest = opWriter.WriteImage[...]

                    # Must use a threading event here because if we wait on
                    # the request from within a "real" thread, it refuses to
                    # be cancelled.
                    finishedEvent = threading.Event()

                    def handleFinish(result):
                        finishedEvent.set()

                    def handleCancel():
                        logger.info("Full volume prediction save CANCELLED.")
                        self._predictionStorageRequest = None
                        finishedEvent.set()

                    # Trigger the write and wait for it to complete or cancel.
                    self._predictionStorageRequest.notify_finished(handleFinish)
                    self._predictionStorageRequest.notify_cancelled(handleCancel)
                    self._predictionStorageRequest.submit()
                    # Can't call wait(). See note above.
                    finishedEvent.wait()

                    progress += increment
                    opWriter.cleanUp()
                    opWriter = None
        except:
            # Bare except on purpose: remember the failure for the cleanup
            # below, then re-raise unchanged.
            failedToSave = True
            raise
        finally:
            if opWriter is not None:
                opWriter.cleanUp()

            # If we were cancelled, delete the predictions we just started
            if not self.predictionStorageEnabled or failedToSave:
                deleteIfPresent(group, name)

    def deserialize(self, group):
        # override because we need to set self._predictionsPresent
        self._predictionsPresent = self.name in group.keys()
        super(SerialPredictionSlot, self).deserialize(group)

    def _deserialize(self, group, slot):
        # Flush the GUI cache of any saved up dirty rois
        if self.operator.FreezePredictions.value == True:
            self.operator.FreezePredictions.setValue(False)
            self.operator.FreezePredictions.setValue(True)

        #self.operator.PredictionsFromDisk.resize(len(group))
        if len(group.keys()) > 0:
            assert len(group.keys()) == len(self.operator.PredictionsFromDisk), "Expected to find the same number of on-disk predications as there are images loaded."
        else:
            for slot in self.operator.PredictionsFromDisk:
                slot.disconnect()

        # Stream each stored dataset back into the operator from disk.
        for imageIndex, datasetName in enumerate(group.keys()):
            opStreamer = OpStreamingHdf5Reader(graph=self.operator.graph,
                                               parent=self.operator.parent)
            opStreamer.Hdf5File.setValue(group)
            opStreamer.InternalPath.setValue(datasetName)
            self.operator.PredictionsFromDisk[imageIndex].connect(opStreamer.OutputImage)
class SerialPredictionSlot(SerialSlot):
    """SerialSlot specialization that writes an operator's prediction
    volumes to the project file, supporting progress reporting and
    cancellation of the long-running write.
    """

    def __init__(self, slot, operator, inslot=None, name=None, subname=None,
                 default=None, depends=None, selfdepends=True):
        """
        :param slot: the (multi-image) slot to serialize
        :param operator: the owning operator (provides PredictionsFromDisk,
            FreezePredictions, graph, parent)
        All other parameters are passed through to SerialSlot unchanged.
        """
        super(SerialPredictionSlot, self).__init__(slot, inslot, name,
                                                   subname, default, depends,
                                                   selfdepends)
        self.operator = operator
        self.progressSignal = SimpleSignal()  # Signature: emit(percentComplete)
        self._predictionStorageEnabled = False
        self._predictionStorageRequest = None  # the pending write request, if any
        self._predictionsPresent = False       # whether predictions exist on disk

    def setDirty(self, *args, **kwargs):
        # Upstream changes invalidate any stored predictions.
        self.dirty = True
        self._predictionsPresent = False

    @property
    def predictionStorageEnabled(self):
        """True if full prediction volumes should be stored in the file."""
        return self._predictionStorageEnabled

    @predictionStorageEnabled.setter
    def predictionStorageEnabled(self, value):
        self._predictionStorageEnabled = value
        # Enabling storage with nothing stored yet means there is work to do.
        if not self._predictionsPresent:
            self.dirty = True

    def cancel(self):
        """Cancel an in-flight prediction write, if one is pending."""
        if self._predictionStorageRequest is not None:
            self.predictionStorageEnabled = False
            self._predictionStorageRequest.cancel()

    def shouldSerialize(self, group):
        # Base-class decision AND the storage-enabled flag.
        result = super(SerialPredictionSlot, self).shouldSerialize(group)
        result &= self.predictionStorageEnabled
        return result

    def _disconnect(self):
        # Detach disk-backed inputs before the stored data is replaced.
        for i, slot in enumerate(self.operator.PredictionsFromDisk):
            slot.disconnect()

    def serialize(self, group):
        if not self.shouldSerialize(group):
            return
        self._disconnect()
        super(SerialPredictionSlot, self).serialize(group)
        # Reconnect the operator to the data that was just written.
        self.deserialize(group)

    def _serialize(self, group, name, slot):
        """Called when the currently stored predictions are dirty. If
        prediction storage is currently enabled, store them to the file.
        Otherwise, just delete them. (Avoid inconsistent project states,
        e.g. don't allow old predictions to be stored with a new
        classifier.)
        """
        predictionDir = group.create_group(self.name)
        # Disconnect the operators that might be using the old data.
        self.deserialize(group)

        failedToSave = False
        opWriter = None
        try:
            num = len(slot)
            if num > 0:
                increment = 100 / float(num)  # per-image share of total progress
                progress = 0
                for imageIndex in range(num):
                    # Have we been cancelled?
                    if not self.predictionStorageEnabled:
                        break

                    datasetName = self.subname.format(imageIndex)

                    # Use a big dataset writer to do this in chunks
                    opWriter = OpH5WriterBigDataset(graph=self.operator.graph,
                                                    parent=self.operator.parent)
                    opWriter.hdf5File.setValue(predictionDir)
                    opWriter.hdf5Path.setValue(datasetName)
                    opWriter.Image.connect(slot[imageIndex])

                    def handleProgress(percent):
                        # Stop sending progress if we were cancelled.
                        # NOTE(review): closure reads `progress` at call
                        # time; assumes the writer completes before the
                        # increment below — confirm.
                        if self.predictionStorageEnabled:
                            curprogress = progress + percent * (increment / 100.0)
                            self.progressSignal.emit(curprogress)
                    opWriter.progressSignal.subscribe(handleProgress)

                    # Create the request
                    self._predictionStorageRequest = opWriter.WriteImage[...]

                    # Must use a threading event here because if we wait on
                    # the request from within a "real" thread, it refuses to
                    # be cancelled.
                    finishedEvent = threading.Event()

                    def handleFinish(result):
                        finishedEvent.set()

                    def handleCancel():
                        logger.info("Full volume prediction save CANCELLED.")
                        self._predictionStorageRequest = None
                        finishedEvent.set()

                    # Trigger the write and wait for it to complete or cancel.
                    self._predictionStorageRequest.notify_finished(handleFinish)
                    self._predictionStorageRequest.notify_cancelled(handleCancel)
                    self._predictionStorageRequest.submit()
                    # Can't call wait(). See note above.
                    finishedEvent.wait()

                    progress += increment
                    opWriter.cleanUp()
                    opWriter = None
        except:
            # Deliberate bare except: flag the failure for cleanup, then
            # re-raise the original exception unchanged.
            failedToSave = True
            raise
        finally:
            if opWriter is not None:
                opWriter.cleanUp()

            # If we were cancelled, delete the predictions we just started
            if not self.predictionStorageEnabled or failedToSave:
                deleteIfPresent(group, name)

    def deserialize(self, group):
        # override because we need to set self._predictionsPresent
        self._predictionsPresent = self.name in group.keys()
        super(SerialPredictionSlot, self).deserialize(group)

    def _deserialize(self, group, slot):
        # Flush the GUI cache of any saved up dirty rois
        if self.operator.FreezePredictions.value == True:
            self.operator.FreezePredictions.setValue(False)
            self.operator.FreezePredictions.setValue(True)

        #self.operator.PredictionsFromDisk.resize(len(group))
        if len(group.keys()) > 0:
            assert len(group.keys()) == len(
                self.operator.PredictionsFromDisk
            ), "Expected to find the same number of on-disk predications as there are images loaded."
        else:
            for slot in self.operator.PredictionsFromDisk:
                slot.disconnect()

        # Stream each stored dataset back into the operator from disk.
        for imageIndex, datasetName in enumerate(group.keys()):
            opStreamer = OpStreamingHdf5Reader(graph=self.operator.graph,
                                               parent=self.operator.parent)
            opStreamer.Hdf5File.setValue(group)
            opStreamer.InternalPath.setValue(datasetName)
            self.operator.PredictionsFromDisk[imageIndex].connect(
                opStreamer.OutputImage)
class AppletSerializer(object):
    """Base class for all AppletSerializers.

    Manages a list of SerialSlots and drives their (de)serialization to and
    from this applet's top-level group of an ilastik project (HDF5) file.
    """
    # Force subclasses to override abstract methods and properties
    __metaclass__ = ABCMeta
    base_initialized = False

    # override if necessary
    version = "0.1"

    class IncompatibleProjectVersionError(Exception):
        # NOTE(review): not raised in this class body; presumably raised by
        # subclasses or callers — confirm.
        pass

    #########################
    # Semi-abstract methods #
    #########################

    def _serializeToHdf5(self, topGroup, hdf5File, projectFilePath):
        """Child classes should override this function, if necessary."""
        pass

    def _deserializeFromHdf5(self, topGroup, groupVersion, hdf5File,
                             projectFilePath, headless=False):
        """Child classes should override this function, if necessary."""
        pass

    #############################
    # Base class implementation #
    #############################

    def __init__(self, topGroupName, slots=None, operator=None):
        """Constructor. Subclasses must call this method in their own
        __init__ functions. If they fail to do so, the shell raises an
        exception.

        :param topGroupName: name of this applet's data group in the file.
            Defaults to the name of the operator.
        :param slots: a list of SerialSlots
        :param operator: the operator this serializer belongs to, if any
        """
        self.progressSignal = SimpleSignal()  # Signature: emit(percentComplete)
        self.base_initialized = True
        self.topGroupName = topGroupName
        self.serialSlots = maybe(slots, [])
        self.operator = operator
        # Whether _deserializeFromHdf5 should be called with the headless
        # argument.
        self.caresOfHeadless = False
        self._ignoreDirty = False

    def isDirty(self):
        """Returns true if the current state of this item (in memory) does
        not match the state of the HDF5 group on disk.

        Subclasses only need override this method if ORing the flags is not
        enough.
        """
        # Fixed: feed the generator to any() directly; no need to
        # materialize an intermediate list.
        return any(ss.dirty for ss in self.serialSlots)

    def shouldSerialize(self, hdf5File):
        """Whether to serialize or not."""
        if self.isDirty():
            return True

        # Need to check if slots should be serialized. First must verify
        # that self.topGroupName is not an empty string (as this seems to
        # happen sometimes).
        if self.topGroupName:
            topGroup = getOrCreateGroup(hdf5File, self.topGroupName)
            return any(ss.shouldSerialize(topGroup) for ss in self.serialSlots)

        return False

    @property
    def ignoreDirty(self):
        """Whether dirty-state tracking is suppressed for this serializer."""
        return self._ignoreDirty

    @ignoreDirty.setter
    def ignoreDirty(self, value):
        self._ignoreDirty = value
        # Keep every serial slot in sync with the serializer-level flag.
        for ss in self.serialSlots:
            ss.ignoreDirty = value

    def progressIncrement(self, group=None):
        """Get the percentage progress for each slot.

        :param group: If None, all slots are assumed to be processed.
            Otherwise, decides for each slot by calling
            slot.shouldSerialize(group).
        :return: integer percent-per-slot; 0 if no slot will be processed.
        """
        if group is None:
            nslots = len(self.serialSlots)
        else:
            nslots = sum(ss.shouldSerialize(group) for ss in self.serialSlots)
        if nslots == 0:
            return 0
        # Integer division; the final emit(100) covers any rounding loss.
        return divmod(100, nslots)[0]

    def serializeToHdf5(self, hdf5File, projectFilePath):
        """Serialize the current applet state to the given hdf5 file.

        Subclasses should **not** override this method. Instead, subclasses
        override the 'private' version, *_serializeToHdf5*.

        :param hdf5File: An h5py.File handle to the project file, which
            should already be open
        :param projectFilePath: The path to the given file handle. (Most
            serializers do not use this parameter.)
        """
        topGroup = getOrCreateGroup(hdf5File, self.topGroupName)

        progress = 0
        self.progressSignal.emit(progress)

        # Set the version
        key = 'StorageVersion'
        deleteIfPresent(topGroup, key)
        topGroup.create_dataset(key, data=self.version)

        try:
            inc = self.progressIncrement(topGroup)
            for ss in self.serialSlots:
                ss.serialize(topGroup)
                progress += inc
                self.progressSignal.emit(progress)

            # Call the subclass to do remaining work, if any
            self._serializeToHdf5(topGroup, hdf5File, projectFilePath)
        finally:
            # Always report completion, even on error.
            self.progressSignal.emit(100)

    def deserializeFromHdf5(self, hdf5File, projectFilePath, headless=False):
        """Read the current applet state from the given hdf5File handle,
        which should already be open.

        Subclasses should **not** override this method. Instead, subclasses
        override the 'private' version, *_deserializeFromHdf5*.

        :param hdf5File: An h5py.File handle to the project file, which
            should already be open
        :param projectFilePath: The path to the given file handle. (Most
            serializers do not use this parameter.)
        :param headless: Are we called in headless mode? (in headless mode
            corrupted files cannot be fixed via the GUI)
        """
        progress = 0
        self.progressSignal.emit(progress)

        # If the top group isn't there, call initWithoutTopGroup
        try:
            topGroup = hdf5File[self.topGroupName]
            groupVersion = topGroup['StorageVersion'][()]
        except KeyError:
            topGroup = None
            groupVersion = None

        try:
            if topGroup is not None:
                inc = self.progressIncrement()
                for ss in self.serialSlots:
                    ss.deserialize(topGroup)
                    # Fixed: accumulate progress instead of repeatedly
                    # emitting the same per-slot increment (previously the
                    # progress bar never advanced past `inc`; compare
                    # serializeToHdf5, which accumulates correctly).
                    progress += inc
                    self.progressSignal.emit(progress)

                # Call the subclass to do remaining work
                if self.caresOfHeadless:
                    self._deserializeFromHdf5(topGroup, groupVersion,
                                              hdf5File, projectFilePath,
                                              headless)
                else:
                    self._deserializeFromHdf5(topGroup, groupVersion,
                                              hdf5File, projectFilePath)
            else:
                self.initWithoutTopGroup(hdf5File, projectFilePath)
        finally:
            # Always report completion, even on error.
            self.progressSignal.emit(100)

    def repairFile(self, path, filt=None):
        """Ask the user for a new path to a lost external file.

        :param path: the stale path that could no longer be found
        :param filt: an optional file-name filter for the dialog
        :return: the user-selected replacement path (encoded str)
        :raises RuntimeError: if the user cancels or selects nothing
        """
        # Imported here so headless runs never pull in Qt.
        from PyQt4.QtGui import QFileDialog, QMessageBox
        from volumina.utility import encode_from_qstring

        text = "The file at {} could not be found any more. Do you want to search for it at another directory?".format(path)
        logger.info(text)
        c = QMessageBox.critical(None, "update external data", text,
                                 QMessageBox.Ok | QMessageBox.Cancel)
        if c == QMessageBox.Cancel:
            raise RuntimeError("Could not find external data: " + path)

        options = QFileDialog.Options()
        if ilastik_config.getboolean("ilastik", "debug"):
            options |= QFileDialog.DontUseNativeDialog
        fileName = QFileDialog.getOpenFileName(None, "repair files", path,
                                               filt, options=options)
        if fileName.isEmpty():
            raise RuntimeError("Could not find external data: " + path)
        else:
            return encode_from_qstring(fileName)

    #######################
    # Optional methods    #
    #######################

    def initWithoutTopGroup(self, hdf5File, projectFilePath):
        """Optional override for subclasses. Called when there is no top
        group to deserialize.
        """
        pass

    def updateWorkingDirectory(self, newdir, olddir):
        """Optional override for subclasses. Called when the working
        directory is changed and relative paths have to be updated. Child
        classes should overwrite this method if they store relative paths.
        """
        pass
class AppletSerializer(object):
    """Abstract base for applet serializers.

    Concrete subclasses implement the _serializeToHdf5/_deserializeFromHdf5
    hooks plus isDirty/unload; this base class handles version checking,
    progress signalling, and locating the applet's top-level group in the
    project file.
    """
    __metaclass__ = ABCMeta  # force subclasses to implement the abstract API

    # Flipped to True by __init__; exposed via the base_initialized
    # property so the shell can detect subclasses that skipped __init__.
    _base_initialized = False

    ####################
    # Abstract methods #
    ####################

    @abstractmethod
    def _serializeToHdf5(self, topGroup, hdf5File, projectFilePath):
        """Write the applet's data to hdf5.

        :param topGroup: the hdf5Group object this serializer is
            responsible for
        :param hdf5File: an hdf5File object (already open)
        :param projectFilePath: path to the project file (a string; most
            serializers don't need it)
        """
        raise NotImplementedError

    @abstractmethod
    def _deserializeFromHdf5(self, topGroup, groupVersion, hdf5File, projectFilePath):
        """Read the applet's data from hdf5.

        :param topGroup: the hdf5Group object this serializer is
            responsible for
        :param groupVersion: the 'StorageVersion' value found in the group
        :param hdf5File: an hdf5File object (already open)
        :param projectFilePath: path to the project file (a string; most
            serializers don't need it)
        """
        raise NotImplementedError

    @abstractmethod
    def isDirty(self):
        """Return true iff the in-memory state no longer matches what is
        stored in the HDF5 group on disk. SerializableItems track their own
        dirty/not-dirty state.
        """
        raise NotImplementedError

    @abstractmethod
    def unload(self):
        """Called when the user closes the project, or when project loading
        must be aborted (e.g. a corrupted ilp left some items
        undeserializable), so that no state from a partially loaded project
        survives.
        """
        raise NotImplementedError

    ####################
    # Optional methods #
    ####################

    def initWithoutTopGroup(self, hdf5File, projectFilePath):
        """Optional override for subclasses: invoked when no top group was
        found to deserialize. Gives the applet a chance to inspect the
        hdf5File or project path even though its group is absent.
        """
        pass

    #######################
    # Convenience methods #
    #######################

    @classmethod
    def getOrCreateGroup(cls, parentGroup, groupName):
        """Return parentGroup[groupName], creating it first if necessary."""
        if groupName in parentGroup:
            return parentGroup[groupName]
        return parentGroup.create_group(groupName)

    @classmethod
    def deleteIfPresent(cls, parentGroup, name):
        """Delete parentGroup[name] if it exists; otherwise do nothing."""
        if name in parentGroup:
            del parentGroup[name]

    @property
    def version(self):
        """The version of the serializer itself."""
        return self._version

    @property
    def topGroupName(self):
        """Name of this applet's top-level group in the project file."""
        return self._topGroupName

    #############################
    # Base class implementation #
    #############################

    def __init__(self, topGroupName, version):
        """Record the group name and version.

        Subclasses must invoke this from their own __init__.
        """
        self._topGroupName = topGroupName
        self._version = version
        self.progressSignal = SimpleSignal()  # Signature: emit(percentComplete)
        self._base_initialized = True

    def serializeToHdf5(self, hdf5File, projectFilePath):
        """Drive serialization: version check, progress signals, top-group
        and StorageVersion bookkeeping, then delegate to _serializeToHdf5.
        """
        # Bail out early if we cannot navigate this project file's layout.
        projectVersion = hdf5File["ilastikVersion"].value
        if not VersionManager.isProjectFileVersionCompatible(projectVersion):
            return

        self.progressSignal.emit(0)
        topGroup = self.getOrCreateGroup(hdf5File, self.topGroupName)

        # Record this serializer's version alongside the data.
        if 'StorageVersion' in topGroup.keys():
            topGroup['StorageVersion'][()] = self._version
        else:
            topGroup.create_dataset('StorageVersion', data=self._version)

        try:
            # The subclass does the actual writing.
            self._serializeToHdf5(topGroup, hdf5File, projectFilePath)
        finally:
            # Completion is always reported, even on failure.
            self.progressSignal.emit(100)

    def deserializeFromHdf5(self, hdf5File, projectFilePath):
        """Drive deserialization, falling back to initWithoutTopGroup when
        the applet's group is missing from the file.
        """
        # Bail out early if we cannot navigate this project file's layout.
        projectVersion = hdf5File["ilastikVersion"].value
        if not VersionManager.isProjectFileVersionCompatible(projectVersion):
            return

        self.progressSignal.emit(0)

        # Locate our group; a missing group (or version marker) means we
        # fall through to initWithoutTopGroup below.
        topGroup = None
        groupVersion = None
        try:
            topGroup = hdf5File[self.topGroupName]
            groupVersion = topGroup['StorageVersion'][()]
        except KeyError:
            topGroup = None
            groupVersion = None

        try:
            if topGroup is None:
                self.initWithoutTopGroup(hdf5File, projectFilePath)
            else:
                # The subclass does the actual reading.
                self._deserializeFromHdf5(topGroup, groupVersion, hdf5File, projectFilePath)
        finally:
            # Completion is always reported, even on failure.
            self.progressSignal.emit(100)

    @property
    def base_initialized(self):
        """Do not override this property. Used by the shell to ensure that
        this base class's __init__ was called by your subclass.
        """
        return self._base_initialized