def _serializeToHdf5(self, topGroup, hdf5File, projectFilePath):
        with Tracer(traceLogger):

            numSteps = sum(self._dirtyFlags.values())
            progress = 0
            increment = 0
            if numSteps > 0:
                increment = 100 / numSteps

            if self._dirtyFlags[Section.Labels]:
                self._serializeLabels(topGroup)
                progress += increment
                self.progressSignal.emit(progress)

            if self._dirtyFlags[Section.Classifier]:
                self._serializeClassifier(topGroup)
                progress += increment
                self.progressSignal.emit(progress)

            # Serialize predictions even if they aren't dirty
            # (since they aren't always stored).
            self._serializePredictions(topGroup, progress,
                                       progress + increment)
            if self._dirtyFlags[Section.Predictions]:
                progress += increment
                self.progressSignal.emit(progress)
    def importStackAsLocalDataset(self, info):
        """
        Add the given stack data to the project file as a local dataset.
        Create a datainfo and append it to our operator.
        """
        with Tracer(traceLogger):

            try:
                self.progressSignal.emit(0)

                projectFileHdf5 = self.mainOperator.ProjectFile.value
                topGroup = self.getOrCreateGroup(projectFileHdf5,
                                                 self.topGroupName)
                localDataGroup = self.getOrCreateGroup(topGroup, 'local_data')

                globstring = info.filePath
                info.location = DatasetInfo.Location.ProjectInternal

                opWriter = OpStackToH5Writer(graph=self.mainOperator.graph)
                opWriter.hdf5Group.setValue(localDataGroup)
                opWriter.hdf5Path.setValue(info.datasetId)
                opWriter.GlobString.setValue(globstring)

                # Forward progress from the writer directly to our applet
                opWriter.progressSignal.subscribe(self.progressSignal.emit)

                success = opWriter.WriteImage.value

                numDatasets = len(self.mainOperator.Dataset)
                self.mainOperator.Dataset.resize(numDatasets + 1)
                self.mainOperator.Dataset[numDatasets].setValue(info)
            finally:
                self.progressSignal.emit(100)

            return success
    def _deserializeClassifier(self, topGroup):
        with Tracer(traceLogger):
            try:
                classifierGroup = topGroup['ClassifierForests']
            except KeyError:
                pass
            else:
                # Due to non-shared hdf5 dlls, vigra can't read directly from our open hdf5 group.
                # Instead, we'll copy the classifier data to a temporary file and give it to vigra.
                tmpDir = tempfile.mkdtemp()
                cachePath = os.path.join(tmpDir, 'tmp_classifier_cache.h5')
                with h5py.File(cachePath, 'w') as cacheFile:
                    cacheFile.copy(classifierGroup, 'ClassifierForests')

                forests = []
                for name, forestGroup in sorted(classifierGroup.items()):
                    forests.append(
                        vigra.learning.RandomForest(
                            cachePath, str('ClassifierForests/' + name)))

                os.remove(cachePath)
                os.removedirs(tmpDir)

                # Now force the classifier into our classifier cache.
                # The downstream operators (e.g. the prediction operator) can use the classifier without causing it to be re-trained.
                # (This assumes that the classifier we are loading is consistent with the images and labels that we just loaded.
                #  As soon as training input changes, it will be retrained.)
                self.mainOperator.classifier_cache.forceValue(
                    numpy.array(forests))
            finally:
                self._dirtyFlags[Section.Classifier] = False
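
# Note: progressSignal and classifier_cache above come from the surrounding
# ilastik/lazyflow operators and are not shown in these snippets. A minimal sketch of
# the subscribe/emit progress-signal pattern these examples rely on (an illustrative
# assumption, not the actual ilastik implementation):
class ProgressSignalSketch(object):
    """Calls every subscribed handler whenever emit() is invoked."""

    def __init__(self):
        self._subscribers = []

    def subscribe(self, callback):
        self._subscribers.append(callback)

    def emit(self, *args):
        # Forward the emitted value(s), e.g. a progress percentage, to all handlers.
        for callback in self._subscribers:
            callback(*args)
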
Example #4
 def __init__(self, mainOperator):
     """
     """
     with Tracer(traceLogger):
         self.mainOperator = mainOperator
         super(ThresholdMaskingGui, self).__init__(self.mainOperator)
         self.handleThresholdGuiValuesChanged(0, 255)
Example #5
    def addFileNames(self, fileNames):
        """
        Add the given filenames to both the GUI table and the top-level operator inputs.
        """
        with Tracer(traceLogger):
            # Allocate additional subslots in the operator inputs.
            oldNumFiles = len(self.mainOperator.Dataset)
            self.mainOperator.Dataset.resize(oldNumFiles + len(fileNames))

            # Assign values to the new inputs we just allocated.
            # The GUI will be updated by callbacks that are listening to slot changes
            for i, filePath in enumerate(fileNames):
                datasetInfo = DatasetInfo()
                cwd = self.mainOperator.WorkingDirectory.value
                absPath, relPath = getPathVariants(filePath, cwd)

                # Relative by default, unless the file is in a totally different tree from the working directory.
                if len(os.path.commonprefix([cwd, absPath])) > 1:
                    datasetInfo.filePath = relPath
                else:
                    datasetInfo.filePath = absPath

                # Allow labels by default if this gui isn't being used for batch data.
                datasetInfo.allowLabels = (self.guiMode == GuiMode.Normal)

                self.mainOperator.Dataset[i + oldNumFiles].setValue(datasetInfo)
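
# getPathVariants(...) above is assumed to return both an absolute and a
# working-directory-relative form of the given path. A minimal sketch of that idea
# (a hypothetical helper, not ilastik's actual implementation):
import os

def getPathVariantsSketch(filePath, workingDir):
    # If filePath is already absolute, os.path.join leaves it unchanged.
    absPath = os.path.abspath(os.path.join(workingDir, filePath))
    relPath = os.path.relpath(absPath, workingDir)
    return absPath, relPath
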
Example #6
    def initCentralUic(self):
        """
        Load the GUI from the ui file into this class and connect it with event handlers.
        """
        with Tracer(traceLogger):
            # Load the ui file into this class (find it in our own directory)
            localDir = os.path.split(__file__)[0]
            uic.loadUi(localDir + "/dataSelection.ui", self)

            self.fileInfoTableWidget.resizeRowsToContents()
            self.fileInfoTableWidget.resizeColumnsToContents()
            self.fileInfoTableWidget.setAlternatingRowColors(True)
            self.fileInfoTableWidget.setShowGrid(False)
            self.fileInfoTableWidget.horizontalHeader().setResizeMode(
                0, QHeaderView.Interactive)

            self.fileInfoTableWidget.horizontalHeader().resizeSection(
                Column.Name, 200)
            self.fileInfoTableWidget.horizontalHeader().resizeSection(
                Column.Location, 250)
            self.fileInfoTableWidget.horizontalHeader().resizeSection(
                Column.InternalID, 100)

            if self.guiMode == GuiMode.Batch:
                # It doesn't make sense to provide a labeling option in batch mode
                self.fileInfoTableWidget.removeColumn(Column.LabelsAllowed)

            self.fileInfoTableWidget.verticalHeader().hide()

            # Set up handlers
            self.fileInfoTableWidget.itemSelectionChanged.connect(
                self.handleTableSelectionChange)
    def __init__(self, mainOperator, projectFileGroupName):
        with Tracer(traceLogger):
            super(PixelClassificationSerializer,
                  self).__init__(projectFileGroupName, self.SerializerVersion)
            self.mainOperator = mainOperator
            self._initDirtyFlags()

            # Set up handlers for dirty detection
            def handleDirty(section):
                self._dirtyFlags[section] = True

            self.mainOperator.Classifier.notifyDirty(
                bind(handleDirty, Section.Classifier))

            def handleNewImage(section, slot, index):
                slot[index].notifyDirty(bind(handleDirty, section))

            # These are multi-slots, so subscribe to dirty callbacks on each of their subslots as they are created
            self.mainOperator.LabelImages.notifyInserted(
                bind(handleNewImage, Section.Labels))
            self.mainOperator.PredictionProbabilities.notifyInserted(
                bind(handleNewImage, Section.Predictions))

            self._predictionStorageEnabled = False
            self._predictionStorageRequest = None
            self._predictionsPresent = False
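
# The bind(...) helper used above (from ilastik.utility) is assumed to work like
# functools.partial, except that call-time arguments beyond what the target function
# accepts are silently dropped, so the same wrapper works for signals that pass
# different numbers of arguments. A rough sketch of that assumption:
import inspect

def bindSketch(f, *boundArgs):
    remaining = len(inspect.getargspec(f).args) - len(boundArgs)

    def wrapper(*callArgs, **_ignoredKwargs):
        # Pass the bound arguments plus only as many call-time arguments as f accepts.
        return f(*(boundArgs + callArgs[:max(remaining, 0)]))

    return wrapper
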
Example #8
    def initAppletDrawerUi(self):
        with Tracer(traceLogger):
            # Load the ui file (find it in our own directory)
            localDir = os.path.split(__file__)[0]
            self._drawer = uic.loadUi(localDir + "/drawer.ui")

            layout = QVBoxLayout(self)
            layout.setSpacing(0)
            self._drawer.setLayout(layout)

            thresholdWidget = ThresholdingWidget(self)
            thresholdWidget.valueChanged.connect(self.handleThresholdGuiValuesChanged)
            layout.addWidget(thresholdWidget)

            def updateDrawerFromOperator():
                minValue, maxValue = (0, 255)

                if self.mainOperator.MinValue.ready():
                    minValue = self.mainOperator.MinValue.value
                if self.mainOperator.MaxValue.ready():
                    maxValue = self.mainOperator.MaxValue.value

                thresholdWidget.setValue(minValue, maxValue)

            self.mainOperator.MinValue.notifyDirty(bind(updateDrawerFromOperator))
            self.mainOperator.MaxValue.notifyDirty(bind(updateDrawerFromOperator))
Example #9
 def setupLayers(self, currentImageIndex):
     with Tracer(traceLogger):
         layers = []
 
         # Show the thresholded data
         outputImageSlot = self.mainOperator.Output[ currentImageIndex ]
         if outputImageSlot.ready():
             outputLayer = self.createStandardLayerFromSlot( outputImageSlot )
             outputLayer.name = "min <= x <= max"
             outputLayer.visible = True
             outputLayer.opacity = 0.75
             layers.append(outputLayer)
         
         # Show the inverted data
         invertedOutputSlot = self.mainOperator.InvertedOutput[ currentImageIndex ]
         if invertedOutputSlot.ready():
             invertedLayer = self.createStandardLayerFromSlot( invertedOutputSlot )
             invertedLayer.name = "(x < min) U (x > max)"
             invertedLayer.visible = True
             invertedLayer.opacity = 0.25
             layers.append(invertedLayer)
         
         # Show the raw input data
         inputImageSlot = self.mainOperator.InputImage[ currentImageIndex ]
         if inputImageSlot.ready():
             inputLayer = self.createStandardLayerFromSlot( inputImageSlot )
             inputLayer.name = "Raw Input"
             inputLayer.visible = True
             inputLayer.opacity = 1.0
             layers.append(inputLayer)
 
         return layers
Example #10
 def exportResultsForSlot(self, slot, progressSlot):
     with Tracer(traceLogger):
         # Do this in a separate thread so the UI remains responsive
         exportThread = threading.Thread(
             target=bind(self.exportSlots, [slot], [progressSlot]),
             name="BatchIOExportThread")
         exportThread.start()
Example #11
 def handleLayerRemoval(self, slot, slotIndex):
     """
     An item is about to be removed from the multislot that is providing our layers.
     Remove the layer from the GUI.
     """
     with Tracer(traceLogger):
         self.updateAllLayers()
Example #12
 def getSlotIndex(self, multislot, subslot):
     with Tracer(traceLogger):
         # Which index is this slot?
         for index, slot in enumerate(multislot):
             if slot == subslot:
                 return index
         return -1
    def _deserializeFromHdf5(self, topGroup, groupVersion, hdf5File,
                             projectFilePath):
        with Tracer(traceLogger):
            self._projectFilePath = projectFilePath
            self.initWithoutTopGroup(hdf5File, projectFilePath)

            infoDir = topGroup['infos']

            self.mainOperator.Dataset.resize(len(infoDir))
            for index, (infoGroupName,
                        infoGroup) in enumerate(sorted(infoDir.items())):
                datasetInfo = DatasetInfo()

                # Make a reverse-lookup of the location storage strings
                LocationLookup = {v: k for k, v in self.LocationStrings.items()}
                datasetInfo.location = LocationLookup[str(
                    infoGroup['location'].value)]

                # Write to the 'private' members to avoid resetting the dataset id
                datasetInfo._filePath = str(infoGroup['filePath'].value)
                datasetInfo._datasetId = str(infoGroup['datasetId'].value)

                # Deserialize the "allow labels" flag
                try:
                    datasetInfo.allowLabels = infoGroup['allowLabels'].value
                except KeyError:
                    pass

                # Deserialize the axisorder (if present)
                try:
                    datasetInfo.axisorder = infoGroup['axisorder'].value
                except KeyError:
                    if ilastik.utility.globals.ImportOptions.default_axis_order is not None:
                        datasetInfo.axisorder = ilastik.utility.globals.ImportOptions.default_axis_order

                # If the data is supposed to be in the project,
                #  check for it now.
                if datasetInfo.location == DatasetInfo.Location.ProjectInternal:
                    if datasetInfo.datasetId not in topGroup['local_data']:
                        raise RuntimeError(
                            "Corrupt project file.  Could not find data for " +
                            infoGroupName)

                # If the data is supposed to exist outside the project, make sure it really does.
                if datasetInfo.location == DatasetInfo.Location.FileSystem:
                    filePath = PathComponents(
                        datasetInfo.filePath,
                        os.path.split(projectFilePath)[0]).externalPath
                    if not os.path.exists(filePath):
                        raise RuntimeError("Could not find external data: " +
                                           filePath)

                # Give the new info to the operator
                self.mainOperator.Dataset[index].setValue(datasetInfo)

            self._dirty = False
Example #14
    def handleAppletProgressImpl(self, index, percentage, cancelled):
        # No need for locking; this function is always run from the GUI thread
        with Tracer(traceLogger,
                    msg="from applet {}: {}%, cancelled={}".format(
                        index, percentage, cancelled)):
            if cancelled:
                if index in self.appletPercentages:
                    del self.appletPercentages[index]
            else:
                # Take max (never go back down)
                if index in self.appletPercentages:
                    oldPercentage = self.appletPercentages[index]
                    self.appletPercentages[index] = max(
                        percentage, oldPercentage)
                # First percentage we get MUST be zero.
                # Other notifications are ignored.
                if index in self.appletPercentages or percentage == 0:
                    self.appletPercentages[index] = percentage

            numActive = len(self.appletPercentages)
            if numActive > 0:
                totalPercentage = sum(self.appletPercentages.values()) / numActive

            if numActive == 0 or totalPercentage == 100:
                if self.progressBar is not None:
                    self.statusBar.removeWidget(self.progressBar)
                    self.progressBar = None
                    self.appletPercentages.clear()
            else:
                if self.progressBar is None:
                    self.progressBar = QProgressBar()
                    self.statusBar.addWidget(self.progressBar)
                self.progressBar.setValue(totalPercentage)
    def _serializeLabels(self, topGroup):
        with Tracer(traceLogger):
            # Delete all labels from the file
            self.deleteIfPresent(topGroup, 'LabelSets')
            labelSetDir = topGroup.create_group('LabelSets')

            numImages = len(self.mainOperator.NonzeroLabelBlocks)
            for imageIndex in range(numImages):
                # Create a group for this image
                labelGroupName = 'labels{:03d}'.format(imageIndex)
                labelGroup = labelSetDir.create_group(labelGroupName)

                # Get a list of slicings that contain labels
                nonZeroBlocks = self.mainOperator.NonzeroLabelBlocks[
                    imageIndex].value
                for blockIndex, slicing in enumerate(nonZeroBlocks):
                    # Read the block from the label output
                    block = self.mainOperator.LabelImages[imageIndex][
                        slicing].wait()

                    # Store the block as a new dataset
                    blockName = 'block{:04d}'.format(blockIndex)
                    labelGroup.create_dataset(blockName, data=block)

                    # Add the slice this block came from as an attribute of the dataset
                    labelGroup[blockName].attrs[
                        'blockSlice'] = self.slicingToString(slicing)

            self._dirtyFlags[Section.Labels] = False
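
# slicingToString(...) above is assumed to encode a tuple of slice objects as a short
# string such as "[0:10,0:20,0:5]" so it can be stored as an hdf5 attribute. A minimal
# sketch of that idea (a hypothetical helper, not the actual serializer method):
def slicingToStringSketch(slicing):
    parts = ['{}:{}'.format(s.start, s.stop) for s in slicing]
    return '[' + ','.join(parts) + ']'
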
Example #16
    def initCentralUic(self):
        """
        Load the GUI from the ui file into this class and connect it with event handlers.
        """
        with Tracer(traceLogger):
            # Load the ui file into this class (find it in our own directory)
            localDir = os.path.split(__file__)[0]
            uic.loadUi(localDir + "/batchIo.ui", self)

            self.tableWidget.resizeRowsToContents()
            self.tableWidget.resizeColumnsToContents()
            self.tableWidget.setAlternatingRowColors(True)
            self.tableWidget.setShowGrid(False)
            self.tableWidget.horizontalHeader().setResizeMode(
                0, QHeaderView.Interactive)

            self.tableWidget.horizontalHeader().resizeSection(
                Column.Dataset, 200)
            self.tableWidget.horizontalHeader().resizeSection(
                Column.ExportLocation, 250)
            self.tableWidget.horizontalHeader().resizeSection(
                Column.Action, 100)

            self.tableWidget.verticalHeader().hide()

            # Set up handlers
            self.tableWidget.itemSelectionChanged.connect(
                self.handleTableSelectionChange)
Example #17
    def initAppletDrawerUic(self):
        """
        Load the ui file for the applet drawer, which we own.
        """
        with Tracer(traceLogger):
            # Load the ui file (find it in our own directory)
            localDir = os.path.split(__file__)[0]
            # (We don't pass self here because we keep the drawer ui in a separate object.)
            self.drawer = uic.loadUi(localDir + "/batchIoDrawer.ui")

            # Set up our handlers
            self.drawer.saveWithInputButton.toggled.connect(
                self.handleExportLocationOptionChanged)
            self.drawer.saveToDirButton.toggled.connect(
                self.handleExportLocationOptionChanged)

            self.drawer.outputDirChooseButton.clicked.connect(
                self.chooseNewExportDirectory)
            self.drawer.outputSuffixEdit.textEdited.connect(
                self.handleNewOutputSuffix)

            self.drawer.exportAllButton.clicked.connect(self.exportAllResults)
            self.drawer.deleteAllButton.clicked.connect(self.deleteAllResults)

            for i, formatInfo in sorted(SupportedFormats.items()):
                self.drawer.exportFormatCombo.addItem(formatInfo.name + ' (' +
                                                      formatInfo.extension +
                                                      ')')
            self.drawer.exportFormatCombo.currentIndexChanged.connect(
                partial(self.handleExportFormatChanged))
    def _serializeClassifier(self, topGroup):
        with Tracer(traceLogger):
            self.deleteIfPresent(topGroup, 'ClassifierForests')
            self._dirtyFlags[Section.Classifier] = False

            if not self.mainOperator.Classifier.ready():
                return

            classifier_forests = self.mainOperator.Classifier.value

            # Classifier can be None if there isn't any training data yet.
            if classifier_forests is None:
                return
            for forest in classifier_forests:
                if forest is None:
                    return

            # Due to non-shared hdf5 dlls, vigra can't write directly to our open hdf5 group.
            # Instead, we'll use vigra to write the classifier to a temporary file.
            tmpDir = tempfile.mkdtemp()
            cachePath = os.path.join(tmpDir, 'tmp_classifier_cache.h5')
            for i, forest in enumerate(classifier_forests):
                forest.writeHDF5(cachePath,
                                 'ClassifierForests/Forest{:04d}'.format(i))

            # Open the temp file and copy to our project group
            with h5py.File(cachePath, 'r') as cacheFile:
                topGroup.copy(cacheFile['ClassifierForests'],
                              'ClassifierForests')

            os.remove(cachePath)
            os.removedirs(tmpDir)
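
# deleteIfPresent(...) and getOrCreateGroup(...) used throughout these serializers are
# assumed to be small h5py helpers on the base serializer class. Minimal sketches of
# those assumptions (h5py's own require_group covers the second case):
def deleteIfPresentSketch(group, name):
    # Remove a subgroup or dataset from an open h5py group if it exists.
    if name in group:
        del group[name]

def getOrCreateGroupSketch(parentGroup, groupName):
    # Return the named subgroup, creating it if it doesn't exist yet.
    if groupName in parentGroup:
        return parentGroup[groupName]
    return parentGroup.create_group(groupName)
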
Example #19
 def exportAllResults(self):
     with Tracer(traceLogger):
         # Do this in a separate thread so the UI remains responsive
         exportThread = threading.Thread(
             target=bind(self.exportSlots, self.mainOperator.ExportResult,
                         self.mainOperator.ProgressSignal),
             name="BatchIOExportThread")
         exportThread.start()
 def unload(self):
     """ Called if either
         (1) the user closed the project or
         (2) the project opening process needs to be aborted for some reason
             (e.g. not all items could be deserialized properly due to a corrupted ilp)
         This way we can avoid invalid state due to a partially loaded project. """
     with Tracer(traceLogger):
         self.mainOperator.Dataset.resize(0)
Example #21
 def handleShellRequest(self, applet_index, requestAction):
     """
     An applet is asking us to do something.  Handle the request.
     """
     with Tracer(traceLogger):
         if requestAction == ShellRequest.RequestSave:
             # Call the handler directly to ensure this is a synchronous call (not queued to the GUI thread)
             self.projectManager.saveProject()
Example #22
    def handleNewOutputSuffix(self):
        with Tracer(traceLogger):
            suffix = str(self.drawer.outputSuffixEdit.text())

            self.mainOperator.Suffix.setValue(suffix)

            # Update every row of the GUI
            for index, slot in enumerate(self.mainOperator.OutputDataPath):
                self.updateTableForSlot(slot)
Example #23
 def handleLayerInsertion(self, slot, slotIndex):
     """
     The multislot providing our layers has a new item.
     Make room for it in the layer GUI and subscribe to updates.
     """
     with Tracer(traceLogger):
         # When the slot is ready, we'll replace the blank layer with real data
         slot[slotIndex].notifyReady(bind(self.updateAllLayers))
         slot[slotIndex].notifyUnready(bind(self.updateAllLayers))
Example #24
            def handleNewDataset(multislot, index):
                with Tracer(traceLogger):
                    assert multislot == self.mainOperator.Dataset
                    # Make room in the table
                    self.fileInfoTableWidget.insertRow(index)

                    # Update the table row data when this slot has new data
                    # We can't bind the row number here because it may change in the meantime.
                    self.mainOperator.Dataset[index].notifyDirty(
                        self.updateTableForSlot)
    def _deserializeFromHdf5(self, topGroup, groupVersion, hdf5File,
                             projectFilePath):
        with Tracer(traceLogger):
            self.progressSignal.emit(0)
            self._deserializeLabels(topGroup)
            self.progressSignal.emit(50)
            self._deserializeClassifier(topGroup)
            self._deserializePredictions(topGroup)

            self.progressSignal.emit(100)
Example #26
    def handleAllowLabelsCheckbox(self, slot, checked):
        """
        The user (un)checked the "allow labels" checkbox in one of the table rows.
        Update the corresponding dataset info in the operator (which is given in the parameter 'slot')
        """
        with Tracer(traceLogger):
            # COPY the dataset so we trigger the slot to be dirty
            newDatasetInfo = copy.copy(slot.value)
            newDatasetInfo.allowLabels = (checked == Qt.Checked)

            # Only update if necessary
            if newDatasetInfo != slot.value:
                slot.setValue(newDatasetInfo)
    def deserializeFromHdf5(self, hdf5File, projectFilePath):
        with Tracer(traceLogger):
            # Check the overall file version
            ilastikVersion = hdf5File["ilastikVersion"].value

            # This is the v0.5 import deserializer.  Don't work with 0.6 projects (or anything else).
            if ilastikVersion != 0.5:
                return

            # The 'working directory' for the purpose of constructing absolute
            #  paths from relative paths is the project file's directory.
            projectDir = os.path.split(projectFilePath)[0]
            self.mainOperator.WorkingDirectory.setValue(projectDir)

            # These project file inputs are required, but are not used because the data is treated as "external"
            self.mainOperator.ProjectDataGroup.setValue('DataSets')
            self.mainOperator.ProjectFile.setValue(hdf5File)

            # Access the top group and the info group
            try:
                #dataset = hdf5File["DataSets"]["dataItem00"]["data"]
                dataDir = hdf5File["DataSets"]
            except KeyError:
                # If our group (or subgroup) doesn't exist, then make sure the operator is empty
                self.mainOperator.Dataset.resize(0)
                return

            self.mainOperator.Dataset.resize(len(dataDir))
            for index, (datasetDirName,
                        datasetDir) in enumerate(sorted(dataDir.items())):
                datasetInfo = DatasetInfo()

                # Since we are importing from a 0.5 file, all datasets will be external
                #  to the project (pulled in from the old file as hdf5 datasets)
                datasetInfo.location = DatasetInfo.Location.FileSystem

                # Some older versions of ilastik 0.5 stored the data in tzyxc order.
                # Some power-users can enable a command-line flag that tells us to
                #  transpose the data back to txyzc order when we import the old project.
                if ilastik.utility.globals.ImportOptions.default_axis_order is not None:
                    datasetInfo.axisorder = ilastik.utility.globals.ImportOptions.default_axis_order

                # Write to the 'private' members to avoid resetting the dataset id
                totalDatasetPath = projectFilePath + '/DataSets/' + datasetDirName + '/data'
                datasetInfo._filePath = str(totalDatasetPath)
                datasetInfo._datasetId = datasetDirName  # Use the old dataset name as the new dataset id

                # Give the new info to the operator
                self.mainOperator.Dataset[index].setValue(datasetInfo)
Example #28
    def handleExportLocationOptionChanged(self):
        """
        The user has changed the export directory option (radio buttons).
        """
        with Tracer(traceLogger):
            saveWithInput = self.drawer.saveWithInputButton.isChecked()
            if saveWithInput:
                # Set to '', which means export data is stored in the input data directory
                self.mainOperator.ExportDirectory.setValue('')
            else:
                self.mainOperator.ExportDirectory.setValue(
                    self.chosenExportDirectory)

            for index, slot in enumerate(self.mainOperator.OutputDataPath):
                self.updateTableForSlot(slot)
Example #29
    def exportSlots(self, slotList, progressSignalSlotList):
        with Tracer(traceLogger):
            try:
                # Don't let anyone change the classifier while we're exporting...
                self.guiControlSignal.emit(
                    ilastik.applets.base.applet.ControlCommand.DisableUpstream)

                # Also disable this applet's controls
                self.guiControlSignal.emit(
                    ilastik.applets.base.applet.ControlCommand.DisableSelf)

                # Start with 1% so the progress bar shows up
                self.progressSignal.emit(1)

                def signalFileProgress(slotIndex, percent):
                    self.progressSignal.emit(
                        (100 * slotIndex + percent) / len(slotList))

                for i, slot in enumerate(slotList):
                    logger.debug("Exporting result {}".format(i))

                    # If the operator provides a progress signal, use it.
                    slotProgressSignal = progressSignalSlotList[i].value
                    slotProgressSignal.subscribe(partial(
                        signalFileProgress, i))

                    result = slot.value
                    if not result:
                        logger.error("Failed to export an image.")

                    # We're finished with this file.
                    self.progressSignal.emit(100 * (i + 1) /
                                             float(len(slotList)))

                # Ensure the shell knows we're really done.
                self.progressSignal.emit(100)
            except:
                # Cancel our progress.
                self.progressSignal.emit(0, True)
                raise
            finally:
                # Now that we're finished, it's okay to use the other applets again.
                self.guiControlSignal.emit(
                    ilastik.applets.base.applet.ControlCommand.Pop
                )  # Enable ourselves
                self.guiControlSignal.emit(
                    ilastik.applets.base.applet.ControlCommand.Pop
                )  # Enable the others we disabled
Example #30
    def _deserializeFromHdf5(self, topGroup, groupVersion, hdf5File,
                             projectFilePath):
        with Tracer(traceLogger):
            try:
                scales = topGroup['Scales'].value
                featureIds = topGroup['FeatureIds'].value
            except KeyError:
                pass
            else:
                self.mainOperator.Scales.setValue(scales)

                # If the main operator already has a feature ordering (provided by the GUI),
                # then don't overwrite it.  We'll re-order the matrix to match the existing ordering.
                if not self.mainOperator.FeatureIds.ready():
                    self.mainOperator.FeatureIds.setValue(featureIds)

                # If the matrix isn't there, just return
                try:
                    savedMatrix = topGroup['SelectionMatrix'].value
                    # Check matrix dimensions
                    assert savedMatrix.shape[0] == len(
                        featureIds
                    ), "Invalid project data: feature selection matrix dimensions don't make sense"
                    assert savedMatrix.shape[1] == len(
                        scales
                    ), "Invalid project data: feature selection matrix dimensions don't make sense"
                except KeyError:
                    pass
                else:
                    # If the feature order has changed since this project was last saved,
                    #  then we need to re-order the features.
                    # The 'new' order is provided by the operator
                    newFeatureOrder = list(self.mainOperator.FeatureIds.value)

                    newMatrixShape = (len(newFeatureOrder), len(scales))
                    newMatrix = numpy.zeros(newMatrixShape, dtype=bool)
                    for oldFeatureIndex, featureId in enumerate(featureIds):
                        newFeatureIndex = newFeatureOrder.index(featureId)
                        newMatrix[newFeatureIndex] = savedMatrix[
                            oldFeatureIndex]

                    self.mainOperator.SelectionMatrix.setValue(newMatrix)

            self._dirty = False
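
# A tiny worked example (illustrative values only) of the row re-ordering performed
# above: if the project was saved with features ['GaussianSmoothing',
# 'LaplacianOfGaussian'] but the operator now lists them in the opposite order, each
# saved row is copied into the position dictated by the new ordering.
import numpy

savedMatrix = numpy.array([[True, False], [False, True]])
savedFeatureIds = ['GaussianSmoothing', 'LaplacianOfGaussian']
newFeatureOrder = ['LaplacianOfGaussian', 'GaussianSmoothing']

newMatrix = numpy.zeros_like(savedMatrix)
for oldIndex, featureId in enumerate(savedFeatureIds):
    newMatrix[newFeatureOrder.index(featureId)] = savedMatrix[oldIndex]
# newMatrix is now [[False, True], [True, False]]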