def createPixelClassificationApplet(self):
     """
     Can be overridden by subclasses, if they want to return their own type of PixelClassificationApplet.
     NOTE: The applet returned here must have the same interface as the regular PixelClassificationApplet.
           (If it looks like a duck...)
     """
     return PixelClassificationApplet(self, "PixelClassification")
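A minimal sketch of such an override, assuming a hypothetical CustomPixelClassificationApplet that exposes the same slots and attributes as the regular applet:

class CustomPixelClassificationWorkflow(PixelClassificationWorkflow):
    def createPixelClassificationApplet(self):
        # CustomPixelClassificationApplet is an assumed stand-in; the workflow
        # only relies on it quacking like PixelClassificationApplet.
        return CustomPixelClassificationApplet(self, "PixelClassification")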
Example #2
    def setupInputs(self):
        data_instructions = 'Use the "Raw Data" tab on the right to load your intensity image(s).'
        
        self.dataSelectionApplet = DataSelectionApplet( self, 
                                                        "Input Data", 
                                                        "Input Data", 
                                                        batchDataGui=False,
                                                        forceAxisOrder=None, 
                                                        instructionText=data_instructions )
        opData = self.dataSelectionApplet.topLevelOperator
        opData.DatasetRoles.setValue(['Raw Data'])

        self.featureSelectionApplet = FeatureSelectionApplet(
            self,
            "Feature Selection",
            "FeatureSelections",
        )

        self.pcApplet = PixelClassificationApplet(
            self, "PixelClassification")
        self.thresholdingApplet = ThresholdTwoLevelsApplet(
            self, "Thresholding", "ThresholdTwoLevels")

        self._applets.append(self.dataSelectionApplet)
        self._applets.append(self.featureSelectionApplet)
        self._applets.append(self.pcApplet)
        self._applets.append(self.thresholdingApplet)

        if not self._headless:
            self._shell.currentAppletChanged.connect( self.handle_applet_changed )
    def setupInputs(self):
        data_instructions = 'Use the "Raw Data" tab on the right to load your intensity image(s).'
        
        self.dataSelectionApplet = DataSelectionApplet( self, 
                                                        "Input Data", 
                                                        "Input Data", 
                                                        batchDataGui=False,
                                                        forceAxisOrder=None, 
                                                        instructionText=data_instructions )
        opData = self.dataSelectionApplet.topLevelOperator
        opData.DatasetRoles.setValue(['Raw Data'])

        self.featureSelectionApplet = FeatureSelectionApplet(
            self,
            "Feature Selection",
            "FeatureSelections",
            filter_implementation=self.filter_implementation
        )

        self.pcApplet = PixelClassificationApplet(
            self, "PixelClassification")
        self.thresholdingApplet = ThresholdTwoLevelsApplet(
            self, "Thresholding", "ThresholdTwoLevels")

        self._applets.append(self.dataSelectionApplet)
        self._applets.append(self.featureSelectionApplet)
        self._applets.append(self.pcApplet)
        self._applets.append(self.thresholdingApplet)
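The applets created in setupInputs above are normally wired together per image lane elsewhere in the workflow. Below is a minimal connectLane sketch following the data -> features -> classifier pattern from Example #8; the thresholding slot names (RawInput, InputImage) and the classifier's PredictionProbabilities slot are assumptions, not confirmed by the snippets here.

    def connectLane(self, laneIndex):
        # Per-lane views of each applet's top-level operator (pattern from Example #8).
        opData = self.dataSelectionApplet.topLevelOperator.getLane(laneIndex)
        opFeatures = self.featureSelectionApplet.topLevelOperator.getLane(laneIndex)
        opClassify = self.pcApplet.topLevelOperator.getLane(laneIndex)
        opThreshold = self.thresholdingApplet.topLevelOperator.getLane(laneIndex)

        # Raw image feeds both feature computation and the classifier (for display).
        opFeatures.InputImage.connect(opData.Image)
        opClassify.InputImages.connect(opData.Image)

        # Feature images feed training and prediction.
        opClassify.FeatureImages.connect(opFeatures.OutputImage)
        opClassify.CachedFeatureImages.connect(opFeatures.CachedOutputImage)

        # Assumed slot names: threshold the predicted probability maps.
        opThreshold.RawInput.connect(opData.Image)
        opThreshold.InputImage.connect(opClassify.PredictionProbabilities)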
Example #4
    def __init__(self, headless, workflow_cmdline_args, appendBatchOperators=True, *args, **kwargs):
        # Create a graph to be shared by all operators
        graph = Graph()
        super( PixelClassificationWorkflow, self ).__init__( headless, graph=graph, *args, **kwargs )
        self._applets = []

        # Applets for training (interactive) workflow 
        self.projectMetadataApplet = ProjectMetadataApplet()
        self.dataSelectionApplet = DataSelectionApplet(self, "Input Data", "Input Data", supportIlastik05Import=True, batchDataGui=False)
        opDataSelection = self.dataSelectionApplet.topLevelOperator
        opDataSelection.DatasetRoles.setValue( ['Raw Data'] )

        self.featureSelectionApplet = FeatureSelectionApplet(self, "Feature Selection", "FeatureSelections")
        self.pcApplet = PixelClassificationApplet(self, "PixelClassification")
        # Expose for shell
        self._applets.append(self.projectMetadataApplet)
        self._applets.append(self.dataSelectionApplet)
        self._applets.append(self.featureSelectionApplet)
        self._applets.append(self.pcApplet)

        if appendBatchOperators:
            # Create applets for batch workflow
            self.batchInputApplet = DataSelectionApplet(self, "Batch Prediction Input Selections", "BatchDataSelection", supportIlastik05Import=False, batchDataGui=True)
            self.batchResultsApplet = PixelClassificationBatchResultsApplet(self, "Batch Prediction Output Locations")
    
            # Expose in shell        
            self._applets.append(self.batchInputApplet)
            self._applets.append(self.batchResultsApplet)
    
            # Connect batch workflow (NOT lane-based)
            self._initBatchWorkflow()
    def __init__(
        self,
        shell,
        headless,
        workflow_cmdline_args,
        project_creation_args,
        hintoverlayFile=None,
        pmapoverlayFile=None,
        *args,
        **kwargs,
    ):
        if workflow_cmdline_args:
            assert False, "Not using workflow cmdline args yet."

        if hintoverlayFile is not None:
            assert isinstance(hintoverlayFile, str), "hintoverlayFile should be a string, not '%s'" % type(
                hintoverlayFile
            )
        if pmapoverlayFile is not None:
            assert isinstance(pmapoverlayFile, str), "pmapoverlayFile should be a string, not '%s'" % type(
                pmapoverlayFile
            )

        graph = Graph()

        super(CarvingFromPixelPredictionsWorkflow, self).__init__(
            shell, headless, workflow_cmdline_args, project_creation_args, *args, graph=graph, **kwargs
        )

        ## Create applets
        self.dataSelectionApplet = DataSelectionApplet(
            self, "Input Data", "Input Data", supportIlastik05Import=True, batchDataGui=False
        )
        opDataSelection = self.dataSelectionApplet.topLevelOperator
        opDataSelection.DatasetRoles.setValue(["Raw Data"])

        self.featureSelectionApplet = FeatureSelectionApplet(self, "Feature Selection", "FeatureSelections")
        self.pixelClassificationApplet = PixelClassificationApplet(self, "PixelClassification")

        self.carvingApplet = CarvingApplet(
            workflow=self,
            projectFileGroupName="carving",
            hintOverlayFile=hintoverlayFile,
            pmapOverlayFile=pmapoverlayFile,
        )

        self.preprocessingApplet = PreprocessingApplet(
            workflow=self, title="Preprocessing", projectFileGroupName="carving"
        )

        # self.carvingApplet.topLevelOperator.MST.connect(self.preprocessingApplet.topLevelOperator.PreprocessedData)

        # Expose to shell
        self._applets = []
        self._applets.append(self.dataSelectionApplet)
        self._applets.append(self.featureSelectionApplet)
        self._applets.append(self.pixelClassificationApplet)
        self._applets.append(self.preprocessingApplet)
        self._applets.append(self.carvingApplet)
    def createInputApplets(self):
        super().createInputApplets()

        self.featureSelectionApplet = FeatureSelectionApplet(self, "Feature Selection", "FeatureSelections")

        self.pcApplet = PixelClassificationApplet(self, "PixelClassification")
        self.thresholdingApplet = ThresholdTwoLevelsApplet(self, "Thresholding", "ThresholdTwoLevels")

        self._applets.append(self.featureSelectionApplet)
        self._applets.append(self.pcApplet)
        self._applets.append(self.thresholdingApplet)

        if not self._headless:
            self._shell.currentAppletChanged.connect(self.handle_applet_changed)
Example #7
 def createPixelClassificationApplet(self, index=0):
     """
     Can be overridden by subclasses, if they want to return their own type of PixelClassificationApplet.
     NOTE: The applet returned here must have the same interface as the regular PixelClassificationApplet.
           (If it looks like a duck...)
     """
     # Make the first one compatible with the pixel classification workflow,
     # in case the user uses "Import Project"
     hdf5_group_name = 'PixelClassification'
     if index > 0:
         hdf5_group_name = "PixelClassification{index:02d}".format(index=index)
     applet = PixelClassificationApplet( self, hdf5_group_name )
     applet.topLevelOperator.name += '{}'.format(index)
     return applet
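For illustration, a hedged sketch of what the factory above yields when a workflow creates several pixel classification applets; index 0 keeps the plain group name so the project stays importable by the standard pixel classification workflow:

# Assumed context: `workflow` exposes the factory method shown above.
applets = [workflow.createPixelClassificationApplet(index=i) for i in range(3)]
# Project-file (HDF5) group names: 'PixelClassification', 'PixelClassification01',
# 'PixelClassification02'; each top-level operator name gets the bare index appended.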
Example #8
class PixelClassificationWorkflow(Workflow):
    
    workflowName = "Pixel Classification"
    workflowDescription = "This is obviously self-explanatory."
    defaultAppletIndex = 1 # show DataSelection by default
    
    @property
    def applets(self):
        return self._applets

    @property
    def imageNameListSlot(self):
        return self.dataSelectionApplet.topLevelOperator.ImageName

    def __init__(self, headless, workflow_cmdline_args, appendBatchOperators=True, *args, **kwargs):
        # Create a graph to be shared by all operators
        graph = Graph()
        super( PixelClassificationWorkflow, self ).__init__( headless, graph=graph, *args, **kwargs )
        self._applets = []

        # Applets for training (interactive) workflow 
        self.projectMetadataApplet = ProjectMetadataApplet()
        self.dataSelectionApplet = DataSelectionApplet(self, "Input Data", "Input Data", supportIlastik05Import=True, batchDataGui=False)
        opDataSelection = self.dataSelectionApplet.topLevelOperator
        opDataSelection.DatasetRoles.setValue( ['Raw Data'] )

        self.featureSelectionApplet = FeatureSelectionApplet(self, "Feature Selection", "FeatureSelections")
        self.pcApplet = PixelClassificationApplet(self, "PixelClassification")
        # Expose for shell
        self._applets.append(self.projectMetadataApplet)
        self._applets.append(self.dataSelectionApplet)
        self._applets.append(self.featureSelectionApplet)
        self._applets.append(self.pcApplet)

        if appendBatchOperators:
            # Create applets for batch workflow
            self.batchInputApplet = DataSelectionApplet(self, "Batch Prediction Input Selections", "BatchDataSelection", supportIlastik05Import=False, batchDataGui=True)
            self.batchResultsApplet = PixelClassificationBatchResultsApplet(self, "Batch Prediction Output Locations")
    
            # Expose in shell        
            self._applets.append(self.batchInputApplet)
            self._applets.append(self.batchResultsApplet)
    
            # Connect batch workflow (NOT lane-based)
            self._initBatchWorkflow()
    
    def connectLane(self, laneIndex):
        # Get a handle to each operator
        opData = self.dataSelectionApplet.topLevelOperator.getLane(laneIndex)
        opTrainingFeatures = self.featureSelectionApplet.topLevelOperator.getLane(laneIndex)
        opClassify = self.pcApplet.topLevelOperator.getLane(laneIndex)
        if not self.pcApplet.enabled:
            self.pcApplet.guiControlSignal.emit(ControlCommand.DisableSelf)
        # try:
        #     self.batchResultsApplet.guiControlSignal.emit(ControlCommand.DisableSelf)
        # except NameError:
        #     pass
        # Input Image -> Feature Op
        #         and -> Classification Op (for display)
        opTrainingFeatures.InputImage.connect( opData.Image )
        opClassify.InputImages.connect( opData.Image )
        
        # Feature Images -> Classification Op (for training, prediction)
        opClassify.FeatureImages.connect( opTrainingFeatures.OutputImage )
        opClassify.CachedFeatureImages.connect( opTrainingFeatures.CachedOutputImage )
        
        # Training flags -> Classification Op (for GUI restrictions)
        opClassify.LabelsAllowedFlags.connect( opData.AllowLabels )
        
        
    def updateEnable(self,applet,en = True):
        if applet == "training":
            self.pcApplet.updateEnable(en)
        elif applet == "features":
            self.pcApplet.updateEnable(en)
        elif applet == "batch":
            self.batchResultsApplet.updateEnable(en)
        
    def _initBatchWorkflow(self):
        """
        Connect the batch-mode top-level operators to the training workflow and to each other.
        """
        # Access applet operators from the training workflow
        opTrainingDataSelection = self.dataSelectionApplet.topLevelOperator
        opTrainingFeatures = self.featureSelectionApplet.topLevelOperator
        opClassify = self.pcApplet.topLevelOperator
        
        # Access the batch operators
        opBatchInputs = self.batchInputApplet.topLevelOperator
        opBatchResults = self.batchResultsApplet.topLevelOperator
        
        opBatchInputs.DatasetRoles.connect( opTrainingDataSelection.DatasetRoles )
        
        ## Create additional batch workflow operators
        opBatchFeatures = OperatorWrapper( OpFeatureSelection, operator_kwargs={'filter_implementation':'Original'}, parent=self, promotedSlotNames=['InputImage'] )
        opBatchPredictionPipeline = OperatorWrapper( OpPredictionPipeline, parent=self )
        
        ## Connect Operators ##
        opTranspose = OpTransposeSlots( parent=self )
        opTranspose.OutputLength.setValue(1)
        opTranspose.Inputs.connect( opBatchInputs.DatasetGroup )
        
        opFilePathProvider = OperatorWrapper(OpAttributeSelector, parent=self)
        opFilePathProvider.InputObject.connect( opTranspose.Outputs[0] )
        opFilePathProvider.AttributeName.setValue( 'filePath' )
        
        # Provide dataset paths from data selection applet to the batch export applet
        opBatchResults.DatasetPath.connect( opFilePathProvider.Result )
        
        # Connect (clone) the feature operator inputs from 
        #  the interactive workflow's features operator (which gets them from the GUI)
        opBatchFeatures.Scales.connect( opTrainingFeatures.Scales )
        opBatchFeatures.FeatureIds.connect( opTrainingFeatures.FeatureIds )
        opBatchFeatures.SelectionMatrix.connect( opTrainingFeatures.SelectionMatrix )
        
        # Classifier and LabelsCount are provided by the interactive workflow
        opBatchPredictionPipeline.Classifier.connect( opClassify.Classifier )
        opBatchPredictionPipeline.MaxLabel.connect( opClassify.MaxLabelValue )
        opBatchPredictionPipeline.FreezePredictions.setValue( False )
        
        # Provide these for the gui
        opBatchResults.RawImage.connect( opBatchInputs.Image )
        opBatchResults.PmapColors.connect( opClassify.PmapColors )
        opBatchResults.LabelNames.connect( opClassify.LabelNames )
        
        # Connect Image pathway:
        # Input Image -> Features Op -> Prediction Op -> Export
        opBatchFeatures.InputImage.connect( opBatchInputs.Image )
        opBatchPredictionPipeline.FeatureImages.connect( opBatchFeatures.OutputImage )
        opBatchResults.ImageToExport.connect( opBatchPredictionPipeline.HeadlessPredictionProbabilities )

        # We don't actually need the cached path in the batch pipeline.
        # Just connect the uncached features here to satisfy the operator.
        opBatchPredictionPipeline.CachedFeatureImages.connect( opBatchFeatures.OutputImage )

        self.opBatchPredictionPipeline = opBatchPredictionPipeline

    def getHeadlessOutputSlot(self, slotId):
        # "Regular" (i.e. with the images that the user selected as input data)
        if slotId == "Predictions":
            return self.pcApplet.topLevelOperator.HeadlessPredictionProbabilities
        elif slotId == "PredictionsUint8":
            return self.pcApplet.topLevelOperator.HeadlessUint8PredictionProbabilities
        # "Batch" (i.e. with the images that the user selected as batch inputs).
        elif slotId == "BatchPredictions":
            return self.opBatchPredictionPipeline.HeadlessPredictionProbabilities
        if slotId == "BatchPredictionsUint8":
            return self.opBatchPredictionPipeline.HeadlessUint8PredictionProbabilities
        
        raise Exception("Unknown headless output slot")
    def __init__(self,
                 shell,
                 headless,
                 workflow_cmdline_args,
                 project_creation_args,
                 appendBatchOperators=True,
                 *args,
                 **kwargs):
        # Create a graph to be shared by all operators
        graph = Graph()
        super(PixelClassificationWorkflow,
              self).__init__(shell,
                             headless,
                             workflow_cmdline_args,
                             project_creation_args,
                             graph=graph,
                             *args,
                             **kwargs)
        self._applets = []
        self._workflow_cmdline_args = workflow_cmdline_args

        data_instructions = "Select your input data using the 'Raw Data' tab shown on the right"

        # Parse workflow-specific command-line args
        parser = argparse.ArgumentParser()
        parser.add_argument('--filter',
                            help="pixel feature filter implementation.",
                            choices=['Original', 'Refactored', 'Interpolated'],
                            default='Original')
        parser.add_argument(
            '--print-labels-by-slice',
            help="Print the number of labels for each Z-slice of each image.",
            action="store_true")
        parser.add_argument(
            '--label-search-value',
            help=
            "If provided, only this value is considered when using --print-labels-by-slice",
            default=0,
            type=int)
        parser.add_argument('--generate-random-labels',
                            help="Add random labels to the project file.",
                            action="store_true")
        parser.add_argument(
            '--random-label-value',
            help="The label value to use injecting random labels",
            default=1,
            type=int)
        parser.add_argument(
            '--random-label-count',
            help=
            "The number of random labels to inject via --generate-random-labels",
            default=2000,
            type=int)
        parser.add_argument(
            '--retrain',
            help=
            "Re-train the classifier based on labels stored in project file, and re-save.",
            action="store_true")

        # Parse the creation args: These were saved to the project file when this project was first created.
        parsed_creation_args, unused_args = parser.parse_known_args(
            project_creation_args)
        self.filter_implementation = parsed_creation_args.filter

        # Parse the cmdline args for the current session.
        parsed_args, unused_args = parser.parse_known_args(
            workflow_cmdline_args)
        self.print_labels_by_slice = parsed_args.print_labels_by_slice
        self.label_search_value = parsed_args.label_search_value
        self.generate_random_labels = parsed_args.generate_random_labels
        self.random_label_value = parsed_args.random_label_value
        self.random_label_count = parsed_args.random_label_count
        self.retrain = parsed_args.retrain

        if parsed_args.filter and parsed_args.filter != parsed_creation_args.filter:
            logger.error(
                "Ignoring new --filter setting.  Filter implementation cannot be changed after initial project creation."
            )

        # Applets for training (interactive) workflow
        self.projectMetadataApplet = ProjectMetadataApplet()
        self.dataSelectionApplet = DataSelectionApplet(
            self,
            "Input Data",
            "Input Data",
            supportIlastik05Import=True,
            batchDataGui=False,
            instructionText=data_instructions)
        opDataSelection = self.dataSelectionApplet.topLevelOperator

        if ilastik_config.getboolean('ilastik', 'debug'):
            # see role constants, above
            role_names = ['Raw Data', 'Prediction Mask']
            opDataSelection.DatasetRoles.setValue(role_names)
        else:
            role_names = ['Raw Data']
            opDataSelection.DatasetRoles.setValue(role_names)

        self.featureSelectionApplet = FeatureSelectionApplet(
            self, "Feature Selection", "FeatureSelections",
            self.filter_implementation)

        self.pcApplet = PixelClassificationApplet(self, "PixelClassification")
        opClassify = self.pcApplet.topLevelOperator

        self.dataExportApplet = PixelClassificationDataExportApplet(
            self, "Prediction Export")
        opDataExport = self.dataExportApplet.topLevelOperator
        opDataExport.PmapColors.connect(opClassify.PmapColors)
        opDataExport.LabelNames.connect(opClassify.LabelNames)
        opDataExport.WorkingDirectory.connect(opDataSelection.WorkingDirectory)
        opDataExport.SelectionNames.setValue(self.EXPORT_NAMES)

        # Expose for shell
        self._applets.append(self.projectMetadataApplet)
        self._applets.append(self.dataSelectionApplet)
        self._applets.append(self.featureSelectionApplet)
        self._applets.append(self.pcApplet)
        self._applets.append(self.dataExportApplet)

        self._batch_input_args = None
        self._batch_export_args = None

        self.batchInputApplet = None
        self.batchResultsApplet = None
        if appendBatchOperators:
            # Create applets for batch workflow
            self.batchInputApplet = DataSelectionApplet(
                self,
                "Batch Prediction Input Selections",
                "Batch Inputs",
                supportIlastik05Import=False,
                batchDataGui=True)
            self.batchResultsApplet = PixelClassificationDataExportApplet(
                self, "Batch Prediction Output Locations", isBatch=True)

            # Expose in shell
            self._applets.append(self.batchInputApplet)
            self._applets.append(self.batchResultsApplet)

            # Connect batch workflow (NOT lane-based)
            self._initBatchWorkflow()

            if unused_args:
                # We parse the export setting args first.  All remaining args are considered input files by the input applet.
                self._batch_export_args, unused_args = self.batchResultsApplet.parse_known_cmdline_args(
                    unused_args)
                self._batch_input_args, unused_args = self.batchInputApplet.parse_known_cmdline_args(
                    unused_args)

        if unused_args:
            logger.warn("Unused command-line args: {}".format(unused_args))
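A small, self-contained illustration of the two-stage argument parsing above: the filter implementation recorded at project creation wins, a different value on the current command line is only reported, and leftover arguments stay available for the batch applets. The file name raw.h5 is made up for the example:

import argparse

parser = argparse.ArgumentParser()
parser.add_argument('--filter', choices=['Original', 'Refactored', 'Interpolated'],
                    default='Original')

# Args saved at project creation vs. args given for the current session.
creation_args, _ = parser.parse_known_args(['--filter', 'Original'])
session_args, unused = parser.parse_known_args(['--filter', 'Interpolated', 'raw.h5'])

filter_implementation = creation_args.filter          # 'Original' is kept
if session_args.filter != creation_args.filter:
    print("Ignoring new --filter setting.")           # the workflow logs an error instead
print(unused)                                         # ['raw.h5'] -> batch input applet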
Example #10
    def __init__(self):
        super(PixelClassificationWorkflow, self).__init__()
        self._applets = []

        # Create a graph to be shared by all operators
        graph = Graph()
        self._graph = graph

        ######################
        # Interactive workflow
        ######################

        ## Create applets
        self.projectMetadataApplet = ProjectMetadataApplet()
        self.dataSelectionApplet = DataSelectionApplet(
            graph,
            "Input Data",
            "Input Data",
            supportIlastik05Import=True,
            batchDataGui=False)
        self.featureSelectionApplet = FeatureSelectionApplet(
            graph, "Feature Selection", "FeatureSelections")
        self.pcApplet = PixelClassificationApplet(graph, "PixelClassification")

        ## Access applet operators
        opData = self.dataSelectionApplet.topLevelOperator
        opTrainingFeatures = self.featureSelectionApplet.topLevelOperator
        opClassify = self.pcApplet.topLevelOperator

        ## Connect operators ##

        # Input Image -> Feature Op
        #         and -> Classification Op (for display)
        opTrainingFeatures.InputImage.connect(opData.Image)
        opClassify.InputImages.connect(opData.Image)

        # Feature Images -> Classification Op (for training, prediction)
        opClassify.FeatureImages.connect(opTrainingFeatures.OutputImage)
        opClassify.CachedFeatureImages.connect(
            opTrainingFeatures.CachedOutputImage)

        # Training flags -> Classification Op (for GUI restrictions)
        opClassify.LabelsAllowedFlags.connect(opData.AllowLabels)

        ######################
        # Batch workflow
        ######################

        ## Create applets
        self.batchInputApplet = DataSelectionApplet(
            graph,
            "Batch Inputs",
            "BatchDataSelection",
            supportIlastik05Import=False,
            batchDataGui=True)
        self.batchResultsApplet = BatchIoApplet(graph, "Batch Results")

        ## Access applet operators
        opBatchInputs = self.batchInputApplet.topLevelOperator
        opBatchInputs.name = 'opBatchInputs'
        opBatchResults = self.batchResultsApplet.topLevelOperator

        ## Create additional batch workflow operators
        opBatchFeatures = OperatorWrapper(OpFeatureSelection,
                                          graph=graph,
                                          promotedSlotNames=['InputImage'])
        opBatchFeatures.name = "opBatchFeatures"
        opBatchPredictor = OperatorWrapper(OpPredictRandomForest,
                                           graph=graph,
                                           promotedSlotNames=['Image'])
        opBatchPredictor.name = "opBatchPredictor"
        opSelectBatchDatasetPath = OperatorWrapper(OpAttributeSelector,
                                                   graph=graph)

        ## Connect Operators ##

        # Provide dataset paths from data selection applet to the batch export applet via an attribute selector
        opSelectBatchDatasetPath.InputObject.connect(opBatchInputs.Dataset)
        opSelectBatchDatasetPath.AttributeName.setValue('filePath')
        opBatchResults.DatasetPath.connect(opSelectBatchDatasetPath.Result)

        # Connect (clone) the feature operator inputs from
        #  the interactive workflow's features operator (which gets them from the GUI)
        opBatchFeatures.Scales.connect(opTrainingFeatures.Scales)
        opBatchFeatures.FeatureIds.connect(opTrainingFeatures.FeatureIds)
        opBatchFeatures.SelectionMatrix.connect(
            opTrainingFeatures.SelectionMatrix)

        # Classifier and LabelsCount are provided by the interactive workflow
        opBatchPredictor.Classifier.connect(opClassify.Classifier)
        opBatchPredictor.LabelsCount.connect(opClassify.MaxLabelValue)

        # Connect Image pathway:
        # Input Image -> Features Op -> Prediction Op -> Export
        opBatchFeatures.InputImage.connect(opBatchInputs.Image)
        opBatchPredictor.Image.connect(opBatchFeatures.OutputImage)
        opBatchResults.ImageToExport.connect(opBatchPredictor.PMaps)

        self._applets.append(self.projectMetadataApplet)
        self._applets.append(self.dataSelectionApplet)
        self._applets.append(self.featureSelectionApplet)
        self._applets.append(self.pcApplet)
        self._applets.append(self.batchInputApplet)
        self._applets.append(self.batchResultsApplet)

        # The shell needs a slot from which it can read the list of image names to switch between.
        # Use an OpAttributeSelector to create a slot containing just the filename from the OpDataSelection's DatasetInfo slot.
        opSelectFilename = OperatorWrapper(OpAttributeSelector, graph=graph)
        opSelectFilename.InputObject.connect(opData.Dataset)
        opSelectFilename.AttributeName.setValue('filePath')

        self._imageNameListSlot = opSelectFilename.Result
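Finally, a hedged sketch of reading that image-name slot back, assuming the usual lazyflow access pattern for a wrapped (level-1) value slot; the access pattern is an assumption, not code taken from the example:

workflow = PixelClassificationWorkflow()
name_slot = workflow._imageNameListSlot
# One subslot per loaded dataset; each yields the filePath selected above.
image_names = [name_slot[i].value for i in range(len(name_slot))]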