def __init__(self,
                 shell,
                 headless,
                 workflow_cmdline_args,
                 project_creation_args,
                 appendBatchOperators=True,
                 *args,
                 **kwargs):
        # Create a graph to be shared by all operators
        graph = Graph()
        super(AutocontextClassificationWorkflow,
              self).__init__(shell,
                             headless,
                             workflow_cmdline_args,
                             project_creation_args,
                             graph=graph,
                             *args,
                             **kwargs)
        self._applets = []

        ## Create applets
        self.projectMetadataApplet = ProjectMetadataApplet()
        self.dataSelectionApplet = DataSelectionApplet(
            self,
            "Input Data",
            "Input Data",
            supportIlastik05Import=True,
            batchDataGui=False)
        self.featureSelectionApplet = FeatureSelectionApplet(
            self, "Feature Selection", "FeatureSelections")
        self.pcApplet = AutocontextClassificationApplet(
            self, "PixelClassification")

        # Autocontext constant
        opClassifyTopLevel = self.pcApplet.topLevelOperator
        opClassifyTopLevel.AutocontextIterations.setValue(2)

        # Expose for shell
        self._applets.append(self.projectMetadataApplet)
        self._applets.append(self.dataSelectionApplet)
        self._applets.append(self.featureSelectionApplet)
        self._applets.append(self.pcApplet)

        if appendBatchOperators:
            # Create applets for batch workflow
            self.batchInputApplet = DataSelectionApplet(
                self,
                "Batch Prediction Input Selections",
                "BatchDataSelection",
                supportIlastik05Import=False,
                batchDataGui=True)
            self.batchResultsApplet = BatchIoApplet(
                self, "Batch Prediction Output Locations")

            # Expose in shell
            self._applets.append(self.batchInputApplet)
            self._applets.append(self.batchResultsApplet)

            # Connect batch workflow (NOT lane-based)
            self._initBatchWorkflow()
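All of these constructors follow the same skeleton: obtain a shared Graph, pass it to the base Workflow constructor, instantiate the applets, collect them in self._applets (their order determines the order shown in the shell), and optionally append batch applets. Below is a minimal, self-contained sketch of that skeleton; Graph, Applet and Workflow here are trivial stand-ins, not the real lazyflow/ilastik classes.

class Graph(object):
    """Stand-in for the shared operator graph."""

class Applet(object):
    """Stand-in for an ilastik applet."""
    def __init__(self, name):
        self.name = name

class Workflow(object):
    """Stand-in base class; the real one also takes shell/headless/cmdline args."""
    def __init__(self, graph=None):
        self.graph = graph

class MinimalWorkflow(Workflow):
    def __init__(self, appendBatchOperators=True, **kwargs):
        # Reuse a caller-supplied graph if present, otherwise create a fresh one.
        graph = kwargs.pop('graph', None) or Graph()
        super(MinimalWorkflow, self).__init__(graph=graph)

        # Interactive applets, registered in display order.
        self._applets = [Applet("Input Data"),
                         Applet("Feature Selection"),
                         Applet("Classification")]

        if appendBatchOperators:
            # Batch applets are appended after the interactive ones.
            self._applets.append(Applet("Batch Inputs"))
            self._applets.append(Applet("Batch Outputs"))

print([a.name for a in MinimalWorkflow()._applets])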
Example #2
    def __init__(self, carvingGraphFile):
        super(CarvingWorkflow, self).__init__()
        self._applets = []

        graph = Graph()
        self._graph = graph

        ## Create applets 
        self.projectMetadataApplet = ProjectMetadataApplet()
        self.dataSelectionApplet = DataSelectionApplet(graph, "Input Data", "Input Data", supportIlastik05Import=True, batchDataGui=False)

        self.carvingApplet = CarvingApplet(graph, "xxx", carvingGraphFile)
        self.carvingApplet.topLevelOperator.RawData.connect( self.dataSelectionApplet.topLevelOperator.Image )
        self.carvingApplet.topLevelOperator.opLabeling.LabelsAllowedFlags.connect( self.dataSelectionApplet.topLevelOperator.AllowLabels )
        self.carvingApplet.gui.minLabelNumber = 2
        self.carvingApplet.gui.maxLabelNumber = 2

        ## Access applet operators
        opData = self.dataSelectionApplet.topLevelOperator
        
        ## Connect operators ##
        
        self._applets.append(self.projectMetadataApplet)
        self._applets.append(self.dataSelectionApplet)
        self._applets.append(self.carvingApplet)

        # The shell needs a slot from which it can read the list of image names to switch between.
        # Use an OpAttributeSelector to create a slot containing just the filename from the OpDataSelection's DatasetInfo slot.
        opSelectFilename = OperatorWrapper( OpAttributeSelector, graph=graph )
        opSelectFilename.InputObject.connect( opData.Dataset )
        opSelectFilename.AttributeName.setValue( 'filePath' )

        self._imageNameListSlot = opSelectFilename.Result
Example #3
    def __init__(self,
                 headless,
                 workflow_cmdline_args,
                 appendBatchOperators=True,
                 *args,
                 **kwargs):
        graph = kwargs['graph'] if 'graph' in kwargs else Graph()
        if 'graph' in kwargs: del kwargs['graph']
        super(CountingWorkflow, self).__init__(headless,
                                               graph=graph,
                                               *args,
                                               **kwargs)

        ######################
        # Interactive workflow
        ######################

        self.projectMetadataApplet = ProjectMetadataApplet()

        self.dataSelectionApplet = DataSelectionApplet(self,
                                                       "Data Selection",
                                                       "DataSelection",
                                                       batchDataGui=False,
                                                       force5d=False)
        opDataSelection = self.dataSelectionApplet.topLevelOperator
        opDataSelection.DatasetRoles.setValue(['Raw Data'])

        self.featureSelectionApplet = FeatureSelectionApplet(
            self, "Feature Selection", "FeatureSelections")

        #self.pcApplet = PixelClassificationApplet(self, "PixelClassification")
        self.countingApplet = CountingApplet(workflow=self)

        self._applets = []
        self._applets.append(self.projectMetadataApplet)
        self._applets.append(self.dataSelectionApplet)
        self._applets.append(self.featureSelectionApplet)
        self._applets.append(self.countingApplet)

        if appendBatchOperators:
            # Create applets for batch workflow
            self.batchInputApplet = DataSelectionApplet(
                self,
                "Batch Prediction Input Selections",
                "BatchDataSelection",
                supportIlastik05Import=False,
                batchDataGui=True)
            self.batchResultsApplet = CountingBatchResultsApplet(
                self, "Batch Prediction Output Locations")

            # Expose in shell
            self._applets.append(self.batchInputApplet)
            self._applets.append(self.batchResultsApplet)

            # Connect batch workflow (NOT lane-based)
            self._initBatchWorkflow()
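Several of the constructors above extract an optional graph from **kwargs with a membership test followed by a del. dict.pop collapses that into a single call; a small equivalent sketch (the Graph class is a trivial stand-in):

class Graph(object):
    """Trivial stand-in for lazyflow's Graph."""

def extract_graph(kwargs):
    # pop() removes the key if present and returns the default otherwise,
    # replacing the "if 'graph' in kwargs ... del kwargs['graph']" pair.
    return kwargs.pop('graph', None) or Graph()

kwargs = {'graph': Graph(), 'other': 1}
g = extract_graph(kwargs)
assert isinstance(g, Graph) and 'graph' not in kwargs

kwargs = {'other': 1}
g = extract_graph(kwargs)          # no 'graph' key: a new Graph is created
assert isinstance(g, Graph)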
Example #4
    def __init__(self, shell, headless, workflow_cmdline_args, project_creation_args, hintoverlayFile=None, pmapoverlayFile=None, *args, **kwargs):
        if hintoverlayFile is not None:
            assert isinstance(hintoverlayFile, str), "hintoverlayFile should be a string, not '%s'" % type(hintoverlayFile)
        if pmapoverlayFile is not None:
            assert isinstance(pmapoverlayFile, str), "pmapoverlayFile should be a string, not '%s'" % type(pmapoverlayFile)

        graph = Graph()
        
        super(CarvingWorkflow, self).__init__(shell, headless, workflow_cmdline_args, project_creation_args, graph=graph, *args, **kwargs)
        
        data_instructions = "Select your input data using the 'Raw Data' tab shown on the right"
        
        ## Create applets 
        self.projectMetadataApplet = ProjectMetadataApplet()
        self.dataSelectionApplet = DataSelectionApplet( self,
                                                        "Input Data",
                                                        "Input Data",
                                                        supportIlastik05Import=True,
                                                        batchDataGui=False,
                                                        instructionText=data_instructions,
                                                        max_lanes=1 )
        opDataSelection = self.dataSelectionApplet.topLevelOperator
        opDataSelection.DatasetRoles.setValue( ['Raw Data'] )
        
        
        
        self.preprocessingApplet = PreprocessingApplet(workflow=self,
                                           title = "Preprocessing",
                                           projectFileGroupName="preprocessing")
        
        self.carvingApplet = CarvingApplet(workflow=self,
                                           projectFileGroupName="carving",
                                           hintOverlayFile=hintoverlayFile,
                                           pmapOverlayFile=pmapoverlayFile)
        
        #self.carvingApplet.topLevelOperator.MST.connect(self.preprocessingApplet.topLevelOperator.PreprocessedData)
        
        # Expose to shell
        self._applets = []
        self._applets.append(self.projectMetadataApplet)
        self._applets.append(self.dataSelectionApplet)
        self._applets.append(self.preprocessingApplet)
        self._applets.append(self.carvingApplet)
    def __init__(self, headless, workflow_cmdline_args, hintoverlayFile=None, pmapoverlayFile=None, *args, **kwargs):
        if workflow_cmdline_args:
            assert False, "Not using workflow cmdline args yet."

        if hintoverlayFile is not None:
            assert isinstance(hintoverlayFile, str), "hintoverlayFile should be a string, not '%s'" % type(hintoverlayFile)
        if pmapoverlayFile is not None:
            assert isinstance(pmapoverlayFile, str), "pmapoverlayFile should be a string, not '%s'" % type(pmapoverlayFile)

        graph = Graph()

        super(CarvingFromPixelPredictionsWorkflow, self).__init__(headless, *args, graph=graph, **kwargs)

        ## Create applets
        self.projectMetadataApplet = ProjectMetadataApplet()
        self.dataSelectionApplet = DataSelectionApplet(self, "Input Data", "Input Data", supportIlastik05Import=True, batchDataGui=False)
        opDataSelection = self.dataSelectionApplet.topLevelOperator
        opDataSelection.DatasetRoles.setValue( ['Raw Data'] )

        self.featureSelectionApplet = FeatureSelectionApplet(self, "Feature Selection", "FeatureSelections")
        self.pixelClassificationApplet = PixelClassificationApplet(self, "PixelClassification")

        self.carvingApplet = CarvingApplet(workflow=self,
                                           projectFileGroupName="carving",
                                           hintOverlayFile=hintoverlayFile,
                                           pmapOverlayFile=pmapoverlayFile)

        self.preprocessingApplet = PreprocessingApplet(workflow=self,
                                           title = "Preprocessing",
                                           projectFileGroupName="carving")

        #self.carvingApplet.topLevelOperator.MST.connect(self.preprocessingApplet.topLevelOperator.PreprocessedData)

        # Expose to shell
        self._applets = []
        self._applets.append(self.projectMetadataApplet)
        self._applets.append(self.dataSelectionApplet)
        self._applets.append(self.featureSelectionApplet)
        self._applets.append(self.pixelClassificationApplet)
        self._applets.append(self.preprocessingApplet)
        self._applets.append(self.carvingApplet)
    def __init__(self, shell, headless,
                 workflow_cmdline_args,
                 project_creation_args,
                 *args, **kwargs):
        graph = kwargs['graph'] if 'graph' in kwargs else Graph()
        if 'graph' in kwargs:
            del kwargs['graph']
        super(ObjectClassificationWorkflow, self).__init__(shell, headless, workflow_cmdline_args, project_creation_args, graph=graph, *args, **kwargs)
        self.stored_pixel_classifier = None
        self.stored_object_classifier = None

        # Parse workflow-specific command-line args
        parser = argparse.ArgumentParser()
        parser.add_argument('--fillmissing', help="use 'fill missing' applet with chosen detection method", choices=['classic', 'svm', 'none'], default='none')
        parser.add_argument('--filter', help="pixel feature filter implementation.", choices=['Original', 'Refactored', 'Interpolated'], default='Original')
        parser.add_argument('--nobatch', help="do not append batch applets", action='store_true', default=False)
        
        parsed_creation_args, unused_args = parser.parse_known_args(project_creation_args)

        self.fillMissing = parsed_creation_args.fillmissing
        self.filter_implementation = parsed_creation_args.filter

        parsed_args, unused_args = parser.parse_known_args(workflow_cmdline_args)
        if parsed_args.fillmissing != 'none' and parsed_creation_args.fillmissing != parsed_args.fillmissing:
            logger.error( "Ignoring --fillmissing cmdline arg.  Can't specify a different fillmissing setting after the project has already been created." )
        
        if parsed_args.filter != 'Original' and parsed_creation_args.filter != parsed_args.filter:
            logger.error( "Ignoring --filter cmdline arg.  Can't specify a different filter setting after the project has already been created." )

        self.batch = not parsed_args.nobatch

        self._applets = []

        self.pcApplet = None
        self.projectMetadataApplet = ProjectMetadataApplet()
        self._applets.append(self.projectMetadataApplet)

        self.setupInputs()
        
        if self.fillMissing != 'none':
            self.fillMissingSlicesApplet = FillMissingSlicesApplet(
                self, "Fill Missing Slices", "Fill Missing Slices", self.fillMissing)
            self._applets.append(self.fillMissingSlicesApplet)

        if isinstance(self, ObjectClassificationWorkflowPixel):
            self.input_types = 'raw'
        elif isinstance(self, ObjectClassificationWorkflowBinary):
            self.input_types = 'raw+binary'
        elif isinstance( self, ObjectClassificationWorkflowPrediction ):
            self.input_types = 'raw+pmaps'
        
        # our main applets
        self.objectExtractionApplet = ObjectExtractionApplet(workflow=self, name = "Object Feature Selection")
        self.objectClassificationApplet = ObjectClassificationApplet(workflow=self)
        self.dataExportApplet = ObjectClassificationDataExportApplet(self, "Object Information Export")
        self.dataExportApplet.set_exporting_operator(self.objectClassificationApplet.topLevelOperator)

        # Customization hooks
        self.dataExportApplet.prepare_for_entire_export = self.prepare_for_entire_export
        #self.dataExportApplet.prepare_lane_for_export = self.prepare_lane_for_export
        self.dataExportApplet.post_process_lane_export = self.post_process_lane_export
        self.dataExportApplet.post_process_entire_export = self.post_process_entire_export
        
        opDataExport = self.dataExportApplet.topLevelOperator
        opDataExport.WorkingDirectory.connect( self.dataSelectionApplet.topLevelOperator.WorkingDirectory )
        
        # See EXPORT_SELECTION_PREDICTIONS and EXPORT_SELECTION_PROBABILITIES, above
        export_selection_names = ['Object Predictions',
                                  'Object Probabilities',
                                  'Blockwise Object Predictions',
                                  'Blockwise Object Probabilities']
        if self.input_types == 'raw':
            # Re-configure to add the pixel probabilities option
            # See EXPORT_SELECTION_PIXEL_PROBABILITIES, above
            export_selection_names.append( 'Pixel Probabilities' )
        opDataExport.SelectionNames.setValue( export_selection_names )

        self._batch_export_args = None
        self._batch_input_args = None
        self._export_args = None
        self.batchProcessingApplet = None
        if self.batch:
            self.batchProcessingApplet = BatchProcessingApplet(self, 
                                                               "Batch Processing", 
                                                               self.dataSelectionApplet, 
                                                               self.dataExportApplet)
    
            if unused_args:
                # Additional export args (specific to the object classification workflow)
                export_arg_parser = argparse.ArgumentParser()
                export_arg_parser.add_argument( "--table_filename", help="The location to export the object feature/prediction CSV file.", required=False )
                export_arg_parser.add_argument( "--export_object_prediction_img", action="store_true" )
                export_arg_parser.add_argument( "--export_object_probability_img", action="store_true" )
                
                # TODO: Support this, too, someday?
                #export_arg_parser.add_argument( "--export_object_label_img", action="store_true" )
                
                if self.input_types == 'raw':
                    export_arg_parser.add_argument( "--export_pixel_probability_img", action="store_true" )
                self._export_args, unused_args = export_arg_parser.parse_known_args(unused_args)
                # This attribute only exists when input_types == 'raw' (the option is added conditionally above),
                # so use getattr to avoid an AttributeError for the other input types.
                self._export_args.export_pixel_probability_img = getattr(self._export_args, 'export_pixel_probability_img', None) or None

                # We parse the export setting args first.  All remaining args are considered input files by the input applet.
                self._batch_export_args, unused_args = self.dataExportApplet.parse_known_cmdline_args( unused_args )
                self._batch_input_args, unused_args = self.batchProcessingApplet.parse_known_cmdline_args( unused_args )

                # For backwards compatibility, translate these special args into the standard syntax
                if self._export_args.export_object_prediction_img:
                    self._batch_input_args.export_source = "Object Predictions"
                if self._export_args.export_object_probability_img:
                    self._batch_input_args.export_source = "Object Probabilities"
                if self._export_args.export_pixel_probability_img:
                    self._batch_input_args.export_source = "Pixel Probabilities"


        self.blockwiseObjectClassificationApplet = BlockwiseObjectClassificationApplet(
            self, "Blockwise Object Classification", "Blockwise Object Classification")

        self._applets.append(self.objectExtractionApplet)
        self._applets.append(self.objectClassificationApplet)
        self._applets.append(self.dataExportApplet)
        if self.batchProcessingApplet:
            self._applets.append(self.batchProcessingApplet)
        self._applets.append(self.blockwiseObjectClassificationApplet)

        if unused_args:
            logger.warn("Unused command-line args: {}".format( unused_args ))
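The command-line handling in this example chains argparse.parse_known_args: each parser consumes only the options it recognizes and hands the leftovers to the next stage, and whatever survives the last stage is treated as input files or reported as unused. A minimal, self-contained sketch of that chaining, reusing two of the option names defined above:

import argparse

workflow_parser = argparse.ArgumentParser()
workflow_parser.add_argument('--fillmissing', choices=['classic', 'svm', 'none'], default='none')

export_parser = argparse.ArgumentParser()
export_parser.add_argument('--table_filename')

cmdline = ['--fillmissing', 'svm',
           '--table_filename', 'features.csv',
           'image1.png', 'image2.png']

# Each stage consumes what it knows and forwards the rest.
workflow_args, leftover = workflow_parser.parse_known_args(cmdline)
export_args, leftover = export_parser.parse_known_args(leftover)

print(workflow_args.fillmissing)    # 'svm'
print(export_args.table_filename)   # 'features.csv'
print(leftover)                     # ['image1.png', 'image2.png'] -> input files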
Example #7
    def __init__(self, shell, headless, workflow_cmdline_args, project_creation_args, *args, **kwargs):
        # Create a graph to be shared by all operators
        graph = Graph()
        super( PixelClassificationWorkflow, self ).__init__( shell, headless, workflow_cmdline_args, project_creation_args, graph=graph, *args, **kwargs )
        self.stored_classifer = None
        self._applets = []
        self._workflow_cmdline_args = workflow_cmdline_args
        # Parse workflow-specific command-line args
        parser = argparse.ArgumentParser()
        parser.add_argument('--filter', help="pixel feature filter implementation.", choices=['Original', 'Refactored', 'Interpolated'], default='Original')
        parser.add_argument('--print-labels-by-slice', help="Print the number of labels for each Z-slice of each image.", action="store_true")
        parser.add_argument('--label-search-value', help="If provided, only this value is considered when using --print-labels-by-slice", default=0, type=int)
        parser.add_argument('--generate-random-labels', help="Add random labels to the project file.", action="store_true")
        parser.add_argument('--random-label-value', help="The label value to use when injecting random labels", default=1, type=int)
        parser.add_argument('--random-label-count', help="The number of random labels to inject via --generate-random-labels", default=2000, type=int)
        parser.add_argument('--retrain', help="Re-train the classifier based on labels stored in project file, and re-save.", action="store_true")
        parser.add_argument('--tree-count', help='Number of trees for Vigra RF classifier.', type=int)
        parser.add_argument('--variable-importance-path', help='Location of variable-importance table.', type=str)
        parser.add_argument('--label-proportion', help='Proportion of feature-pixels used to train the classifier.', type=float)

        # Parse the creation args: These were saved to the project file when this project was first created.
        parsed_creation_args, unused_args = parser.parse_known_args(project_creation_args)
        self.filter_implementation = parsed_creation_args.filter
        
        # Parse the cmdline args for the current session.
        parsed_args, unused_args = parser.parse_known_args(workflow_cmdline_args)
        self.print_labels_by_slice = parsed_args.print_labels_by_slice
        self.label_search_value = parsed_args.label_search_value
        self.generate_random_labels = parsed_args.generate_random_labels
        self.random_label_value = parsed_args.random_label_value
        self.random_label_count = parsed_args.random_label_count
        self.retrain = parsed_args.retrain
        self.tree_count = parsed_args.tree_count
        self.variable_importance_path = parsed_args.variable_importance_path
        self.label_proportion = parsed_args.label_proportion

        if parsed_args.filter and parsed_args.filter != parsed_creation_args.filter:
            logger.error("Ignoring new --filter setting.  Filter implementation cannot be changed after initial project creation.")
        
        data_instructions = "Select your input data using the 'Raw Data' tab shown on the right.\n\n"\
                            "Power users: Optionally use the 'Prediction Mask' tab to supply a binary image that tells ilastik where it should avoid computations you don't need."

        # Applets for training (interactive) workflow 
        self.projectMetadataApplet = ProjectMetadataApplet()
        
        self.dataSelectionApplet = self.createDataSelectionApplet()
        opDataSelection = self.dataSelectionApplet.topLevelOperator
        
        # see role constants, above
        opDataSelection.DatasetRoles.setValue( PixelClassificationWorkflow.ROLE_NAMES )

        self.featureSelectionApplet = self.createFeatureSelectionApplet()

        self.pcApplet = self.createPixelClassificationApplet()
        opClassify = self.pcApplet.topLevelOperator

        self.dataExportApplet = PixelClassificationDataExportApplet(self, "Prediction Export")
        opDataExport = self.dataExportApplet.topLevelOperator
        opDataExport.PmapColors.connect( opClassify.PmapColors )
        opDataExport.LabelNames.connect( opClassify.LabelNames )
        opDataExport.WorkingDirectory.connect( opDataSelection.WorkingDirectory )
        opDataExport.SelectionNames.setValue( self.EXPORT_NAMES )        

        # Expose for shell
        self._applets.append(self.projectMetadataApplet)
        self._applets.append(self.dataSelectionApplet)
        self._applets.append(self.featureSelectionApplet)
        self._applets.append(self.pcApplet)
        self._applets.append(self.dataExportApplet)
        
        self.dataExportApplet.prepare_for_entire_export = self.prepare_for_entire_export
        self.dataExportApplet.post_process_entire_export = self.post_process_entire_export

        self.batchProcessingApplet = BatchProcessingApplet(self, 
                                                           "Batch Processing", 
                                                           self.dataSelectionApplet, 
                                                           self.dataExportApplet)

        self._applets.append(self.batchProcessingApplet)
        if unused_args:
            # We parse the export setting args first.  All remaining args are considered input files by the input applet.
            self._batch_export_args, unused_args = self.dataExportApplet.parse_known_cmdline_args( unused_args )
            self._batch_input_args, unused_args = self.batchProcessingApplet.parse_known_cmdline_args( unused_args )
        else:
            self._batch_input_args = None
            self._batch_export_args = None

        if unused_args:
            logger.warn("Unused command-line args: {}".format( unused_args ))
    def __init__(self,
                 shell,
                 headless,
                 workflow_cmdline_args,
                 project_creation_args,
                 appendBatchOperators=True,
                 *args,
                 **kwargs):
        # Create a graph to be shared by all operators
        graph = Graph()
        super(PixelClassificationWorkflow,
              self).__init__(shell,
                             headless,
                             workflow_cmdline_args,
                             project_creation_args,
                             graph=graph,
                             *args,
                             **kwargs)
        self._applets = []
        self._workflow_cmdline_args = workflow_cmdline_args

        data_instructions = "Select your input data using the 'Raw Data' tab shown on the right"

        # Parse workflow-specific command-line args
        parser = argparse.ArgumentParser()
        parser.add_argument('--filter',
                            help="pixel feature filter implementation.",
                            choices=['Original', 'Refactored', 'Interpolated'],
                            default='Original')
        parser.add_argument(
            '--print-labels-by-slice',
            help="Print the number of labels for each Z-slice of each image.",
            action="store_true")
        parser.add_argument(
            '--label-search-value',
            help=
            "If provided, only this value is considered when using --print-labels-by-slice",
            default=0,
            type=int)
        parser.add_argument('--generate-random-labels',
                            help="Add random labels to the project file.",
                            action="store_true")
        parser.add_argument(
            '--random-label-value',
            help="The label value to use injecting random labels",
            default=1,
            type=int)
        parser.add_argument(
            '--random-label-count',
            help=
            "The number of random labels to inject via --generate-random-labels",
            default=2000,
            type=int)
        parser.add_argument(
            '--retrain',
            help=
            "Re-train the classifier based on labels stored in project file, and re-save.",
            action="store_true")

        # Parse the creation args: These were saved to the project file when this project was first created.
        parsed_creation_args, unused_args = parser.parse_known_args(
            project_creation_args)
        self.filter_implementation = parsed_creation_args.filter

        # Parse the cmdline args for the current session.
        parsed_args, unused_args = parser.parse_known_args(
            workflow_cmdline_args)
        self.print_labels_by_slice = parsed_args.print_labels_by_slice
        self.label_search_value = parsed_args.label_search_value
        self.generate_random_labels = parsed_args.generate_random_labels
        self.random_label_value = parsed_args.random_label_value
        self.random_label_count = parsed_args.random_label_count
        self.retrain = parsed_args.retrain

        if parsed_args.filter and parsed_args.filter != parsed_creation_args.filter:
            logger.error(
                "Ignoring new --filter setting.  Filter implementation cannot be changed after initial project creation."
            )

        # Applets for training (interactive) workflow
        self.projectMetadataApplet = ProjectMetadataApplet()
        self.dataSelectionApplet = DataSelectionApplet(
            self,
            "Input Data",
            "Input Data",
            supportIlastik05Import=True,
            batchDataGui=False,
            instructionText=data_instructions)
        opDataSelection = self.dataSelectionApplet.topLevelOperator

        if ilastik_config.getboolean('ilastik', 'debug'):
            # see role constants, above
            role_names = ['Raw Data', 'Prediction Mask']
            opDataSelection.DatasetRoles.setValue(role_names)
        else:
            role_names = ['Raw Data']
            opDataSelection.DatasetRoles.setValue(role_names)

        self.featureSelectionApplet = FeatureSelectionApplet(
            self, "Feature Selection", "FeatureSelections",
            self.filter_implementation)

        self.pcApplet = PixelClassificationApplet(self, "PixelClassification")
        opClassify = self.pcApplet.topLevelOperator

        self.dataExportApplet = PixelClassificationDataExportApplet(
            self, "Prediction Export")
        opDataExport = self.dataExportApplet.topLevelOperator
        opDataExport.PmapColors.connect(opClassify.PmapColors)
        opDataExport.LabelNames.connect(opClassify.LabelNames)
        opDataExport.WorkingDirectory.connect(opDataSelection.WorkingDirectory)
        opDataExport.SelectionNames.setValue(self.EXPORT_NAMES)

        # Expose for shell
        self._applets.append(self.projectMetadataApplet)
        self._applets.append(self.dataSelectionApplet)
        self._applets.append(self.featureSelectionApplet)
        self._applets.append(self.pcApplet)
        self._applets.append(self.dataExportApplet)

        self._batch_input_args = None
        self._batch_export_args = None

        self.batchInputApplet = None
        self.batchResultsApplet = None
        if appendBatchOperators:
            # Create applets for batch workflow
            self.batchInputApplet = DataSelectionApplet(
                self,
                "Batch Prediction Input Selections",
                "Batch Inputs",
                supportIlastik05Import=False,
                batchDataGui=True)
            self.batchResultsApplet = PixelClassificationDataExportApplet(
                self, "Batch Prediction Output Locations", isBatch=True)

            # Expose in shell
            self._applets.append(self.batchInputApplet)
            self._applets.append(self.batchResultsApplet)

            # Connect batch workflow (NOT lane-based)
            self._initBatchWorkflow()

            if unused_args:
                # We parse the export setting args first.  All remaining args are considered input files by the input applet.
                self._batch_export_args, unused_args = self.batchResultsApplet.parse_known_cmdline_args(
                    unused_args)
                self._batch_input_args, unused_args = self.batchInputApplet.parse_known_cmdline_args(
                    unused_args)

        if unused_args:
            logger.warn("Unused command-line args: {}".format(unused_args))
Example #9
    def __init__(self, shell, headless, workflow_cmdline_args, project_creation_args, appendBatchOperators=True, *args, **kwargs):
        graph = kwargs['graph'] if 'graph' in kwargs else Graph()
        if 'graph' in kwargs: del kwargs['graph']
        super( CountingWorkflow, self ).__init__( shell, headless, workflow_cmdline_args, project_creation_args, graph=graph, *args, **kwargs )

        # Parse workflow-specific command-line args
        parser = argparse.ArgumentParser()
        parser.add_argument("--csv-export-file", help="Instead of exporting prediction density images, export total counts to the given csv path.")
        self.parsed_counting_workflow_args, unused_args = parser.parse_known_args(workflow_cmdline_args)

        ######################
        # Interactive workflow
        ######################

        self.projectMetadataApplet = ProjectMetadataApplet()

        self.dataSelectionApplet = DataSelectionApplet(self,
                                                       "Input Data",
                                                       "Input Data",
                                                       batchDataGui=False,
                                                       force5d=False
                                                      )
        opDataSelection = self.dataSelectionApplet.topLevelOperator
        role_names = ['Raw Data']
        opDataSelection.DatasetRoles.setValue( role_names )

        self.featureSelectionApplet = FeatureSelectionApplet(self,
                                                             "Feature Selection",
                                                             "FeatureSelections")

        #self.pcApplet = PixelClassificationApplet(self, "PixelClassification")
        self.countingApplet = CountingApplet(workflow=self)
        opCounting = self.countingApplet.topLevelOperator

        self.dataExportApplet = CountingDataExportApplet(self, "Density Export", opCounting)
        
        opDataExport = self.dataExportApplet.topLevelOperator
        opDataExport.PmapColors.connect(opCounting.PmapColors)
        opDataExport.LabelNames.connect(opCounting.LabelNames)
        opDataExport.UpperBound.connect(opCounting.UpperBound)
        opDataExport.WorkingDirectory.connect(opDataSelection.WorkingDirectory)
        opDataExport.SelectionNames.setValue( ['Probabilities'] )        

        self._applets = []
        self._applets.append(self.projectMetadataApplet)
        self._applets.append(self.dataSelectionApplet)
        self._applets.append(self.featureSelectionApplet)
        self._applets.append(self.countingApplet)
        self._applets.append(self.dataExportApplet)


        self._batch_input_args = None
        self._batch_export_args = None

        self.batchInputApplet = None
        self.batchResultsApplet = None

        if appendBatchOperators:
            # Connect batch workflow (NOT lane-based)
            self._initBatchWorkflow()
            if unused_args:
                # We parse the export setting args first.
                # All remaining args are considered input files by the input applet.
                self._batch_export_args, unused_args = self.batchResultsApplet.parse_known_cmdline_args( unused_args )
                self._batch_input_args, unused_args = self.batchInputApplet.parse_known_cmdline_args( unused_args, role_names )
Example #10
    def __init__(self,
                 shell,
                 headless,
                 workflow_cmdline_args,
                 project_creation_args,
                 hintoverlayFile=None,
                 pmapoverlayFile=None,
                 *args,
                 **kwargs):
        graph = Graph()

        super(SplitBodyCarvingWorkflow, self).__init__(shell,
                                                       headless,
                                                       workflow_cmdline_args,
                                                       project_creation_args,
                                                       *args,
                                                       graph=graph,
                                                       **kwargs)

        ## Create applets
        self.projectMetadataApplet = ProjectMetadataApplet()
        self.dataSelectionApplet = DataSelectionApplet(
            self,
            "Input Data",
            "Input Data",
            supportIlastik05Import=True,
            batchDataGui=False)
        opDataSelection = self.dataSelectionApplet.topLevelOperator

        opDataSelection.DatasetRoles.setValue(
            ['Raw Data', 'Pixel Probabilities', 'Raveler Labels'])

        self.preprocessingApplet = PreprocessingApplet(
            workflow=self,
            title="Preprocessing",
            projectFileGroupName="preprocessing")

        self.splitBodyCarvingApplet = SplitBodyCarvingApplet(
            workflow=self, projectFileGroupName="carving")

        self.splitBodyPostprocessingApplet = SplitBodyPostprocessingApplet(
            workflow=self)
        self.splitBodySupervoxelExportApplet = SplitBodySupervoxelExportApplet(
            workflow=self)

        # Expose to shell
        self._applets = []
        self._applets.append(self.projectMetadataApplet)
        self._applets.append(self.dataSelectionApplet)
        self._applets.append(self.preprocessingApplet)
        self._applets.append(self.splitBodyCarvingApplet)
        self._applets.append(self.splitBodyPostprocessingApplet)
        self._applets.append(self.splitBodySupervoxelExportApplet)

        self._split_tool_params = None
        if workflow_cmdline_args:
            arg_parser = argparse.ArgumentParser(
                description=
                "Specify parameters for the split-body carving workflow")
            arg_parser.add_argument('--split_tool_param_file', required=False)
            parsed_args, unused_args = arg_parser.parse_known_args(
                workflow_cmdline_args)
            if unused_args:
                logger.warn("Unused command-line args: {}".format(unused_args))

            if parsed_args.split_tool_param_file is None:
                logger.warn("Missing cmd-line arg: --split_tool_param_file")
            else:
                logger.debug("Parsing split tool parameters: {}".format(
                    parsed_args.split_tool_param_file))
                json_parser = JsonConfigParser(SplitToolParamsSchema)
                self._split_tool_params = json_parser.parseConfigFile(
                    parsed_args.split_tool_param_file)
Example #11
    def __init__(self, shell, headless, workflow_cmdline_args,
                 project_creation_args, *args, **kwargs):
        graph = kwargs['graph'] if 'graph' in kwargs else Graph()
        if 'graph' in kwargs: del kwargs['graph']
        super(CountingWorkflow, self).__init__(shell,
                                               headless,
                                               workflow_cmdline_args,
                                               project_creation_args,
                                               graph=graph,
                                               *args,
                                               **kwargs)
        self.stored_classifier = None

        # Parse workflow-specific command-line args
        parser = argparse.ArgumentParser()
        parser.add_argument(
            "--csv-export-file",
            help=
            "Instead of exporting prediction density images, export total counts to the given csv path."
        )
        self.parsed_counting_workflow_args, unused_args = parser.parse_known_args(
            workflow_cmdline_args)

        ######################
        # Interactive workflow
        ######################

        self.projectMetadataApplet = ProjectMetadataApplet()

        self.dataSelectionApplet = DataSelectionApplet(self, "Input Data",
                                                       "Input Data")
        opDataSelection = self.dataSelectionApplet.topLevelOperator
        role_names = ['Raw Data']
        opDataSelection.DatasetRoles.setValue(role_names)

        self.featureSelectionApplet = FeatureSelectionApplet(
            self, "Feature Selection", "FeatureSelections")

        self.countingApplet = CountingApplet(workflow=self)
        opCounting = self.countingApplet.topLevelOperator

        self.dataExportApplet = CountingDataExportApplet(
            self, "Density Export", opCounting)

        # Customization hooks
        self.dataExportApplet.prepare_for_entire_export = self.prepare_for_entire_export
        self.dataExportApplet.post_process_lane_export = self.post_process_lane_export
        self.dataExportApplet.post_process_entire_export = self.post_process_entire_export

        opDataExport = self.dataExportApplet.topLevelOperator
        opDataExport.PmapColors.connect(opCounting.PmapColors)
        opDataExport.LabelNames.connect(opCounting.LabelNames)
        opDataExport.UpperBound.connect(opCounting.UpperBound)
        opDataExport.WorkingDirectory.connect(opDataSelection.WorkingDirectory)
        opDataExport.SelectionNames.setValue(['Probabilities'])

        self._applets = []
        self._applets.append(self.projectMetadataApplet)
        self._applets.append(self.dataSelectionApplet)
        self._applets.append(self.featureSelectionApplet)
        self._applets.append(self.countingApplet)
        self._applets.append(self.dataExportApplet)

        self.batchProcessingApplet = BatchProcessingApplet(
            self, "Batch Processing", self.dataSelectionApplet,
            self.dataExportApplet)
        self._applets.append(self.batchProcessingApplet)
        if unused_args:
            # We parse the export setting args first.  All remaining args are considered input files by the input applet.
            self._batch_export_args, unused_args = self.dataExportApplet.parse_known_cmdline_args(
                unused_args)
            self._batch_input_args, unused_args = self.batchProcessingApplet.parse_known_cmdline_args(
                unused_args)
        else:
            self._batch_input_args = None
            self._batch_export_args = None

        if unused_args:
            logger.warn("Unused command-line args: {}".format(unused_args))
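The customization hooks used here (and in the object classification example above) are plain attributes on the export applet that the workflow overwrites with its own bound methods; the applet then calls whatever is bound to those attributes during an export. A minimal sketch of the pattern with generic stand-in classes (not the real applet API):

class Exporter(object):
    """Stand-in for a data-export applet exposing overridable hooks."""
    def prepare_for_entire_export(self):
        pass  # default hook: do nothing
    def post_process_entire_export(self):
        pass  # default hook: do nothing
    def run_export(self):
        # Calls whatever is currently bound to the hook attributes.
        self.prepare_for_entire_export()
        print("exporting...")
        self.post_process_entire_export()

class OwningWorkflow(object):
    def __init__(self):
        self.exporter = Exporter()
        # Overwrite the hooks with this object's bound methods, mirroring
        # dataExportApplet.prepare_for_entire_export = self.prepare_for_entire_export above.
        self.exporter.prepare_for_entire_export = self.prepare_for_entire_export
        self.exporter.post_process_entire_export = self.post_process_entire_export
    def prepare_for_entire_export(self):
        print("workflow: preparing export")
    def post_process_entire_export(self):
        print("workflow: post-processing export")

OwningWorkflow().exporter.run_export()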
Example #12
    def __init__(self):
        super(PixelClassificationWorkflow, self).__init__()
        self._applets = []

        # Create a graph to be shared by all operators
        graph = Graph()
        self._graph = graph

        ######################
        # Interactive workflow
        ######################

        ## Create applets
        self.projectMetadataApplet = ProjectMetadataApplet()
        self.dataSelectionApplet = DataSelectionApplet(
            graph,
            "Input Data",
            "Input Data",
            supportIlastik05Import=True,
            batchDataGui=False)
        self.featureSelectionApplet = FeatureSelectionApplet(
            graph, "Feature Selection", "FeatureSelections")
        self.pcApplet = PixelClassificationApplet(graph, "PixelClassification")

        ## Access applet operators
        opData = self.dataSelectionApplet.topLevelOperator
        opTrainingFeatures = self.featureSelectionApplet.topLevelOperator
        opClassify = self.pcApplet.topLevelOperator

        ## Connect operators ##

        # Input Image -> Feature Op
        #         and -> Classification Op (for display)
        opTrainingFeatures.InputImage.connect(opData.Image)
        opClassify.InputImages.connect(opData.Image)

        # Feature Images -> Classification Op (for training, prediction)
        opClassify.FeatureImages.connect(opTrainingFeatures.OutputImage)
        opClassify.CachedFeatureImages.connect(
            opTrainingFeatures.CachedOutputImage)

        # Training flags -> Classification Op (for GUI restrictions)
        opClassify.LabelsAllowedFlags.connect(opData.AllowLabels)

        ######################
        # Batch workflow
        ######################

        ## Create applets
        self.batchInputApplet = DataSelectionApplet(
            graph,
            "Batch Inputs",
            "BatchDataSelection",
            supportIlastik05Import=False,
            batchDataGui=True)
        self.batchResultsApplet = BatchIoApplet(graph, "Batch Results")

        ## Access applet operators
        opBatchInputs = self.batchInputApplet.topLevelOperator
        opBatchInputs.name = 'opBatchInputs'
        opBatchResults = self.batchResultsApplet.topLevelOperator

        ## Create additional batch workflow operators
        opBatchFeatures = OperatorWrapper(OpFeatureSelection,
                                          graph=graph,
                                          promotedSlotNames=['InputImage'])
        opBatchFeatures.name = "opBatchFeatures"
        opBatchPredictor = OperatorWrapper(OpPredictRandomForest,
                                           graph=graph,
                                           promotedSlotNames=['Image'])
        opBatchPredictor.name = "opBatchPredictor"
        opSelectBatchDatasetPath = OperatorWrapper(OpAttributeSelector,
                                                   graph=graph)

        ## Connect Operators ##

        # Provide dataset paths from data selection applet to the batch export applet via an attribute selector
        opSelectBatchDatasetPath.InputObject.connect(opBatchInputs.Dataset)
        opSelectBatchDatasetPath.AttributeName.setValue('filePath')
        opBatchResults.DatasetPath.connect(opSelectBatchDatasetPath.Result)

        # Connect (clone) the feature operator inputs from
        #  the interactive workflow's features operator (which gets them from the GUI)
        opBatchFeatures.Scales.connect(opTrainingFeatures.Scales)
        opBatchFeatures.FeatureIds.connect(opTrainingFeatures.FeatureIds)
        opBatchFeatures.SelectionMatrix.connect(
            opTrainingFeatures.SelectionMatrix)

        # Classifier and LabelsCount are provided by the interactive workflow
        opBatchPredictor.Classifier.connect(opClassify.Classifier)
        opBatchPredictor.LabelsCount.connect(opClassify.MaxLabelValue)

        # Connect Image pathway:
        # Input Image -> Features Op -> Prediction Op -> Export
        opBatchFeatures.InputImage.connect(opBatchInputs.Image)
        opBatchPredictor.Image.connect(opBatchFeatures.OutputImage)
        opBatchResults.ImageToExport.connect(opBatchPredictor.PMaps)

        self._applets.append(self.projectMetadataApplet)
        self._applets.append(self.dataSelectionApplet)
        self._applets.append(self.featureSelectionApplet)
        self._applets.append(self.pcApplet)
        self._applets.append(self.batchInputApplet)
        self._applets.append(self.batchResultsApplet)

        # The shell needs a slot from which it can read the list of image names to switch between.
        # Use an OpAttributeSelector to create a slot containing just the filename from the OpDataSelection's DatasetInfo slot.
        opSelectFilename = OperatorWrapper(OpAttributeSelector, graph=graph)
        opSelectFilename.InputObject.connect(opData.Dataset)
        opSelectFilename.AttributeName.setValue('filePath')

        self._imageNameListSlot = opSelectFilename.Result
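The shell reads self._applets and self._imageNameListSlot through accessor properties on the workflow; a minimal sketch of those accessors, assumed from the attribute names used above rather than copied from the ilastik sources:

class ExampleWorkflow(object):
    """Minimal container showing the accessor pattern; a real workflow
    builds _applets and _imageNameListSlot in __init__ as shown above."""
    def __init__(self, applets, image_name_list_slot=None):
        self._applets = applets
        self._imageNameListSlot = image_name_list_slot

    @property
    def applets(self):
        # Ordered list of applets the shell should display.
        return self._applets

    @property
    def imageNameListSlot(self):
        # Slot from which the shell reads the list of image names.
        return self._imageNameListSlot

wf = ExampleWorkflow(["Project Metadata", "Input Data", "Feature Selection"])
print(wf.applets)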
Example #13
    def __init__(self,
                 shell,
                 headless,
                 workflow_cmdline_args,
                 project_creation_args,
                 appendBatchOperators=True,
                 *args,
                 **kwargs):
        graph = kwargs['graph'] if 'graph' in kwargs else Graph()
        if 'graph' in kwargs: del kwargs['graph']
        super(CountingWorkflow, self).__init__(shell,
                                               headless,
                                               workflow_cmdline_args,
                                               project_creation_args,
                                               graph=graph,
                                               *args,
                                               **kwargs)

        ######################
        # Interactive workflow
        ######################

        self.projectMetadataApplet = ProjectMetadataApplet()

        self.dataSelectionApplet = DataSelectionApplet(self,
                                                       "Input Data",
                                                       "Input Data",
                                                       batchDataGui=False,
                                                       force5d=False)
        opDataSelection = self.dataSelectionApplet.topLevelOperator
        opDataSelection.DatasetRoles.setValue(['Raw Data'])

        self.featureSelectionApplet = FeatureSelectionApplet(
            self, "Feature Selection", "FeatureSelections")

        #self.pcApplet = PixelClassificationApplet(self, "PixelClassification")
        self.countingApplet = CountingApplet(workflow=self)
        opCounting = self.countingApplet.topLevelOperator

        self.dataExportApplet = CountingDataExportApplet(
            self, "Density Export")

        opDataExport = self.dataExportApplet.topLevelOperator
        opDataExport.PmapColors.connect(opCounting.PmapColors)
        opDataExport.LabelNames.connect(opCounting.LabelNames)
        opDataExport.UpperBound.connect(opCounting.UpperBound)
        opDataExport.WorkingDirectory.connect(opDataSelection.WorkingDirectory)
        opDataExport.SelectionNames.setValue(['Probabilities'])

        self._applets = []
        self._applets.append(self.projectMetadataApplet)
        self._applets.append(self.dataSelectionApplet)
        self._applets.append(self.featureSelectionApplet)
        self._applets.append(self.countingApplet)
        self._applets.append(self.dataExportApplet)

        if appendBatchOperators:
            # Create applets for batch workflow
            self.batchInputApplet = DataSelectionApplet(
                self,
                "Batch Prediction Input Selections",
                "BatchDataSelection",
                supportIlastik05Import=False,
                batchDataGui=True)
            self.batchResultsApplet = CountingDataExportApplet(
                self, "Batch Prediction Output Locations", isBatch=True)

            # Expose in shell
            self._applets.append(self.batchInputApplet)
            self._applets.append(self.batchResultsApplet)

            # Connect batch workflow (NOT lane-based)
            self._initBatchWorkflow()