Example no. 1
0
def runWorkflow(parsed_args):
    """Run the headless pixel-classification workflow described by ``parsed_args``.

    Validates the project file and all batch inputs, opens the project in a
    headless shell, optionally generates predictions for the project's own
    datasets, runs batch prediction on any extra inputs, then closes the
    project.

    Raises:
        RuntimeError: if the project file or any batch input is missing,
            or if batch prediction reports failure.
    """
    args = parsed_args

    # Make sure project file exists.
    if not os.path.exists(args.project):
        raise RuntimeError("Project file '" + args.project + "' does not exist.")

    # Make sure batch inputs exist.  Check ALL of them before raising so that
    # every missing file is logged, as the error message promises.  (The
    # original reset the flag on each iteration and raised inside the loop,
    # so only the first missing input was ever reported.)
    error = False
    for p in args.batch_inputs:
        p = PathComponents(p).externalPath
        if not os.path.exists(p):
            logger.error("Batch input file does not exist: " + p)
            error = True
    if error:
        raise RuntimeError("Could not find one or more batch inputs.  See logged errors.")

    # Nothing to do if neither project predictions nor batch inputs were requested.
    if not args.generate_project_predictions and len(args.batch_inputs) == 0:
        logger.error("Command-line arguments didn't specify a workload.")
        return

    # Instantiate 'shell'
    shell, workflow = startShellHeadless( PixelClassificationWorkflow )

    if args.assume_old_ilp_axes:
        # Special hack for Janelia:
        # In some old versions of 0.5, the data was stored in tyxzc order.
        # We have no way of inspecting the data to determine this, so we allow
        #  users to specify that their ilp is very old using the
        #  assume_old_ilp_axes command-line flag
        ilastik.utility.globals.ImportOptions.default_axis_order = 'tyxzc'

    # Load project (auto-import it if necessary)
    logger.info("Opening project: '" + args.project + "'")
    shell.openProjectPath(args.project)

    # Predictions for project input datasets
    if args.generate_project_predictions:
        generateProjectPredictions(shell, workflow)

    # Predictions for other datasets ('batch datasets')
    result = True
    if len(args.batch_inputs) > 0:
        result = generateBatchPredictions(workflow,
                                          args.batch_inputs,
                                          args.batch_export_dir,
                                          args.batch_output_suffix,
                                          args.batch_output_dataset_name)

    logger.info("Closing project...")
    shell.projectManager.closeCurrentProject()

    # `assert` statements are stripped under `python -O`; raise explicitly so
    # a batch-prediction failure is never silently ignored.
    if not result:
        raise RuntimeError("Batch prediction failed.  See logged errors.")

    logger.info("FINISHED.")
Example no. 2
0
    def create_new_tst_project(cls):
        """Build, populate, save, and close a throwaway test project file."""
        # Launch a headless shell running the pixel-classification workflow.
        shell, workflow = startShellHeadless(PixelClassificationWorkflow)

        # Create a fresh blank project file and load it (not read-only).
        project_path = cls.PROJECT_FILE
        project_file = shell.projectManager.createBlankProjectFile(project_path)
        shell.projectManager.loadProject(project_file, project_path, False)

        # Register the sample image as the project's single dataset.
        from ilastik.applets.dataSelection.opDataSelection import DatasetInfo
        dataset_info = DatasetInfo()
        dataset_info.filePath = cls.SAMPLE_DATA
        op_data = workflow.dataSelectionApplet.topLevelOperator
        op_data.Dataset.resize(1)
        op_data.Dataset[0].setValue(dataset_info)

        # Configure the feature-selection applet: seven sigmas, six features.
        scales = [0.3, 0.7, 1, 1.6, 3.5, 5.0, 10.0]
        feature_ids = [
            'GaussianSmoothing',
            'LaplacianOfGaussian',
            'StructureTensorEigenvalues',
            'HessianOfGaussianEigenvalues',
            'GaussianGradientMagnitude',
            'DifferenceOfGaussians',
        ]
        op_features = workflow.featureSelectionApplet.topLevelOperator
        op_features.Scales.setValue(scales)
        op_features.FeatureIds.setValue(feature_ids)

        # Enable only sigma 0.3 for the first three features; all others off.
        row_on = [True] + [False] * 6
        row_off = [False] * 7
        selection_matrix = numpy.array(
            [row_on, row_on, row_on, row_off, row_off, row_off])
        op_features.SelectionMatrix.setValue(selection_matrix)

        # Paint two small rectangular label regions directly into the operator.
        op_pixel_class = workflow.pcApplet.topLevelOperator

        region1 = sl[0:1, 0:10, 0:10, 0:1, 0:1]
        op_pixel_class.LabelInputs[0][region1] = 1 * numpy.ones(
            slicing2shape(region1), dtype=numpy.uint8)

        region2 = sl[0:1, 0:10, 10:20, 0:1, 0:1]
        op_pixel_class.LabelInputs[0][region2] = 2 * numpy.ones(
            slicing2shape(region2), dtype=numpy.uint8)

        # Persist everything and release the project.
        shell.projectManager.saveProject()
        shell.projectManager.closeCurrentProject()
    def create_new_tst_project(cls):
        """Create a test project: one dataset, basic features, two label boxes."""
        shell, workflow = startShellHeadless(PixelClassificationWorkflow)

        # Blank project file, loaded immediately (read-only flag False).
        path = cls.PROJECT_FILE
        project_file = shell.projectManager.createBlankProjectFile(path)
        shell.projectManager.loadProject(project_file, path, False)

        # Point the data-selection applet at the sample image.
        from ilastik.applets.dataSelection.opDataSelection import DatasetInfo
        ds_info = DatasetInfo()
        ds_info.filePath = cls.SAMPLE_DATA
        data_op = workflow.dataSelectionApplet.topLevelOperator
        data_op.Dataset.resize(1)
        data_op.Dataset[0].setValue(ds_info)

        # Feature configuration: seven sigmas crossed with six feature types.
        sigmas = [0.3, 0.7, 1, 1.6, 3.5, 5.0, 10.0]
        feature_names = ['GaussianSmoothing',
                         'LaplacianOfGaussian',
                         'StructureTensorEigenvalues',
                         'HessianOfGaussianEigenvalues',
                         'GaussianGradientMagnitude',
                         'DifferenceOfGaussians']
        features_op = workflow.featureSelectionApplet.topLevelOperator
        features_op.Scales.setValue(sigmas)
        features_op.FeatureIds.setValue(feature_names)

        # Selection matrix (features x sigmas): only sigma 0.3 of the first
        # three features is enabled.
        matrix = numpy.zeros((6, 7), dtype=bool)
        matrix[0:3, 0] = True
        features_op.SelectionMatrix.setValue(matrix)

        # Write two disjoint 10x10 label rectangles (classes 1 and 2)
        # directly into the pixel-classification operator.
        pc_op = workflow.pcApplet.topLevelOperator
        for label_value, slicing in ((1, sl[0:1, 0:10, 0:10, 0:1, 0:1]),
                                     (2, sl[0:1, 0:10, 10:20, 0:1, 0:1])):
            labels = label_value * numpy.ones(slicing2shape(slicing),
                                              dtype=numpy.uint8)
            pc_op.LabelInputs[0][slicing] = labels

        # Save and close.
        shell.projectManager.saveProject()
        shell.projectManager.closeCurrentProject()
def runWorkflow(parsed_args):
    """Run the headless autocontext-classification workflow described by ``parsed_args``.

    Validates the project file and all batch inputs, opens the project in a
    headless shell, optionally generates predictions for the project's own
    datasets, runs batch prediction on any extra inputs, then closes the
    project.

    Raises:
        RuntimeError: if the project file or any batch input is missing,
            or if batch prediction reports failure.
    """
    args = parsed_args

    # Make sure project file exists.
    if not os.path.exists(args.project):
        raise RuntimeError("Project file '" + args.project + "' does not exist.")

    # Make sure batch inputs exist.  Check ALL of them before raising so that
    # every missing file is logged, as the error message promises.  (The
    # original reset the flag on each iteration and raised inside the loop,
    # and contained a stray Python-2 `print p` debug statement, removed here.)
    error = False
    for p in args.batch_inputs:
        p = PathComponents(p).externalPath
        if not os.path.exists(p):
            logger.error("Batch input file does not exist: " + p)
            error = True
    if error:
        raise RuntimeError("Could not find one or more batch inputs.  See logged errors.")

    # Nothing to do if neither project predictions nor batch inputs were requested.
    if not args.generate_project_predictions and len(args.batch_inputs) == 0:
        logger.error("Command-line arguments didn't specify a workload.")
        return

    # Instantiate 'shell'
    shell, workflow = startShellHeadless( AutocontextClassificationWorkflow )

    # Load project (auto-import it if necessary)
    logger.info("Opening project: '" + args.project + "'")
    shell.openProjectPath(args.project)

    # Predictions for project input datasets
    if args.generate_project_predictions:
        generateProjectPredictions(shell, workflow)

    # Predictions for other datasets ('batch datasets')
    result = True
    if len(args.batch_inputs) > 0:
        result = generateBatchPredictions(workflow,
                                          args.batch_inputs,
                                          args.batch_export_dir,
                                          args.batch_output_suffix,
                                          args.batch_output_dataset_name)

    logger.info("Closing project...")
    shell.projectManager.closeCurrentProject()

    # `assert` statements are stripped under `python -O`; raise explicitly so
    # a batch-prediction failure is never silently ignored.
    if not result:
        raise RuntimeError("Batch prediction failed.  See logged errors.")

    logger.info("FINISHED.")
Example no. 5
0
def runWorkflow(parsed_args):
    """Run the headless autocontext-classification workflow described by ``parsed_args``.

    Validates the project file and all batch inputs, opens the project in a
    headless shell, optionally generates predictions for the project's own
    datasets, runs batch prediction on any extra inputs, then closes the
    project.

    Raises:
        RuntimeError: if the project file or any batch input is missing,
            or if batch prediction reports failure.
    """
    args = parsed_args

    # Make sure project file exists.
    if not os.path.exists(args.project):
        raise RuntimeError("Project file '" + args.project + "' does not exist.")

    # Make sure batch inputs exist.  Check ALL of them before raising so that
    # every missing file is logged, as the error message promises.  (The
    # original reset the flag on each iteration and raised inside the loop,
    # and contained a stray Python-2 `print p` debug statement, removed here.)
    error = False
    for p in args.batch_inputs:
        p = PathComponents(p).externalPath
        if not os.path.exists(p):
            logger.error("Batch input file does not exist: " + p)
            error = True
    if error:
        raise RuntimeError("Could not find one or more batch inputs.  See logged errors.")

    # Nothing to do if neither project predictions nor batch inputs were requested.
    if not args.generate_project_predictions and len(args.batch_inputs) == 0:
        logger.error("Command-line arguments didn't specify a workload.")
        return

    # Instantiate 'shell'
    shell, workflow = startShellHeadless( AutocontextClassificationWorkflow )

    # Load project (auto-import it if necessary)
    logger.info("Opening project: '" + args.project + "'")
    shell.openProjectPath(args.project)

    # Predictions for project input datasets
    if args.generate_project_predictions:
        generateProjectPredictions(shell, workflow)

    # Predictions for other datasets ('batch datasets')
    result = True
    if len(args.batch_inputs) > 0:
        result = generateBatchPredictions(workflow,
                                          args.batch_inputs,
                                          args.batch_export_dir,
                                          args.batch_output_suffix,
                                          args.batch_output_dataset_name)

    logger.info("Closing project...")
    shell.projectManager.closeCurrentProject()

    # `assert` statements are stripped under `python -O`; raise explicitly so
    # a batch-prediction failure is never silently ignored.
    if not result:
        raise RuntimeError("Batch prediction failed.  See logged errors.")

    logger.info("FINISHED.")