Example #1
    def test_load_single_file_with_list(self):
        graph = lazyflow.graph.Graph()
        reader = OperatorWrapper(OpDataSelection, graph=graph, operator_kwargs={'forceAxisOrder': False})
        reader.ProjectFile.setValue(self.projectFile)
        reader.WorkingDirectory.setValue(os.getcwd())
        reader.ProjectDataGroup.setValue('DataSelection/local_data')

        fileNameString = os.path.pathsep.join(self.file_names)
        info = DatasetInfo(filepath=fileNameString)
        # Will be read from the filesystem since the data won't be found in the project file.
        info.location = DatasetInfo.Location.ProjectInternal
        info.internalPath = ""
        info.invertColors = False
        info.convertToGrayscale = False

        reader.Dataset.setValues([info])

        # Read the test files using the data selection operator and verify the contents
        imgData = reader.Image[0][...].wait()
        print('imgData', reader.Image.meta.axistags, reader.Image.meta.original_axistags)

        # Check raw images
        assert imgData.shape == self.imgData3Dct.shape, (imgData.shape, self.imgData3Dct.shape)

        numpy.testing.assert_array_equal(imgData, self.imgData3Dct)
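For context: this example loads a whole stack through a single DatasetInfo by joining the individual file names with os.path.pathsep. A minimal sketch of that step (the file names here are hypothetical, not from the source):

    import os

    file_names = ['slice00.png', 'slice01.png', 'slice02.png']  # hypothetical names
    fileNameString = os.path.pathsep.join(file_names)
    # On Linux/macOS this yields 'slice00.png:slice01.png:slice02.png';
    # on Windows the separator is ';'.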
Example #2
    def testBasic3DstacksFromFileList(self):
        for ext, fileNames in list(self.imgFileLists2D.items()):
            fileNameString = os.path.pathsep.join(fileNames)
            graph = lazyflow.graph.Graph()
            reader = OperatorWrapper(OpDataSelection, graph=graph, operator_kwargs={'forceAxisOrder': False})
            reader.ProjectFile.setValue(self.projectFile)
            reader.WorkingDirectory.setValue(os.getcwd())
            reader.ProjectDataGroup.setValue('DataSelection/local_data')

            info = DatasetInfo(filepath=fileNameString)
            # Will be read from the filesystem since the data won't be found in the project file.
            info.location = DatasetInfo.Location.ProjectInternal
            info.internalPath = ""
            info.invertColors = False
            info.convertToGrayscale = False

            reader.Dataset.setValues([info])

            # Read the test files using the data selection operator and verify the contents
            imgData3D = reader.Image[0][...].wait()

            # Check raw images
            assert imgData3D.shape == self.imgData3D.shape, (imgData3D.shape, self.imgData3D.shape)
            # skip this if image was saved compressed:
            if any(x.strip('.') in ext.lower() for x in self.compressedExtensions):
                print("Skipping raw comparison for compressed data: {}".format(ext))
                continue
            numpy.testing.assert_array_equal(imgData3D, self.imgData3D)
Example #3
        def impl():
            projFilePath = self.PROJECT_FILE
            shell = self.shell

            # New project
            shell.createAndLoadNewProject(projFilePath, self.workflowClass())
            workflow = shell.projectManager.workflow

            from ilastik.applets.dataSelection.opDataSelection import DatasetInfo
            opDataSelection = workflow.dataSelectionApplet.topLevelOperator
            for i, dataFile in enumerate(self.SAMPLE_DATA):
                # Add a file
                info = DatasetInfo()

                info.filePath = dataFile

                opDataSelection.DatasetGroup.resize(i + 1)
                opDataSelection.DatasetGroup[i][0].setValue(info)

            # Set some features
            opFeatures = workflow.featureSelectionApplet.topLevelOperator
            #                    sigma:   0.3    0.7    1.0    1.6    3.5    5.0   10.0
            selections = numpy.array(
                [[True, False, False, False, False, False, False],
                 [True, False, False, False, False, False, False],
                 [True, False, False, False, False, False, False],
                 [False, False, False, False, False, False, False],
                 [False, False, False, False, False, False, False],
                 [False, False, False, False, False, False, False]])
            opFeatures.SelectionMatrix.setValue(selections)
Example #4
 def impl():
     projFilePath = self.PROJECT_FILE
 
     shell = self.shell
     workflow = self.workflow
     
     # New project
     shell.createAndLoadNewProject(projFilePath)
 
     # Add a file
     from ilastik.applets.dataSelection.opDataSelection import DatasetInfo
     info = DatasetInfo()
     info.filePath = self.SAMPLE_DATA
     opDataSelection = workflow.dataSelectionApplet.topLevelOperator
     opDataSelection.Dataset.resize(1)
     opDataSelection.Dataset[0].setValue(info)
     
     # Set some features
     featureGui = workflow.featureSelectionApplet.gui
     opFeatures = workflow.featureSelectionApplet.topLevelOperator
     #                    sigma:   0.3    0.7    1.0    1.6    3.5    5.0   10.0
     selections = numpy.array( [[True, False, False, False, False, False, False],
                                [True, False, False, False, False, False, False],
                                [True, False, False, False, False, False, False],
                                [False, False, False, False, False, False, False],
                                [False, False, False, False, False, False, False],
                                [False, False, False, False, False, False, False]] )
     opFeatures.SelectionMatrix.setValue(selections)
 
     # Save and close
     shell.projectManager.saveProject()
     shell.ensureNoCurrentProject(assertClean=True)
Example #5
        def impl():
            projFilePath = self.PROJECT_FILE
            shell = self.shell

            # New project
            shell.createAndLoadNewProject(projFilePath, self.workflowClass())
            workflow = shell.projectManager.workflow

            from ilastik.applets.dataSelection.opDataSelection import DatasetInfo
            opDataSelection = workflow.dataSelectionApplet.topLevelOperator
            for i, dataFile in enumerate(self.SAMPLE_DATA):
                # Add a file
                info = DatasetInfo()
                info.filePath = dataFile
                opDataSelection.DatasetGroup.resize(i + 1)
                opDataSelection.DatasetGroup[i][0].setValue(info)

            # Set some features
            opFeatures = workflow.featureSelectionApplet.topLevelOperator
            opFeatures.FeatureIds.setValue(
                OpPixelFeaturesPresmoothed.DefaultFeatureIds)
            opFeatures.Scales.setValue([0.3, 0.7, 1, 1.6, 3.5, 5.0, 10.0])
            #                    sigma:   0.3    0.7    1.0    1.6    3.5    5.0   10.0
            selections = numpy.array(
                [[True, False, False, False, False, False, False],
                 [True, False, False, False, False, False, False],
                 [True, False, False, False, False, False, False],
                 [False, False, False, False, False, False, False],
                 [False, False, False, False, False, False, False],
                 [False, False, False, False, False, False, False]])
            opFeatures.SelectionMatrix.setValue(selections)

            # Save and close
            shell.projectManager.saveProject()
            shell.ensureNoCurrentProject(assertClean=True)
Example #6
        def impl():
            projFilePath = self.PROJECT_FILE

            shell = self.shell

            # New project
            shell.createAndLoadNewProject(projFilePath)
            workflow = shell.projectManager.workflow

            # Add a file
            from ilastik.applets.dataSelection.opDataSelection import DatasetInfo
            info = DatasetInfo()
            info.filePath = self.SAMPLE_DATA
            opDataSelection = workflow.dataSelectionApplet.topLevelOperator
            opDataSelection.Dataset.resize(1)
            opDataSelection.Dataset[0].setValue(info)

            # Set some features
            opFeatures = workflow.featureSelectionApplet.topLevelOperator
            opFeatures.FeatureIds.setValue(
                OpPixelFeaturesPresmoothed.DefaultFeatureIds)
            opFeatures.Scales.setValue([0.3, 0.7, 1, 1.6, 3.5, 5.0, 10.0])
            #                    sigma:   0.3    0.7    1.0    1.6    3.5    5.0   10.0
            selections = numpy.array(
                [[True, True, True, True, True, True, False],
                 [True, True, True, True, True, True, False],
                 [True, True, True, True, True, True, False],
                 [True, True, True, True, True, True, False],
                 [True, True, True, True, True, True, False],
                 [True, True, True, True, True, True, False]])

            opFeatures.SelectionMatrix.setValue(selections)
Example #7
        def impl():
            projFilePath = self.PROJECT_FILE

            shell = self.shell

            # New project
            shell.createAndLoadNewProject(projFilePath, self.workflowClass())
            workflow = shell.projectManager.workflow

            # Add a file
            from ilastik.applets.dataSelection.opDataSelection import DatasetInfo
            info = DatasetInfo()
            info.filePath = self.SAMPLE_DATA
            opDataSelection = workflow.dataSelectionApplet.topLevelOperator
            opDataSelection.DatasetGroup.resize(1)
            opDataSelection.DatasetGroup[0][0].setValue(info)

            # Set some features
            opFeatures = workflow.featureSelectionApplet.topLevelOperator
            #                    sigma:   0.3    0.7    1.0    1.6    3.5    5.0   10.0
            selections = numpy.array(
                [[True, False, False, False, False, False, False],
                 [True, False, False, False, False, False, False],
                 [True, False, False, False, False, False, False],
                 [False, False, False, False, False, False, False],
                 [False, False, False, False, False, False, False],
                 [False, False, False, False, False, False, False]])

            opFeatures.SelectionMatrix.setValue(selections)

            # Save and close
            shell.projectManager.saveProject()
            shell.ensureNoCurrentProject(assertClean=True)
Example #8
    def test_fake_data_source(self):
        graph = lazyflow.graph.Graph()
        reader = OperatorWrapper(OpDataSelection, graph=graph,
                                 operator_kwargs={'forceAxisOrder': False})
        reader.ProjectFile.setValue(self.projectFile)
        reader.WorkingDirectory.setValue(os.getcwd())
        reader.ProjectDataGroup.setValue('DataSelection/local_data')

        info = DatasetInfo()
        # Will be read from the filesystem since the data won't be found in the project file.
        info.location = DatasetInfo.Location.ProjectInternal
        info.filePath = self.testRawDataFileName
        info.internalPath = ""
        info.invertColors = False
        info.convertToGrayscale = False
        # Use *fake* data source
        info.realDataSource = False
        info.axistags = vigra.defaultAxistags('tczyx')
        info.laneShape = self.imgData.shape
        info.laneDtype = self.imgData.dtype

        reader.Dataset.setValues([info])

        # Verify that the data selection operator now returns fake data
        # with the expected shape and dtype
        imgData = reader.Image[0][...].wait()

        assert imgData.shape == self.imgData.shape
        assert imgData.dtype == self.imgData.dtype
        expected_fake_data = numpy.zeros(info.laneShape, dtype=info.laneDtype)
        numpy.testing.assert_array_equal(imgData, expected_fake_data)
Example #9
        def impl():
            projFilePath = self.PROJECT_FILE
        
            shell = self.shell
            
            # New project
            shell.createAndLoadNewProject(projFilePath)
            workflow = shell.projectManager.workflow
        
            # Add a file
            from ilastik.applets.dataSelection.opDataSelection import DatasetInfo
            info = DatasetInfo()
            info.filePath = self.SAMPLE_DATA
            opDataSelection = workflow.dataSelectionApplet.topLevelOperator
            opDataSelection.Dataset.resize(1)
            opDataSelection.Dataset[0].setValue(info)
            
            # Set some features
            opFeatures = workflow.featureSelectionApplet.topLevelOperator
            opFeatures.FeatureIds.setValue( OpPixelFeaturesPresmoothed.DefaultFeatureIds )
            opFeatures.Scales.setValue( [0.3, 0.7, 1, 1.6, 3.5, 5.0, 10.0] )
            #                    sigma:   0.3    0.7    1.0    1.6    3.5    5.0   10.0
            selections = numpy.array( [[True, True, True, True, True, True, False],
                                       [True, True, True, True, True, True, False],
                                       [True, True, True, True, True, True, False],
                                       [True, True, True, True, True, True, False],
                                       [True, True, True, True, True, True, False],
                                       [True, True, True, True, True, True, False]] )

            opFeatures.SelectionMatrix.setValue(selections)
Example #10
    def _get_template_dataset_infos(self, input_axes=None):
        """
        Sometimes the default settings for an input file are not suitable (e.g. the axistags need to be changed).
        We assume the LAST non-batch input in the workflow has settings that will work for all batch processing inputs.
        Here, we get the DatasetInfo objects from that lane and store them as 'templates' to modify for all batch-processing files.
        """
        template_infos = {}

        # If there isn't an available dataset to use as a template
        if len(self.dataSelectionApplet.topLevelOperator.DatasetGroup) == 0:
            num_roles = len(self.dataSelectionApplet.topLevelOperator.DatasetRoles.value)
            for role_index in range(num_roles):
                template_infos[role_index] = DatasetInfo()
                template_infos[role_index].axistags = vigra.defaultAxistags(input_axes)
            return template_infos

        # Use the LAST non-batch input file as our 'template' for DatasetInfo settings (e.g. axistags)
        template_lane = len(self.dataSelectionApplet.topLevelOperator.DatasetGroup) - 1
        opDataSelectionTemplateView = self.dataSelectionApplet.topLevelOperator.getLane(template_lane)

        for role_index, info_slot in enumerate(opDataSelectionTemplateView.DatasetGroup):
            if info_slot.ready():
                template_infos[role_index] = info_slot.value
            else:
                template_infos[role_index] = DatasetInfo()
            if input_axes:
                # Support the --input_axes arg to override input axis order, same as DataSelection applet.
                template_infos[role_index].axistags = vigra.defaultAxistags(input_axes)
        return template_infos
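As a minimal sketch (not from the source) of how one of these template infos might be applied to a new batch file, reusing only the DatasetInfo fields that appear above; apply_template is a hypothetical helper:

    import copy
    import vigra

    def apply_template(template_info, new_filepath, input_axes=None):
        # Copy the template so per-file edits don't leak back into it.
        info = copy.copy(template_info)
        info.filePath = new_filepath
        if input_axes:
            # The same override the --input_axes argument performs above.
            info.axistags = vigra.defaultAxistags(input_axes)
        return info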
Example #11
        def impl():
            projFilePath = self.PROJECT_FILE
            shell = self.shell
            
            # New project
            shell.createAndLoadNewProject(projFilePath)
            workflow = shell.projectManager.workflow

            from ilastik.applets.dataSelection.opDataSelection import DatasetInfo
            opDataSelection = workflow.dataSelectionApplet.topLevelOperator
            for i, dataFile in enumerate(self.SAMPLE_DATA):        
                # Add a file
                info = DatasetInfo()
                info.filePath = dataFile
                opDataSelection.Dataset.resize(i+1)
                opDataSelection.Dataset[i].setValue(info)
            
            # Set some features
            opFeatures = workflow.featureSelectionApplet.topLevelOperator
            opFeatures.FeatureIds.setValue( OpPixelFeaturesPresmoothed.DefaultFeatureIds )
            opFeatures.Scales.setValue( [0.3, 0.7, 1, 1.6, 3.5, 5.0, 10.0] )
            #                    sigma:   0.3    0.7    1.0    1.6    3.5    5.0   10.0
            selections = numpy.array( [[True, False, False, False, False, False, False],
                                       [True, False, False, False, False, False, False],
                                       [True, False, False, False, False, False, False],
                                       [False, False, False, False, False, False, False],
                                       [False, False, False, False, False, False, False],
                                       [False, False, False, False, False, False, False]] )
            opFeatures.SelectionMatrix.setValue(selections)
      
            # Save and close
            shell.projectManager.saveProject()
            shell.ensureNoCurrentProject(assertClean=True)
Example #12
    def testWeirdAxisInfos(self):
        """
        If we add a dataset that has the channel axis in the wrong place, 
        the operator should automatically transpose it to be last.
        """
        weirdAxisFilename = os.path.join(self.workingDir, 'WeirdAxes.npy')
        expected_data = numpy.random.random( (3,100,100) )
        numpy.save(weirdAxisFilename, expected_data)

        info = DatasetInfo()
        info.filePath = weirdAxisFilename
        info.axistags = vigra.defaultAxistags('cxy')
        
        graph = Graph()
        op = OpDataSelectionGroup(graph=graph, forceAxisOrder=False)
        op.WorkingDirectory.setValue( self.workingDir )
        op.DatasetRoles.setValue( ['RoleA'] )

        op.DatasetGroup.resize( 1 )
        op.DatasetGroup[0].setValue( info )

        assert op.ImageGroup[0].ready()
        
        data_from_op = op.ImageGroup[0][:].wait()
        
        assert data_from_op.dtype == expected_data.dtype 
        assert data_from_op.shape == expected_data.shape, (data_from_op.shape, expected_data.shape)
        assert (data_from_op == expected_data).all()

        # op.Image is a synonym for op.ImageGroup[0]
        assert op.Image.ready()
        assert (op.Image[:].wait() == expected_data).all()
        
        # Ensure that files opened by the inner operators are closed before we exit.
        op.DatasetGroup.resize(0)
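For reference, a quick check (a sketch, assuming vigra's AxisTags indexing API) of what the 'cxy' tags above declare, namely that axis 0 is the channel axis:

    import vigra

    tags = vigra.defaultAxistags('cxy')
    assert tags[0].key == 'c'      # axis 0 carries the channel dimension
    assert tags.index('c') == 0    # equivalent lookup by axis key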
Example #13
def debug_with_new(shell, workflow):
    """
    (Function for debug and testing.)
    """
    projFilePath = "/magnetic/test_watershed_project.ilp"

    # New project
    shell.createAndLoadNewProject(projFilePath)

    # Add a file
    from ilastik.applets.dataSelection.opDataSelection import DatasetInfo
    info = DatasetInfo()
    #info.filePath = '/magnetic/gigacube.h5'
    #info.filePath = '/magnetic/synapse_small.npy'
    info.filePath = '/magnetic/synapse_small.npy_results.h5'
    #info.filePath = '/magnetic/singleslice.h5'
    opDataSelection = workflow.dataSelectionApplet.topLevelOperator
    opDataSelection.Dataset.resize(1)
    opDataSelection.Dataset[0].setValue(info)

    # Select the watershed drawer
    shell.setSelectedAppletDrawer(1)

    # Save the project
    shell.onSaveProjectActionTriggered()
Example #14
def debug_with_new(shell):
    """
    (Function for debug and testing.)
    """
    #projFilePath = "/magnetic/synapse_debug_data/object_prediction.ilp"
    projFilePath = "/magnetic/stuart_object_predictions.ilp"

    # New project
    shell.createAndLoadNewProject(projFilePath)
    workflow = shell.projectManager.workflow

    # Add a file
    from ilastik.applets.dataSelection.opDataSelection import DatasetInfo

    rawInfo = DatasetInfo()
    #rawInfo.filePath = '/magnetic/synapse_debug_data/block256.h5/cube'
    #rawInfo.filePath = '/magnetic/synapse_small_4d.h5/volume/data'
    rawInfo.filePath = '/magnetic/validation_slices_20_40_3200_4000_1200_2000.h5/volume/data'
    opRawDataSelection = workflow.rawDataSelectionApplet.topLevelOperator
    opRawDataSelection.Dataset.resize(1)
    opRawDataSelection.Dataset[0].setValue(rawInfo)

    predictionInfo = DatasetInfo()
    #predictionInfo.filePath = '/magnetic/synapse_debug_data/block256_spots_predictions.h5/cube'
    #predictionInfo.filePath = '/magnetic/synapse_small_4d_synapse_predictions.h5/volume/data'
    predictionInfo.filePath = '/magnetic/validation_slices_20_40_3200_4000_1200_2000_pred.h5/volume/data'
    opPredDataSelection = workflow.predictionSelectionApplet.topLevelOperator
    opPredDataSelection.Dataset.resize(1)
    opPredDataSelection.Dataset[0].setValue(predictionInfo)

    # Select the feature drawer
    shell.setSelectedAppletDrawer(2)
Example #15
def debug_with_new(shell, workflow):
    """
    (Function for debug and testing.)
    """
    projFilePath = "/magnetic/test_watershed_project.ilp"

    # New project
    shell.createAndLoadNewProject(projFilePath)

    # Add a file
    from ilastik.applets.dataSelection.opDataSelection import DatasetInfo
    info = DatasetInfo()
    #info.filePath = '/magnetic/gigacube.h5'
    #info.filePath = '/magnetic/synapse_small.npy'
    info.filePath = '/magnetic/synapse_small.npy_results.h5'
    #info.filePath = '/magnetic/singleslice.h5'
    opDataSelection = workflow.dataSelectionApplet.topLevelOperator
    opDataSelection.Dataset.resize(1)
    opDataSelection.Dataset[0].setValue(info)

    # Select the watershed drawer
    shell.setSelectedAppletDrawer(1)

    # Save the project
    shell.onSaveProjectActionTriggered()
Example #16
        def impl():
            projFilePath = self.PROJECT_FILE
            shell = self.shell

            # New project
            shell.createAndLoadNewProject(projFilePath, self.workflowClass())
            workflow = shell.projectManager.workflow

            from ilastik.applets.dataSelection.opDataSelection import DatasetInfo
            opDataSelection = workflow.dataSelectionApplet.topLevelOperator
            for i, dataFile in enumerate(self.SAMPLE_DATA):
                # Add a file
                info = DatasetInfo()

                info.filePath = dataFile


                opDataSelection.DatasetGroup.resize(i+1)
                opDataSelection.DatasetGroup[i][0].setValue(info)

            # Set some features
            opFeatures = workflow.featureSelectionApplet.topLevelOperator
            #                    sigma:   0.3    0.7    1.0    1.6    3.5    5.0   10.0
            selections = numpy.array( [[True, False, False, False, False, False, False],
                                       [True, False, False, False, False, False, False],
                                       [True, False, False, False, False, False, False],
                                       [False, False, False, False, False, False, False],
                                       [False, False, False, False, False, False, False],
                                       [False, False, False, False, False, False, False]] )
            opFeatures.SelectionMatrix.setValue(selections)
Example #17
    def create_new_tst_project(cls):
        # Instantiate 'shell'
        shell = HeadlessShell(  )
        
        # Create a blank project file and load it.
        newProjectFilePath = cls.PROJECT_FILE
        newProjectFile = ProjectManager.createBlankProjectFile(newProjectFilePath, PixelClassificationWorkflow, [])
        newProjectFile.close()
        shell.openProjectFile(newProjectFilePath)
        workflow = shell.workflow
        
        # Add a file
        from ilastik.applets.dataSelection.opDataSelection import DatasetInfo
        info = DatasetInfo()
        info.filePath = cls.SAMPLE_DATA
        opDataSelection = workflow.dataSelectionApplet.topLevelOperator
        opDataSelection.DatasetGroup.resize(1)
        opDataSelection.DatasetGroup[0][0].setValue(info)
        
        
        # Set some features
        ScalesList = [0.3, 0.7, 1, 1.6, 3.5, 5.0, 10.0]    
        FeatureIds = [ 'GaussianSmoothing',
                       'LaplacianOfGaussian',
                       'StructureTensorEigenvalues',
                       'HessianOfGaussianEigenvalues',
                       'GaussianGradientMagnitude',
                       'DifferenceOfGaussians' ]

        opFeatures = workflow.featureSelectionApplet.topLevelOperator
        opFeatures.Scales.setValue( ScalesList )
        opFeatures.FeatureIds.setValue( FeatureIds )

        #                    sigma:   0.3    0.7    1.0    1.6    3.5    5.0   10.0
        selections = numpy.array( [[True, False, False, False, False, False, False],
                                   [True, False, False, False, False, False, False],
                                   [True, False, False, False, False, False, False],
                                   [False, False, False, False, False, False, False],
                                   [False, False, False, False, False, False, False],
                                   [False, False, False, False, False, False, False]] )
        opFeatures.SelectionMatrix.setValue(selections)
    
        # Add some labels directly to the operator
        opPixelClass = workflow.pcApplet.topLevelOperator

        opPixelClass.LabelNames.setValue(['Label 1', 'Label 2'])

        slicing1 = sl[0:1,0:10,0:10,0:1,0:1]
        labels1 = 1 * numpy.ones(slicing2shape(slicing1), dtype=numpy.uint8)
        opPixelClass.LabelInputs[0][slicing1] = labels1

        slicing2 = sl[0:1,0:10,10:20,0:1,0:1]
        labels2 = 2 * numpy.ones(slicing2shape(slicing2), dtype=numpy.uint8)
        opPixelClass.LabelInputs[0][slicing2] = labels2

        # Save and close
        shell.projectManager.saveProject()
        del shell
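For reference: sl and slicing2shape come from lazyflow's slicing utilities. A sketch of what the label blocks above amount to (the axis interpretation is an assumption, not stated in the source):

    slicing1 = sl[0:1, 0:10, 0:10, 0:1, 0:1]   # a tuple of slice objects
    # slicing2shape(slicing1) -> (1, 10, 10, 1, 1), i.e. a 10x10 patch in a
    # single time/z slice and channel, filled with the label value 1 above.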
Example #18
    def testCreateExportDirectory(self):
        """
        Test that the batch operator can create the export directory if it doesn't exist yet.
        """
        # Start by writing some test data to disk.
        self.testData = numpy.random.random((1, 10, 10, 10, 1))
        numpy.save(self.testDataFileName, self.testData)

        cwd = os.getcwd()
        info = DatasetInfo()
        info.filePath = os.path.join(cwd, 'NpyTestData.npy')

        graph = Graph()
        opBatchIo = OpBatchIo(graph=graph)
        opInput = OpInputDataReader(graph=graph)
        opInput.FilePath.setValue(info.filePath)

        # Our test "processing pipeline" is just a smoothing operator.
        opSmooth = OpGaussianSmoothing(graph=graph)
        opSmooth.Input.connect(opInput.Output)
        opSmooth.sigma.setValue(3.0)

        exportDir = os.path.join(cwd, 'exported_data')
        opBatchIo.ExportDirectory.setValue(exportDir)
        opBatchIo.Suffix.setValue('_smoothed')
        opBatchIo.Format.setValue(ExportFormat.H5)
        opBatchIo.DatasetPath.setValue(info.filePath)

        internalPath = 'path/to/data'
        opBatchIo.InternalPath.setValue(internalPath)

        opBatchIo.ImageToExport.connect(opSmooth.Output)

        dirty = opBatchIo.Dirty.value
        assert dirty == True

        outputPath = opBatchIo.OutputDataPath.value
        assert outputPath == os.path.join(exportDir, 'NpyTestData_smoothed.h5',
                                          internalPath)

        result = opBatchIo.ExportResult.value
        assert result

        dirty = opBatchIo.Dirty.value
        assert dirty == False

        # Check the file
        smoothedPath = PathComponents(outputPath).externalPath
        with h5py.File(smoothedPath, 'r') as f:
            assert internalPath in f
            assert f[internalPath].shape == self.testData.shape
        try:
            os.remove(smoothedPath)
            os.rmdir(exportDir)
        except:
            pass
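For reference, the external/internal split that the cleanup above relies on. A sketch of PathComponents semantics as used in these examples (the exact internalPath form is an assumption):

    comp = PathComponents('/tmp/exported_data/NpyTestData_smoothed.h5/path/to/data')
    # comp.externalPath -> '/tmp/exported_data/NpyTestData_smoothed.h5'
    # comp.internalPath -> '/path/to/data' (assumed to keep a leading slash)
    # comp.totalPath()  -> the full joined path again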
Example #19
    def create_new_tst_project(cls):
        # Instantiate 'shell'
        shell = HeadlessShell()

        # Create a blank project file and load it.
        newProjectFilePath = cls.PROJECT_FILE
        newProjectFile = ProjectManager.createBlankProjectFile(
            newProjectFilePath, PixelClassificationWorkflow, [])
        newProjectFile.close()
        shell.openProjectFile(newProjectFilePath)
        workflow = shell.workflow

        # Add a file
        from ilastik.applets.dataSelection.opDataSelection import DatasetInfo
        info = DatasetInfo()
        info.filePath = cls.SAMPLE_DATA
        opDataSelection = workflow.dataSelectionApplet.topLevelOperator
        opDataSelection.DatasetGroup.resize(1)
        opDataSelection.DatasetGroup[0][0].setValue(info)

        # Set some features
        ScalesList = [0.3, 0.7, 1, 1.6, 3.5, 5.0, 10.0]
        FeatureIds = [
            'GaussianSmoothing', 'LaplacianOfGaussian',
            'StructureTensorEigenvalues', 'HessianOfGaussianEigenvalues',
            'GaussianGradientMagnitude', 'DifferenceOfGaussians'
        ]

        opFeatures = workflow.featureSelectionApplet.topLevelOperator
        opFeatures.Scales.setValue(ScalesList)
        opFeatures.FeatureIds.setValue(FeatureIds)

        #                    sigma:   0.3    0.7    1.0    1.6    3.5    5.0   10.0
        selections = numpy.array(
            [[True, False, False, False, False, False, False],
             [True, False, False, False, False, False, False],
             [True, False, False, False, False, False, False],
             [False, False, False, False, False, False, False],
             [False, False, False, False, False, False, False],
             [False, False, False, False, False, False, False]])
        opFeatures.SelectionMatrix.setValue(selections)

        # Add some labels directly to the operator
        opPixelClass = workflow.pcApplet.topLevelOperator

        slicing1 = sl[0:1, 0:10, 0:10, 0:1, 0:1]
        labels1 = 1 * numpy.ones(slicing2shape(slicing1), dtype=numpy.uint8)
        opPixelClass.LabelInputs[0][slicing1] = labels1

        slicing2 = sl[0:1, 0:10, 10:20, 0:1, 0:1]
        labels2 = 2 * numpy.ones(slicing2shape(slicing2), dtype=numpy.uint8)
        opPixelClass.LabelInputs[0][slicing2] = labels2

        # Save and close
        shell.projectManager.saveProject()
        del shell
Example #20
    def testCreateExportDirectory(self):
        """
        Test that the batch operator can create the export directory if it doesn't exist yet.
        """
        # Start by writing some test data to disk.
        self.testData = numpy.random.random((1,10,10,10,1))
        numpy.save(self.testDataFileName, self.testData)

        cwd = os.getcwd()
        info = DatasetInfo()
        info.filePath = os.path.join(cwd, 'NpyTestData.npy')
        
        graph = Graph()
        opBatchIo = OpBatchIo(graph=graph)
        opInput = OpInputDataReader(graph=graph)
        opInput.FilePath.setValue( info.filePath )
        
        # Our test "processing pipeline" is just a smoothing operator.
        opSmooth = OpGaussianSmoothing(graph=graph)
        opSmooth.Input.connect( opInput.Output )
        opSmooth.sigma.setValue(3.0)
        
        exportDir = os.path.join(cwd, 'exported_data')
        opBatchIo.ExportDirectory.setValue( exportDir )
        opBatchIo.Suffix.setValue( '_smoothed' )
        opBatchIo.Format.setValue( ExportFormat.H5 )
        opBatchIo.DatasetPath.setValue( info.filePath )
        opBatchIo.WorkingDirectory.setValue( cwd )
        
        internalPath = 'path/to/data'
        opBatchIo.InternalPath.setValue( internalPath )
        
        opBatchIo.ImageToExport.connect( opSmooth.Output )
        
        dirty = opBatchIo.Dirty.value
        assert dirty == True
        
        outputPath = opBatchIo.OutputDataPath.value
        assert outputPath == os.path.join(exportDir, 'NpyTestData_smoothed.h5', internalPath)
        
        result = opBatchIo.ExportResult.value
        assert result
        
        dirty = opBatchIo.Dirty.value
        assert dirty == False
        
        # Check the file
        smoothedPath = PathComponents(outputPath).externalPath
        with h5py.File(smoothedPath, 'r') as f:
            assert internalPath in f
            assert f[internalPath].shape == self.testData.shape
        try:
            os.remove(smoothedPath)
            os.rmdir(exportDir)
        except:
            pass
Example #21
        def impl():
            projFilePath = self.PROJECT_FILE

            shell = self.shell

            # New project
            shell.createAndLoadNewProject(projFilePath, self.workflowClass())
            workflow = shell.projectManager.workflow

            # Add a file
            from ilastik.applets.dataSelection.opDataSelection import DatasetInfo

            info = DatasetInfo()
            info.filePath = self.SAMPLE_DATA
            opDataSelection = workflow.dataSelectionApplet.topLevelOperator
            opDataSelection.DatasetGroup.resize(1)
            opDataSelection.DatasetGroup[0][0].setValue(info)

            # Set some features
            opFeatures = workflow.featureSelectionApplet.topLevelOperator
            #                    sigma:   0.3    0.7    1.0    1.6    3.5    5.0   10.0
            selections = numpy.array(
                [
                    [True, False, False, False, False, False, False],
                    [True, False, False, False, False, False, False],
                    [True, False, False, False, False, False, False],
                    [False, False, False, False, False, False, False],
                    [False, False, False, False, False, False, False],
                    [False, False, False, False, False, False, False],
                ]
            )

            opFeatures.SelectionMatrix.setValue(selections)

            workflow = self.shell.projectManager.workflow
            countingClassApplet = workflow.countingApplet
            gui = countingClassApplet.getMultiLaneGui()
            opCount = countingClassApplet.topLevelOperator

            opCount.opTrain.Sigma.setValue(self.COUNTING_SIGMA)

            # Select the labeling drawer
            self.shell.setSelectedAppletDrawer(COUNTING_APPLET_INDEX)

            # Turn off the HUDs so we can capture the raw image
            viewMenu = gui.currentGui().menus()[0]
            viewMenu.actionToggleAllHuds.trigger()

            # Save and close
            shell.projectManager.saveProject()
            shell.ensureNoCurrentProject(assertClean=True)
Example #22
 def basicImpl(self):
     cwd = os.getcwd()
     info = DatasetInfo()
     info.filePath = os.path.join(cwd, self.testDataFileName)
     
     graph = Graph()
     opBatchIo = OpBatchIo(graph=graph)
     opInput = OpInputDataReader(graph=graph)
     opInput.FilePath.setValue( info.filePath )
     
     # Our test "processing pipeline" is just a smoothing operator.
     opSmooth = OpGaussianSmoothing(graph=graph)
     opSmooth.Input.connect( opInput.Output )
     opSmooth.sigma.setValue(3.0)
     
     opBatchIo.ExportDirectory.setValue( '' )
     opBatchIo.Suffix.setValue( '_smoothed' )
     opBatchIo.Format.setValue( ExportFormat.H5 )
     opBatchIo.DatasetPath.setValue( info.filePath )
     opBatchIo.WorkingDirectory.setValue( cwd )
     
     internalPath = 'path/to/data'
     opBatchIo.InternalPath.setValue( internalPath )
     
     opBatchIo.ImageToExport.connect( opSmooth.Output )
     
     dirty = opBatchIo.Dirty.value
     assert dirty == True
     
     outputPath = opBatchIo.OutputDataPath.value
     assert outputPath == os.path.join(cwd, 'NpyTestData_smoothed.h5/' + internalPath)
     
     result = opBatchIo.ExportResult.value
     assert result
     
     dirty = opBatchIo.Dirty.value
     assert dirty == False
     
     # Check the file
     smoothedPath = os.path.join(cwd, 'NpyTestData_smoothed.h5')
     with h5py.File(smoothedPath, 'r') as f:
         assert internalPath in f
         assert f[internalPath].shape == self.expectedDataShape
         assert (f[internalPath][:] == opSmooth.Output[:].wait()).all()
     try:
         os.remove(smoothedPath)
     except:
         pass
     
     # Check the exported image
     assert ( opBatchIo.ExportedImage[:].wait() == opSmooth.Output[:].wait() ).all()
Example #23
    def testBasic3DWrongAxes(self):
        """Test if 3D file with intentionally wrong axes is rejected """
        for fileName in self.imgFileNames3D:
            graph = lazyflow.graph.Graph()
            reader = OperatorWrapper(OpDataSelection, graph=graph)
            reader.ProjectFile.setValue(self.projectFile)
            reader.WorkingDirectory.setValue(os.getcwd())
            reader.ProjectDataGroup.setValue('DataSelection/local_data')

            info = DatasetInfo()
            # Will be read from the filesystem since the data won't be found in the project file.
            info.location = DatasetInfo.Location.ProjectInternal
            info.filePath = fileName
            info.internalPath = ""
            info.invertColors = False
            info.convertToGrayscale = False
            info.axistags = vigra.defaultAxistags('tzyc')

            try:
                reader.Dataset.setValues([info])
                assert False, "Should have thrown an exception!"
            except DatasetConstraintError:
                pass
            except:
                assert False, "Should have thrown a DatasetConstraintError!"
Example #24
 def loadProject(shell, workflow):
     if not os.path.exists(projectFilename):
         shell.createAndLoadNewProject(projectFilename)
     else:
         shell.openProjectFile(projectFilename)
     workflow.setCarvingGraphFile(carvingGraphFilename)
     # Add a file
     from ilastik.applets.dataSelection.opDataSelection import DatasetInfo
     info = DatasetInfo()
     info.filePath = carvingGraphFilename + "/graph/raw"
     opDataSelection = workflow.dataSelectionApplet.topLevelOperator
     opDataSelection.Dataset.resize(1)
     opDataSelection.Dataset[0].setValue(info)
     shell.setSelectedAppletDrawer(2)
Example #25
        def impl():
            projFilePath = self.PROJECT_FILE
         
            shell = self.shell
             
            # New project
            shell.createAndLoadNewProject(projFilePath, self.workflowClass())
            workflow = shell.projectManager.workflow
         
            # Add a file
            from ilastik.applets.dataSelection.opDataSelection import DatasetInfo
            info = DatasetInfo()
            info.filePath = self.SAMPLE_DATA
            opDataSelection = workflow.dataSelectionApplet.topLevelOperator
            opDataSelection.DatasetGroup.resize(1)
            opDataSelection.DatasetGroup[0][0].setValue(info)
             
            # Set some features
            opFeatures = workflow.featureSelectionApplet.topLevelOperator
            opFeatures.FeatureIds.setValue( OpPixelFeaturesPresmoothed.DefaultFeatureIds )
            opFeatures.Scales.setValue( [0.3, 0.7, 1, 1.6, 3.5, 5.0, 10.0] )
            #                    sigma:   0.3    0.7    1.0    1.6    3.5    5.0   10.0
            selections = numpy.array( [[True, False, False, False, False, False, False],
                                       [True, False, False, False, False, False, False],
                                       [True, False, False, False, False, False, False],
                                       [False, False, False, False, False, False, False],
                                       [False, False, False, False, False, False, False],
                                       [False, False, False, False, False, False, False]] )
 
            opFeatures.SelectionMatrix.setValue(selections)
            #shell.setSelectedAppletDrawer(5)
            
            
            workflow = self.shell.projectManager.workflow
            countingClassApplet = workflow.countingApplet
            gui = countingClassApplet.getMultiLaneGui()
            opCount = countingClassApplet.topLevelOperator
 
            # Select the labeling drawer
            self.shell.setSelectedAppletDrawer(3)
             
            # Turn off the HUDs so we can capture the raw image
            viewMenu = gui.currentGui().menus()[0]
            viewMenu.actionToggleAllHuds.trigger()
        

            # Save and close
            shell.projectManager.saveProject()
            shell.ensureNoCurrentProject(assertClean=True)
Example #26
def generateBatchPredictions(workflow, batchInputPaths, batchExportDir, batchOutputSuffix, exportedDatasetName):
    """
    Compute the predictions for each of the specified batch input files,
    and export them to corresponding h5 files.
    """
    batchInputPaths = convertStacksToH5(batchInputPaths)

    batchInputInfos = []
    for p in batchInputPaths:
        info = DatasetInfo()
        info.location = DatasetInfo.Location.FileSystem

        # Convert all paths to absolute 
        # (otherwise they are relative to the project file, which probably isn't what the user meant)        
        comp = PathComponents(p)
        comp.externalPath = os.path.abspath(comp.externalPath)
        
        info.filePath = comp.totalPath()        
        batchInputInfos.append(info)

    # Configure batch input operator
    opBatchInputs = workflow.batchInputApplet.topLevelOperator
    opBatchInputs.Dataset.setValues( batchInputInfos )
    
    # Configure batch export operator
    opBatchResults = workflow.batchResultsApplet.topLevelOperator
    opBatchResults.ExportDirectory.setValue(batchExportDir)
    opBatchResults.Format.setValue(ExportFormat.H5)
    opBatchResults.Suffix.setValue(batchOutputSuffix)
    opBatchResults.InternalPath.setValue(exportedDatasetName)
    opBatchResults.SelectedSlices.setValue([30])
    
    logger.info( "Exporting data to " + opBatchResults.OutputDataPath[0].value )

    # Set up progress display handling (just logging for now)        
    currentProgress = [None]
    def handleProgress(percentComplete):
        if currentProgress[0] != percentComplete:
            currentProgress[0] = percentComplete
            logger.info("Batch job: {}% complete.".format(percentComplete))
        
    progressSignal = opBatchResults.ProgressSignal[0].value
    progressSignal.subscribe( handleProgress )

    # Make it happen!
    result = opBatchResults.ExportResult[0].value
    return result
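A hypothetical invocation of this helper; all paths and dataset names below are placeholders, not from the source:

    result = generateBatchPredictions(workflow,
                                      batchInputPaths=['/data/batch1.h5/volume/data',
                                                       '/data/batch2.h5/volume/data'],
                                      batchExportDir='/data/exports',
                                      batchOutputSuffix='_predictions',
                                      exportedDatasetName='volume/predictions')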
Example #27
def generateBatchPredictions(workflow, batchInputPaths, batchExportDir, batchOutputSuffix, exportedDatasetName):
    """
    Compute the predictions for each of the specified batch input files,
    and export them to corresponding h5 files.
    """
    batchInputPaths = convertStacksToH5(batchInputPaths)

    batchInputInfos = []
    for p in batchInputPaths:
        info = DatasetInfo()
        info.location = DatasetInfo.Location.FileSystem

        # Convert all paths to absolute 
        # (otherwise they are relative to the project file, which probably isn't what the user meant)        
        comp = PathComponents(p)
        comp.externalPath = os.path.abspath(comp.externalPath)
        
        info.filePath = comp.totalPath()        
        batchInputInfos.append(info)

    # Configure batch input operator
    opBatchInputs = workflow.batchInputApplet.topLevelOperator
    opBatchInputs.Dataset.setValues( batchInputInfos )
    
    # Configure batch export operator
    opBatchResults = workflow.batchResultsApplet.topLevelOperator
    opBatchResults.ExportDirectory.setValue(batchExportDir)
    opBatchResults.Format.setValue(ExportFormat.H5)
    opBatchResults.Suffix.setValue(batchOutputSuffix)
    opBatchResults.InternalPath.setValue(exportedDatasetName)
    opBatchResults.SelectedSlices.setValue([30])
    
    logger.info( "Exporting data to " + opBatchResults.OutputDataPath[0].value )

    # Set up progress display handling (just logging for now)        
    currentProgress = [None]
    def handleProgress(percentComplete):
        if currentProgress[0] != percentComplete:
            currentProgress[0] = percentComplete
            logger.info("Batch job: {}% complete.".format(percentComplete))
        
    progressSignal = opBatchResults.ProgressSignal[0].value
    progressSignal.subscribe( handleProgress )

    # Make it happen!
    result = opBatchResults.ExportResult[0].value
    return result
Example #28
    def basicImpl(self):
        cwd = os.getcwd()
        info = DatasetInfo()
        info.filePath = os.path.join(cwd, 'NpyTestData.npy')

        graph = Graph()
        opBatchIo = OpBatchIo(graph=graph)
        opInput = OpInputDataReader(graph=graph)
        opInput.FilePath.setValue(info.filePath)

        # Our test "processing pipeline" is just a smoothing operator.
        opSmooth = OpGaussianSmoothing(graph=graph)
        opSmooth.Input.connect(opInput.Output)
        opSmooth.sigma.setValue(3.0)

        opBatchIo.ExportDirectory.setValue('')
        opBatchIo.Suffix.setValue('_smoothed')
        opBatchIo.Format.setValue(ExportFormat.H5)
        opBatchIo.DatasetPath.setValue(info.filePath)

        internalPath = 'path/to/data'
        opBatchIo.InternalPath.setValue(internalPath)

        opBatchIo.ImageToExport.connect(opSmooth.Output)

        dirty = opBatchIo.Dirty.value
        assert dirty == True

        outputPath = opBatchIo.OutputDataPath.value
        assert outputPath == os.path.join(
            cwd, 'NpyTestData_smoothed.h5/' + internalPath)

        result = opBatchIo.ExportResult.value
        assert result

        dirty = opBatchIo.Dirty.value
        assert dirty == False

        # Check the file
        smoothedPath = os.path.join(cwd, 'NpyTestData_smoothed.h5')
        with h5py.File(smoothedPath, 'r') as f:
            assert internalPath in f
            assert f[internalPath].shape == self.expectedDataShape
        try:
            os.remove(smoothedPath)
        except:
            pass
Example #29
        def impl():
            projFilePath = self.project_file
            shell = self.shell

            # New project
            shell.createAndLoadNewProject(projFilePath, self.workflowClass())
            workflow = shell.projectManager.workflow

            # Add our input files:
            opDataSelection = workflow.dataSelectionApplet.topLevelOperator
            opDataSelection.DatasetGroup.resize(1)
            info_raw = DatasetInfo()
            info_raw.filePath = self.sample_data_raw
            opDataSelection.DatasetGroup[0][0].setValue(info_raw)

            # Save
            shell.projectManager.saveProject()
Example #30
        def impl():
            projFilePath = self.project_file
            shell = self.shell

            # New project
            shell.createAndLoadNewProject(projFilePath, self.workflowClass())
            workflow = shell.projectManager.workflow

            # Add our input files:
            opDataSelection = workflow.dataSelectionApplet.topLevelOperator
            opDataSelection.DatasetGroup.resize(1)
            info_raw = DatasetInfo()
            info_raw.filePath = self.sample_data_raw
            opDataSelection.DatasetGroup[0][0].setValue(info_raw)

            # Save
            shell.projectManager.saveProject()
Example #31
    def testBasic3DWrongAxes(self):
        """Test if 3D file with intentionally wrong axes is rejected """
        for fileName in self.imgFileNames3D:
            graph = lazyflow.graph.Graph()
            reader = OperatorWrapper(OpDataSelection, graph=graph, operator_kwargs={'forceAxisOrder': False})
            reader.ProjectFile.setValue(self.projectFile)
            reader.WorkingDirectory.setValue(os.getcwd())
            reader.ProjectDataGroup.setValue('DataSelection/local_data')

            info = DatasetInfo()
            # Will be read from the filesystem since the data won't be found in the project file.
            info.location = DatasetInfo.Location.ProjectInternal
            info.filePath = fileName
            info.internalPath = ""
            info.invertColors = False
            info.convertToGrayscale = False
            info.axistags = vigra.defaultAxistags('tzyc')

            try:
                reader.Dataset.setValues([info])
                assert False, "Should have thrown an exception!"
            except DatasetConstraintError:
                pass
            except:
                assert False, "Should have thrown a DatasetConstraintError!"
Example #32
    def test_real_data_source(self):
        graph = lazyflow.graph.Graph()
        reader = OperatorWrapper(OpDataSelection, graph=graph,
                                 operator_kwargs={'forceAxisOrder': False})
        reader.ProjectFile.setValue(self.projectFile)
        reader.WorkingDirectory.setValue(os.getcwd())
        reader.ProjectDataGroup.setValue('DataSelection/local_data')

        info = DatasetInfo()
        # Will be read from the filesystem since the data won't be found in the project file.
        info.location = DatasetInfo.Location.ProjectInternal
        info.filePath = self.testRawDataFileName
        info.internalPath = ""
        info.invertColors = False
        info.convertToGrayscale = False
        # Use real data source
        info.realDataSource = True

        reader.Dataset.setValues([info])

        # Read the test file using the data selection operator and verify the contents
        imgData = reader.Image[0][...].wait()

        assert imgData.shape == self.imgData.shape
        numpy.testing.assert_array_equal(imgData, self.imgData)
Example #33
    def testBasic2D(self):
        """Test if plane 2d files are loaded correctly"""
        for fileName in self.imgFileNames2D:
            graph = lazyflow.graph.Graph()
            reader = OperatorWrapper(OpDataSelection,
                                     graph=graph,
                                     operator_kwargs={'forceAxisOrder': False})
            reader.ProjectFile.setValue(self.projectFile)
            reader.WorkingDirectory.setValue(os.getcwd())
            reader.ProjectDataGroup.setValue('DataSelection/local_data')

            info = DatasetInfo()
            # Will be read from the filesystem since the data won't be found in the project file.
            info.location = DatasetInfo.Location.ProjectInternal
            info.filePath = fileName
            info.internalPath = ""
            info.invertColors = False
            info.convertToGrayscale = False

            reader.Dataset.setValues([info])

            # Read the test files using the data selection operator and verify the contents
            imgData2D = reader.Image[0][...].wait()

            # Check the file name output
            assert reader.ImageName[0].value == fileName
            # Check raw images
            assert imgData2D.shape == self.imgData2D.shape
            # skip this if image was saved compressed:
            if any(x in fileName.lower() for x in self.compressedExtensions):
                print("Skipping raw comparison for compressed data: {}".format(
                    fileName))
                continue
            numpy.testing.assert_array_equal(imgData2D, self.imgData2D)
Example #34
    def _run_export_with_empty_batch_lane(self, role_input_paths, batch_lane_index, template_infos, progress_callback):
        """
        Configure the fresh batch lane with the given input files, and export the results.
        """
        assert role_input_paths[0], "At least one file must be provided for each dataset (the first role)."
        opDataSelectionBatchLaneView = self.dataSelectionApplet.topLevelOperator.getLane( batch_lane_index )

        # Apply new settings for each role
        for role_index, path_for_role in enumerate(role_input_paths):
            if not path_for_role:
                continue

            if template_infos[role_index]:
                info = copy.copy(template_infos[role_index])
            else:
                info = DatasetInfo()

            # Override the template settings with the current filepath.
            default_info = DataSelectionApplet.create_default_headless_dataset_info(path_for_role)
            info.filePath = default_info.filePath
            info.location = default_info.location
            info.nickname = default_info.nickname

            # Apply to the data selection operator
            opDataSelectionBatchLaneView.DatasetGroup[role_index].setValue(info)

        # Make sure nothing went wrong
        opDataExportBatchlaneView = self.dataExportApplet.topLevelOperator.getLane( batch_lane_index )
        assert opDataExportBatchlaneView.ImageToExport.ready()
        assert opDataExportBatchlaneView.ExportPath.ready()
        
        # New lanes were added.
        # Give the workflow a chance to restore anything that was unnecessarily invalidated (e.g. classifiers)
        self.workflow.handleNewLanesAdded()
        
        # Call customization hook
        self.dataExportApplet.prepare_lane_for_export(batch_lane_index)

        # Finally, run the export
        logger.info("Exporting to {}".format( opDataExportBatchlaneView.ExportPath.value ))
        opDataExportBatchlaneView.progressSignal.subscribe(progress_callback)
        opDataExportBatchlaneView.run_export()

        # Call customization hook
        self.dataExportApplet.post_process_lane_export(batch_lane_index)
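A minimal progress callback of the kind this method subscribes, mirroring the handleProgress pattern from the generateBatchPredictions examples above (a sketch, not from the source):

    def progress_callback(percentComplete):
        # Invoked by the lane's progressSignal with an integer percentage.
        print("Batch export: {}% complete".format(percentComplete))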
Example #35
    def testBasic3DcStackFromGlobString(self):
        """Test if stacked 2d 3-channel files are loaded correctly"""
        # For some reason vigra saves 2D+c data compressed in gifs, so skip!
        self.compressedExtensions.append('.gif')
        for fileName in self.imgFileNameGlobs2Dc:
            graph = lazyflow.graph.Graph()
            reader = OperatorWrapper(OpDataSelection, graph=graph)
            reader.ProjectFile.setValue(self.projectFile)
            reader.WorkingDirectory.setValue(os.getcwd())
            reader.ProjectDataGroup.setValue('DataSelection/local_data')

            info = DatasetInfo()
            # Will be read from the filesystem since the data won't be found in the project file.
            info.location = DatasetInfo.Location.ProjectInternal
            info.filePath = fileName
            info.internalPath = ""
            info.invertColors = False
            info.convertToGrayscale = False

            reader.Dataset.setValues([info])

            # Read the test files using the data selection operator and verify the contents
            imgData3Dc = reader.Image[0][...].wait()

            # Check the file name output
            assert reader.ImageName[0].value == fileName
            # Check raw images
            assert imgData3Dc.shape == self.imgData3Dc.shape
            # skip this if image was saved compressed:
            if any(x in fileName.lower() for x in self.compressedExtensions):
                print("Skipping raw comparison for compressed data: {}".format(
                    fileName))
                continue
            numpy.testing.assert_array_equal(imgData3Dc, self.imgData3Dc)
Example #36
    def testProjectLocalData(self):
        graph = lazyflow.graph.Graph()
        reader = OperatorWrapper(OpDataSelection, graph=graph)
        reader.ProjectFile.setValue(self.projectFile)
        reader.WorkingDirectory.setValue(os.getcwd())
        reader.ProjectDataGroup.setValue('DataSelection/local_data')

        # Create a list of dataset infos . . .
        datasetInfos = []

        # From project
        info = DatasetInfo()
        info.location = DatasetInfo.Location.ProjectInternal
        info.filePath = "This string should be ignored..."
        info._datasetId = 'dataset1'  # (Cheating a bit here...)
        info.invertColors = False
        info.convertToGrayscale = False
        datasetInfos.append(info)

        reader.Dataset.setValues(datasetInfos)

        projectInternalData = reader.Image[0][...].wait()

        assert projectInternalData.shape == self.imgData3Dc.shape
        assert (projectInternalData == self.imgData3Dc).all()
Example #37
    def testBasic3D(self):
        """Test if plane 2d files are loaded correctly"""
        for fileName in self.imgFileNames3D:
            graph = lazyflow.graph.Graph()
            reader = OperatorWrapper(OpDataSelection, graph=graph)
            reader.ProjectFile.setValue(self.projectFile)
            reader.WorkingDirectory.setValue(os.getcwd())
            reader.ProjectDataGroup.setValue('DataSelection/local_data')

            info = DatasetInfo()
            # Will be read from the filesystem since the data won't be found in the project file.
            info.location = DatasetInfo.Location.ProjectInternal
            info.filePath = fileName
            info.internalPath = ""
            info.invertColors = False
            info.convertToGrayscale = False

            reader.Dataset.setValues([info])

            # Read the test files using the data selection operator and verify the contents
            imgData3D = reader.Image[0][...].wait()

            # Check the file name output
            assert reader.ImageName[0].value == fileName
            # Check raw images
            assert imgData3D.shape == self.imgData3D.shape
            # skip this if image was saved compressed:
            if any(x in fileName.lower() for x in self.compressedExtensions):
                print("Skipping raw comparison for compressed data: {}".format(
                    fileName))
                continue
            numpy.testing.assert_array_equal(imgData3D, self.imgData3D)
Example #38
def debug_with_new(shell, workflow):
    """
    (Function for debug and testing.)
    """
    projFilePath = "/magnetic/test_project.ilp"

    # New project
    shell.createAndLoadNewProject(projFilePath)

    # Add a file
    from ilastik.applets.dataSelection.opDataSelection import DatasetInfo
    info = DatasetInfo()
    info.filePath = '/magnetic/gigacube.h5'
    #info.filePath = '/magnetic/synapse_small.npy'
    #info.filePath = '/magnetic/singleslice.h5'
    opDataSelection = workflow.dataSelectionApplet.topLevelOperator
    opDataSelection.Dataset.resize(1)
    opDataSelection.Dataset[0].setValue(info)

    # Set some features
    import numpy
    featureGui = workflow.featureSelectionApplet.gui
    opFeatures = workflow.featureSelectionApplet.topLevelOperator
    #                    sigma:   0.3    0.7    1.0    1.6    3.5    5.0   10.0
    #    selections = numpy.array( [[True, True, True,  True, True, True, True],
    #                               [True, True, True,  True, True, True, True],
    #                               [True, True, True,  True, True, True, True],
    #                               [True, True, True,  True, True, True, True],
    #                               [True, True, True,  True, True, True, True],
    #                               [True, True, True,  True, True, True, True]] )
    selections = numpy.array(
        [[True, False, False, False, False, False, False],
         [False, False, False, False, False, False, False],
         [False, False, False, False, False, False, False],
         [False, False, False, False, False, False, False],
         [False, False, False, False, False, False, False],
         [False, False, False, False, False, False, False]])
    opFeatures.SelectionMatrix.setValue(selections)

    # Select the feature drawer
    shell.setSelectedAppletDrawer(2)

    # Save the project
    shell.onSaveProjectActionTriggered()
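
The SelectionMatrix above is a boolean feature table: rows are the feature types and columns are the sigmas in the scale comment (0.3 through 10.0). An equivalent, arguably clearer construction of the same single selection:

import numpy

# Equivalent to the literal above: nothing selected except the first
# feature (row 0) at the smallest scale, sigma = 0.3 (column 0).
selections = numpy.zeros((6, 7), dtype=bool)
selections[0, 0] = True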
Example No. 39
def debug_with_new(shell, workflow):
    """
    (Function for debug and testing.)
    """
    projFilePath = "/magnetic/test_project.ilp"

    # New project
    shell.createAndLoadNewProject(projFilePath)

    # Add a file
    from ilastik.applets.dataSelection.opDataSelection import DatasetInfo
    info = DatasetInfo()
    info.filePath = '/magnetic/gigacube.h5'
    #info.filePath = '/magnetic/synapse_small.npy'
    #info.filePath = '/magnetic/singleslice.h5'
    opDataSelection = workflow.dataSelectionApplet.topLevelOperator
    opDataSelection.Dataset.resize(1)
    opDataSelection.Dataset[0].setValue(info)
    
    # Set some features
    import numpy
    featureGui = workflow.featureSelectionApplet.gui
    opFeatures = workflow.featureSelectionApplet.topLevelOperator
    #                    sigma:   0.3    0.7    1.0    1.6    3.5    5.0   10.0
#    selections = numpy.array( [[True, True, True,  True, True, True, True],
#                               [True, True, True,  True, True, True, True],
#                               [True, True, True,  True, True, True, True],
#                               [True, True, True,  True, True, True, True],
#                               [True, True, True,  True, True, True, True],
#                               [True, True, True,  True, True, True, True]] )
    selections = numpy.array( [[True, False, False, False, False, False, False],
                               [False, False, False, False, False, False, False],
                               [False, False, False, False, False, False, False],
                               [False, False, False, False, False, False, False],
                               [False, False, False, False, False, False, False],
                               [False, False, False, False, False, False, False]] )
    opFeatures.SelectionMatrix.setValue(selections)

    # Select the feature drawer
    shell.setSelectedAppletDrawer(2)

    # Save the project
    shell.onSaveProjectActionTriggered()
Example No. 40
    def test(self):
        """
        Make sure that the dataset roles work the way we expect them to.
        """
        infoA = DatasetInfo()
        infoA.filePath = self.group1Data[0][0]
        
        infoC = DatasetInfo()
        infoC.filePath = self.group1Data[1][0]
        
        graph = Graph()
        op = OpDataSelectionGroup( graph=graph )
        op.WorkingDirectory.setValue( self.workingDir )
        op.DatasetRoles.setValue( ['RoleA', 'RoleB', 'RoleC'] )

        op.DatasetGroup.resize( 3 )
        op.DatasetGroup[0].setValue( infoA )
        # Leave RoleB blank -- datasets other than the first are optional
        op.DatasetGroup[2].setValue( infoC )

        assert op.ImageGroup[0].ready()
        assert op.ImageGroup[2].ready()
        
        expectedDataA = self.group1Data[0][1]
        dataFromOpA = op.ImageGroup[0][:].wait()
        
        assert dataFromOpA.dtype == expectedDataA.dtype 
        assert dataFromOpA.shape == expectedDataA.shape         
        assert (dataFromOpA == expectedDataA).all()

        expectedDataC = self.group1Data[1][1]
        dataFromOpC = op.ImageGroup[2][:].wait()
        
        assert dataFromOpC.dtype == expectedDataC.dtype 
        assert dataFromOpC.shape == expectedDataC.shape         
        assert (dataFromOpC == expectedDataC).all()

        assert op.Image.ready()
        assert (op.Image[:].wait() == expectedDataA).all()
        
        # Ensure that files opened by the inner operators are closed before we exit.
        op.DatasetGroup.resize(0)
Example No. 41
    def _test_stack_along(self, name, extension, sequence_axis, expected):
        fileName = os.path.join(self.tmpdir, f'{name}{extension}')
        graph = lazyflow.graph.Graph()
        reader = OpDataSelection(graph=graph, forceAxisOrder=False)
        reader.WorkingDirectory.setValue(os.getcwd())
        info = DatasetInfo(fileName, sequence_axis=sequence_axis)
        reader.Dataset.setValue(info)
        read = reader.Image[...].wait()

        assert numpy.allclose(read, expected), f'{name}: {read.shape}, {expected.shape}'
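
A hypothetical caller of this helper would build the expected stacked volume itself and name the axis along which the sequence should be concatenated, e.g.:

        # Hypothetical usage -- fixture names are illustrative only.
        expected = numpy.concatenate([self.slice0, self.slice1], axis=0)
        self._test_stack_along('stack_z', '.h5', 'z', expected)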
Example No. 42
    def testBasic3DcStackFromGlobString(self):
        """Test if stacked 2d 3-channel files are loaded correctly"""
        # For some reason vigra saves 2D+c data compressed in gifs, so skip!
        self.compressedExtensions.append('.gif')
        for fileName in self.imgFileNameGlobs2Dc:
            graph = lazyflow.graph.Graph()
            reader = OperatorWrapper(OpDataSelection, graph=graph, operator_kwargs={'forceAxisOrder': False})
            reader.ProjectFile.setValue(self.projectFile)
            reader.WorkingDirectory.setValue(os.getcwd())
            reader.ProjectDataGroup.setValue('DataSelection/local_data')

            info = DatasetInfo()
            # Will be read from the filesystem since the data won't be found in the project file.
            info.location = DatasetInfo.Location.ProjectInternal
            info.filePath = fileName
            info.internalPath = ""
            info.invertColors = False
            info.convertToGrayscale = False

            reader.Dataset.setValues([info])

            # Read the test files using the data selection operator and verify the contents
            imgData3Dc = reader.Image[0][...].wait()

            # Check the file name output
            assert reader.ImageName[0].value == fileName
            # Check raw images
            assert imgData3Dc.shape == self.imgData3Dc.shape, (imgData3Dc.shape, self.imgData3Dc.shape)
            # skip this if image was saved compressed:
            if any(x in fileName.lower() for x in self.compressedExtensions):
                print("Skipping raw comparison for compressed data: {}".format(fileName))
                continue
            numpy.testing.assert_array_equal(imgData3Dc, self.imgData3Dc)
Example No. 43
    def testProjectLocalData(self):
        graph = lazyflow.graph.Graph()
        reader = OperatorWrapper( OpDataSelection, graph=graph )
        reader.ProjectFile.setValue(self.projectFile)
        reader.WorkingDirectory.setValue( os.getcwd() )
        reader.ProjectDataGroup.setValue( 'DataSelection/local_data' )
        
        # Create a list of dataset infos . . .
        datasetInfos = []

        # From project
        info = DatasetInfo()
        info.location = DatasetInfo.Location.ProjectInternal
        info.filePath = "This string should be ignored..."
        info._datasetId = 'dataset1' # (Cheating a bit here...)
        info.invertColors = False
        info.convertToGrayscale = False
        datasetInfos.append(info)

        reader.Dataset.setValues(datasetInfos)

        projectInternalData = reader.Image[0][...].wait()
        
        assert projectInternalData.shape == self.pngData.shape
        assert (projectInternalData == self.pngData).all()
Example No. 44
    def testBasic3D(self):
        """Test if plane 2d files are loaded correctly"""
        for fileName in self.imgFileNames3D:
            graph = lazyflow.graph.Graph()
            reader = OperatorWrapper(OpDataSelection, graph=graph, operator_kwargs={'forceAxisOrder': False})
            reader.ProjectFile.setValue(self.projectFile)
            reader.WorkingDirectory.setValue(os.getcwd())
            reader.ProjectDataGroup.setValue('DataSelection/local_data')

            info = DatasetInfo()
            # Will be read from the filesystem since the data won't be found in the project file.
            info.location = DatasetInfo.Location.ProjectInternal
            info.filePath = fileName
            info.internalPath = ""
            info.invertColors = False
            info.convertToGrayscale = False

            reader.Dataset.setValues([info])

            # Read the test files using the data selection operator and verify the contents
            imgData3D = reader.Image[0][...].wait()

            # Check the file name output
            assert reader.ImageName[0].value == fileName
            # Check raw images
            assert imgData3D.shape == self.imgData3D.shape, (imgData3D.shape, self.imgData3D.shape)
            numpy.testing.assert_array_equal(imgData3D, self.imgData3D)
Example No. 45
    def testBasic3DstacksFromFileList(self):
        for ext, fileNames in self.imgFileLists2D.items():
            fileNameString = os.path.pathsep.join(fileNames)
            graph = lazyflow.graph.Graph()
            reader = OperatorWrapper(OpDataSelection, graph=graph)
            reader.ProjectFile.setValue(self.projectFile)
            reader.WorkingDirectory.setValue(os.getcwd())
            reader.ProjectDataGroup.setValue('DataSelection/local_data')

            info = DatasetInfo(filepath=fileNameString)
            # Will be read from the filesystem since the data won't be found in the project file.
            info.location = DatasetInfo.Location.ProjectInternal
            info.internalPath = ""
            info.invertColors = False
            info.convertToGrayscale = False

            reader.Dataset.setValues([info])

            # Read the test files using the data selection operator and verify the contents
            imgData3D = reader.Image[0][...].wait()

            # Check raw images
            assert imgData3D.shape == self.imgData3D.shape
            # skip this if image was saved compressed:
            if any(x.strip('.') in ext.lower() for x in self.compressedExtensions):
                print("Skipping raw comparison for compressed data: {}".format(ext))
                continue
            numpy.testing.assert_array_equal(imgData3D, self.imgData3D)
Example No. 46
    def testBasic2D(self):
        """Test if plane 2d files are loaded correctly"""
        for fileName in self.imgFileNames2D:
            graph = lazyflow.graph.Graph()
            reader = OperatorWrapper(OpDataSelection, graph=graph)
            reader.ProjectFile.setValue(self.projectFile)
            reader.WorkingDirectory.setValue(os.getcwd())
            reader.ProjectDataGroup.setValue('DataSelection/local_data')

            info = DatasetInfo()
            # Will be read from the filesystem since the data won't be found in the project file.
            info.location = DatasetInfo.Location.ProjectInternal
            info.filePath = fileName
            info.internalPath = ""
            info.invertColors = False
            info.convertToGrayscale = False

            reader.Dataset.setValues([info])

            # Read the test files using the data selection operator and verify the contents
            imgData2D = reader.Image[0][...].wait()

            # Check the file name output
            assert reader.ImageName[0].value == fileName
            # Check raw images
            assert imgData2D.shape == self.imgData2D.shape
            # skip this if image was saved compressed:
            if any(x in fileName.lower() for x in self.compressedExtensions):
                print("Skipping raw comparison for compressed data: {}".format(fileName))
                continue
            numpy.testing.assert_array_equal(imgData2D, self.imgData2D)
Example No. 47
    def test_load_single_file_with_list(self):
        graph = lazyflow.graph.Graph()
        reader = OperatorWrapper(OpDataSelection,
                                 graph=graph,
                                 operator_kwargs={'forceAxisOrder': False})
        reader.ProjectFile.setValue(self.projectFile)
        reader.WorkingDirectory.setValue(os.getcwd())
        reader.ProjectDataGroup.setValue('DataSelection/local_data')

        fileNameString = os.path.pathsep.join(self.file_names)
        info = DatasetInfo(filepath=fileNameString)
        # Will be read from the filesystem since the data won't be found in the project file.
        info.location = DatasetInfo.Location.ProjectInternal
        info.internalPath = ""
        info.invertColors = False
        info.convertToGrayscale = False

        reader.Dataset.setValues([info])

        # Read the test files using the data selection operator and verify the contents
        imgData = reader.Image[0][...].wait()
        print('imgData', reader.Image.meta.axistags,
              reader.Image.meta.original_axistags)

        # Check raw images
        assert imgData.shape == self.imgData3Dct.shape, (
            imgData.shape, self.imgData3Dct.shape)

        numpy.testing.assert_array_equal(imgData, self.imgData3Dct)
Example No. 48
    def test_load_single_file_with_glob(self):
        graph = lazyflow.graph.Graph()
        reader = OperatorWrapper(OpDataSelection, graph=graph)
        reader.ProjectFile.setValue(self.projectFile)
        reader.WorkingDirectory.setValue(os.getcwd())
        reader.ProjectDataGroup.setValue('DataSelection/local_data')

        info = DatasetInfo(filepath=self.glob_string)
        # Will be read from the filesystem since the data won't be found in the project file.
        info.location = DatasetInfo.Location.ProjectInternal
        info.internalPath = ""
        info.invertColors = False
        info.convertToGrayscale = False

        reader.Dataset.setValues([info])

        # Read the test files using the data selection operator and verify the contents
        imgData = reader.Image[0][...].wait()

        # Check raw images
        assert imgData.shape == self.imgData3Dct.shape

        numpy.testing.assert_array_equal(imgData, self.imgData3Dct)
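
The glob_string used here is expected to expand to the whole file sequence on disk; a plausible fixture definition (directory and pattern are hypothetical):

        # Hypothetical: matches e.g. slice_000.png ... slice_009.png in tmpdir.
        self.glob_string = os.path.join(self.tmpdir, 'slice_*.png')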
Example No. 49
    def testNoChannelAxis(self):
        """
        If we add a dataset that is missing a channel axis altogether, 
        the operator should automatically append a channel axis.
        """
        noChannelFilename = os.path.join(self.workingDir, 'NoChannelAxis.npy')
        noChannelData = numpy.random.random( (100,100) )
        numpy.save(noChannelFilename, noChannelData)

        info = DatasetInfo()
        info.filePath = noChannelFilename
        info.axistags = vigra.defaultAxistags('xy')
        
        graph = Graph()
        op = OpDataSelectionGroup( graph=graph )
        op.WorkingDirectory.setValue( self.workingDir )
        op.DatasetRoles.setValue( ['RoleA'] )

        op.DatasetGroup.resize( 1 )
        op.DatasetGroup[0].setValue( info )

        assert op.ImageGroup[0].ready()
        
        # Note that we expect a channel axis to be appended to the data.
        expected_data = noChannelData[:,:,numpy.newaxis]
        data_from_op = op.ImageGroup[0][:].wait()
        
        assert data_from_op.dtype == expected_data.dtype 
        assert data_from_op.shape == expected_data.shape
        assert (data_from_op == expected_data).all()

        # op.Image is a synonym for op.ImageGroup[0]
        assert op.Image.ready()
        assert (op.Image[:].wait() == expected_data).all()
        
        # Ensure that files opened by the inner operators are closed before we exit.
        op.DatasetGroup.resize(0)
Example No. 50
    def testWeirdAxisInfos(self):
        """
        If we add a dataset that has the channel axis in the wrong place, 
        the operator should automatically transpose it to be last.
        """
        weirdAxisFilename = os.path.join(self.workingDir, 'WeirdAxes.npy')
        weirdAxisData = numpy.random.random( (3,100,100) )
        numpy.save(weirdAxisFilename, weirdAxisData)

        info = DatasetInfo()
        info.filePath = weirdAxisFilename
        info.axistags = vigra.defaultAxistags('cxy')
        
        graph = Graph()
        op = OpDataSelectionGroup( graph=graph )
        op.WorkingDirectory.setValue( self.workingDir )
        op.DatasetRoles.setValue( ['RoleA'] )

        op.DatasetGroup.resize( 1 )
        op.DatasetGroup[0].setValue( info )

        assert op.ImageGroup[0].ready()
        
        # Note that we expect the channel axis to be transposed to be last.
        expected_data = weirdAxisData.transpose( 1,2,0 )
        data_from_op = op.ImageGroup[0][:].wait()
        
        assert data_from_op.dtype == expected_data.dtype 
        assert data_from_op.shape == expected_data.shape
        assert (data_from_op == expected_data).all()

        # op.Image is a synonym for op.ImageGroup[0]
        assert op.Image.ready()
        assert (op.Image[:].wait() == expected_data).all()
        
        # Ensure that files opened by the inner operators are closed before we exit.
        op.DatasetGroup.resize(0)
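
Both axis tests lean on vigra.defaultAxistags, which builds an ordered tag list from a per-axis key string; a small standalone check of what 'cxy' declares:

import vigra

# 'cxy' declares channel-first data; OpDataSelectionGroup is then expected
# to transpose so that the channel axis ends up last ('xyc').
tags = vigra.defaultAxistags('cxy')
print([tag.key for tag in tags])  # -> ['c', 'x', 'y']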
Example No. 51
    def testNoChannelAxis(self):
        """
        If we add a dataset that is missing a channel axis altogether, 
        the operator should automatically append a channel axis.
        """
        noChannelFilename = os.path.join(self.workingDir, 'NoChannelAxis.npy')
        noChannelData = numpy.random.random((100, 100))
        numpy.save(noChannelFilename, noChannelData)

        info = DatasetInfo()
        info.filePath = noChannelFilename
        info.axistags = vigra.defaultAxistags('xy')

        graph = Graph()
        op = OpDataSelectionGroup(graph=graph)
        op.WorkingDirectory.setValue(self.workingDir)
        op.DatasetRoles.setValue(['RoleA'])

        op.DatasetGroup.resize(1)
        op.DatasetGroup[0].setValue(info)

        assert op.ImageGroup[0].ready()

        # Note that we expect a channel axis to be appended to the data.
        expected_data = noChannelData[:, :, numpy.newaxis]
        data_from_op = op.ImageGroup[0][:].wait()

        assert data_from_op.dtype == expected_data.dtype
        assert data_from_op.shape == expected_data.shape
        assert (data_from_op == expected_data).all()

        # op.Image is a synonym for op.ImageGroup[0]
        assert op.Image.ready()
        assert (op.Image[:].wait() == expected_data).all()

        # Ensure that files opened by the inner operators are closed before we exit.
        op.DatasetGroup.resize(0)
Example No. 52
def debug_with_new(shell):
    """
    (Function for debug and testing.)
    """
    projFilePath = "/magnetic/test_project.ilp"

    # New project
    shell.createAndLoadNewProject(projFilePath)

    workflow = shell.projectManager.workflow

    from ilastik.applets.dataSelection.opDataSelection import DatasetInfo

    rawInfo = DatasetInfo()
    rawInfo.filePath = '/magnetic/synapse_small.npy'
    opDataSelection = workflow.rawDataSelectionApplet.topLevelOperator
    opDataSelection.Dataset.resize(1)
    opDataSelection.Dataset[0].setValue(rawInfo)
    
    binaryInfo = DatasetInfo()
    binaryInfo.filePath = '/magnetic/synapse_small_binary.npy'
    opDataSelection = workflow.dataSelectionApplet.topLevelOperator
    opDataSelection.Dataset.resize(1)
    opDataSelection.Dataset[0].setValue(binaryInfo)
Example No. 53
    def _get_template_dataset_infos(self, input_axes=None, sequence_axis=None):
        """
        Sometimes the default settings for an input file are not suitable (e.g. the axistags need to be changed).
        We assume the LAST non-batch input in the workflow has settings that will work for all batch processing inputs.
        Here, we get the DatasetInfo objects from that lane and store them as 'templates' to modify for all batch-
        processing files.
        """
        template_infos = {}

        # If there isn't an available dataset to use as a template
        if len(self.dataSelectionApplet.topLevelOperator.DatasetGroup) == 0:
            num_roles = len(
                self.dataSelectionApplet.topLevelOperator.DatasetRoles.value)
            for role_index in range(num_roles):
                template_infos[role_index] = DatasetInfo()
                if input_axes:
                    template_infos[role_index].axistags = vigra.defaultAxistags(input_axes)
                if sequence_axis:
                    template_infos[role_index].sequenceAxis = sequence_axis
            return template_infos

        # Use the LAST non-batch input file as our 'template' for DatasetInfo settings (e.g. axistags)
        template_lane = len(
            self.dataSelectionApplet.topLevelOperator.DatasetGroup) - 1
        opDataSelectionTemplateView = self.dataSelectionApplet.topLevelOperator.getLane(
            template_lane)

        for role_index, info_slot in enumerate(
                opDataSelectionTemplateView.DatasetGroup):
            if info_slot.ready():
                template_infos[role_index] = info_slot.value
            else:
                template_infos[role_index] = DatasetInfo()
            if input_axes:
                # Support the --input_axes arg to override input axis order, same as DataSelection applet.
                template_infos[role_index].axistags = vigra.defaultAxistags(
                    input_axes)
            if sequence_axis:
                template_infos[role_index].sequenceAxis = sequence_axis
        return template_infos
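
A hypothetical batch-processing caller would fetch these templates once and reuse them for every input file in the batch:

        # Hypothetical usage (argument values are illustrative).
        template_infos = self._get_template_dataset_infos(input_axes='zyxc',
                                                          sequence_axis='t')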
Example No. 54
    def test(self):
        """
        Make sure that the dataset roles work the way we expect them to.
        """
        infoA = DatasetInfo()
        infoA.filePath = self.group1Data[0][0]

        infoC = DatasetInfo()
        infoC.filePath = self.group1Data[1][0]

        graph = Graph()
        op = OpDataSelectionGroup(graph=graph)
        op.WorkingDirectory.setValue(self.workingDir)
        op.DatasetRoles.setValue(['RoleA', 'RoleB', 'RoleC'])

        op.DatasetGroup.resize(3)
        op.DatasetGroup[0].setValue(infoA)
        # Leave RoleB blank -- datasets other than the first are optional
        op.DatasetGroup[2].setValue(infoC)

        assert op.ImageGroup[0].ready()
        assert op.ImageGroup[2].ready()

        expectedDataA = self.group1Data[0][1]
        dataFromOpA = op.ImageGroup[0][:].wait()

        assert dataFromOpA.dtype == expectedDataA.dtype
        assert dataFromOpA.shape == expectedDataA.shape
        assert (dataFromOpA == expectedDataA).all()

        expectedDataC = self.group1Data[1][1]
        dataFromOpC = op.ImageGroup[2][:].wait()

        assert dataFromOpC.dtype == expectedDataC.dtype
        assert dataFromOpC.shape == expectedDataC.shape
        assert (dataFromOpC == expectedDataC).all()

        assert op.Image.ready()
        assert (op.Image[:].wait() == expectedDataA).all()

        # Ensure that files opened by the inner operators are closed before we exit.
        op.DatasetGroup.resize(0)
Example No. 55
    def test_load_single_file_with_glob(self):
        graph = lazyflow.graph.Graph()
        reader = OperatorWrapper(OpDataSelection, graph=graph)
        reader.ProjectFile.setValue(self.projectFile)
        reader.WorkingDirectory.setValue(os.getcwd())
        reader.ProjectDataGroup.setValue('DataSelection/local_data')

        info = DatasetInfo(filepath=self.glob_string)
        # Will be read from the filesystem since the data won't be found in the project file.
        info.location = DatasetInfo.Location.ProjectInternal
        info.internalPath = ""
        info.invertColors = False
        info.convertToGrayscale = False

        reader.Dataset.setValues([info])

        # Read the test files using the data selection operator and verify the contents
        imgData = reader.Image[0][...].wait()

        # Check raw images
        assert imgData.shape == self.imgData3Dct.shape

        numpy.testing.assert_array_equal(imgData, self.imgData3Dct)
Example No. 56
    def test(self):
        infoA = DatasetInfo()
        infoA.filePath = self.group1Data[0][0]

        infoC = DatasetInfo()
        infoC.filePath = self.group1Data[1][0]

        graph = Graph()
        op = OpDataSelectionGroup(graph=graph)
        op.WorkingDirectory.setValue(self.workingDir)
        op.DatasetRoles.setValue(['RoleA', 'RoleB', 'RoleC'])

        op.DatasetGroup.resize(3)
        op.DatasetGroup[0].setValue(infoA)
        # Leave RoleB blank -- datasets other than the first are optional
        op.DatasetGroup[2].setValue(infoC)

        assert op.ImageGroup[0].ready()
        assert op.ImageGroup[2].ready()

        expectedDataA = self.group1Data[0][1]
        dataFromOpA = op.ImageGroup[0][:].wait()

        assert dataFromOpA.dtype == expectedDataA.dtype
        assert dataFromOpA.shape == expectedDataA.shape
        assert (dataFromOpA == expectedDataA).all()

        expectedDataC = self.group1Data[1][1]
        dataFromOpC = op.ImageGroup[2][:].wait()

        assert dataFromOpC.dtype == expectedDataC.dtype
        assert dataFromOpC.shape == expectedDataC.shape
        assert (dataFromOpC == expectedDataC).all()

        assert op.Image.ready()
        assert (op.Image[:].wait() == expectedDataA).all()
Example No. 57
    def testBasic(self):
        graph = lazyflow.graph.Graph()
        reader = OperatorWrapper(OpDataSelection, graph=graph)
        reader.ProjectFile.setValue(self.projectFile)
        reader.WorkingDirectory.setValue(os.getcwd())
        reader.ProjectDataGroup.setValue('DataSelection/local_data')

        # Create a list of dataset infos . . .
        datasetInfos = []

        # npy
        info = DatasetInfo()
        # Will be read from the filesystem since the data won't be found in the project file.
        info.location = DatasetInfo.Location.ProjectInternal
        info.filePath = self.testNpyFileName
        info.internalPath = ""
        info.invertColors = False
        info.convertToGrayscale = False
        datasetInfos.append(info)

        # png
        info = DatasetInfo()
        info.location = DatasetInfo.Location.FileSystem
        info.filePath = self.testPngFileName
        info.internalPath = ""
        info.invertColors = False
        info.convertToGrayscale = False
        datasetInfos.append(info)

        reader.Dataset.setValues(datasetInfos)

        # Read the test files using the data selection operator and verify the contents
        npyData = reader.Image[0][...].wait()
        pngData = reader.Image[1][...].wait()

        # Check the file name output
        print(reader.ImageName[0].value)
        assert reader.ImageName[0].value == self.testNpyFileName
        assert reader.ImageName[1].value == self.testPngFileName

        # Check raw images
        assert npyData.shape == (10, 11, 1)
        for x in range(npyData.shape[0]):
            for y in range(npyData.shape[1]):
                assert npyData[x, y, 0] == x + y

        assert pngData.shape == (100, 200, 3)
        for x in range(pngData.shape[0]):
            for y in range(pngData.shape[1]):
                for c in range(pngData.shape[2]):
                    assert pngData[x, y, c] == (x + y) % 256
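
The pixel loops above only hold if the fixture wrote ramp images whose value at (x, y) is x + y; a minimal sketch of the npy half of that setup (the file name is hypothetical):

import numpy

# Hypothetical fixture: a (10, 11) ramp where every pixel equals x + y.
npyData = numpy.indices((10, 11)).sum(axis=0).astype(numpy.uint8)
numpy.save('testImage.npy', npyData)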
Example No. 58
    def _run_export_with_empty_batch_lane(self, role_input_datas,
                                          batch_lane_index, template_infos,
                                          progress_callback, export_to_array):
        """
        Configure the fresh batch lane with the given input files, and export the results.

        role_input_datas: A list of str or DatasetInfo, one item for each dataset-role.
                          (For example, a workflow might have two roles: Raw Data and Binary Segmentation.)

        batch_lane_index: The lane index used as the batch export lane.

        template_infos: A dict of DatasetInfo objects.
                        Settings like axistags, etc. that cannot be automatically inferred
                        from the filepath will be copied from these template objects.
                        (See explanation in _get_template_dataset_infos(), above.)

        progress_callback: Export progress for the current lane is reported via this callback.

        export_to_array: If True, export the results to an in-memory array and return it,
                         instead of writing to disk and returning the export file path.
        """
        assert role_input_datas[0], \
            "At least one file must be provided for the first dataset role."
        opDataSelectionBatchLaneView = self.dataSelectionApplet.topLevelOperator.getLane(
            batch_lane_index)

        # Apply new settings for each role
        for role_index, data_for_role in enumerate(role_input_datas):
            if not data_for_role:
                continue

            if isinstance(data_for_role, DatasetInfo):
                # Caller provided a pre-configured DatasetInfo instead of just a path
                info = data_for_role
            else:
                # Copy the template info, but override filepath, etc.
                template_info = template_infos[role_index]
                info = DatasetInfo.from_file_path(template_info, data_for_role)

            # Force real data source when in headless mode.
            # If raw data doesn't exist in headless mode, we use fake data reader
            # (datasetInfo.realDataSource = False). Now we need to ensure that
            # the flag is set to True for new image lanes.
            info.realDataSource = True
            # Apply to the data selection operator
            opDataSelectionBatchLaneView.DatasetGroup[role_index].setValue(info)

        # Make sure nothing went wrong
        opDataExportBatchlaneView = self.dataExportApplet.topLevelOperator.getLane(
            batch_lane_index)
        # New lanes were added.
        # Give the workflow a chance to restore anything that was unnecessarily invalidated (e.g. classifiers)
        self.workflow().handleNewLanesAdded()

        assert opDataExportBatchlaneView.ImageToExport.ready()
        assert opDataExportBatchlaneView.ExportPath.ready()

        # Call customization hook
        self.dataExportApplet.prepare_lane_for_export(batch_lane_index)

        # Finally, run the export
        opDataExportBatchlaneView.progressSignal.subscribe(progress_callback)

        if export_to_array:
            logger.info("Exporting to in-memory array.")
            result = opDataExportBatchlaneView.run_export_to_array()
        else:
            logger.info("Exporting to {}".format(
                opDataExportBatchlaneView.ExportPath.value))
            opDataExportBatchlaneView.run_export()
            result = opDataExportBatchlaneView.ExportPath.value

        # Call customization hook
        self.dataExportApplet.post_process_lane_export(batch_lane_index)

        return result
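
A hypothetical driver would call this method once per batch file, passing one input per dataset role (only the first role is mandatory):

        # Hypothetical driver loop (names are illustrative, not the real API).
        for raw_path in ['data1.h5', 'data2.h5']:
            result = self._run_export_with_empty_batch_lane(
                role_input_datas=[raw_path],   # one entry per dataset role
                batch_lane_index=batch_lane_index,
                template_infos=template_infos,
                progress_callback=lambda pct: None,
                export_to_array=False)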