Exemplo n.º 1
0
    def testBasic_2d_Sequence(self):
        """Export a random 3D+c volume as a png sequence, read it back with
        OpStackLoader, and verify the round trip preserves shape and data.
        """
        volume = (255 * numpy.random.random((10, 50, 100, 3))).astype(numpy.uint8)
        volume = vigra.taggedView(volume, vigra.defaultAxistags("zyxc"))

        # Feed the data through a cache operator: opExport.setValue() is not
        # usable here because OpStackWriter can't work with ValueRequests.
        graph = Graph()
        cache = OpBlockedArrayCache(graph=graph)
        cache.BlockShape.setValue(volume.shape)
        cache.Input.setValue(volume)

        pattern = self._tmpdir + "/test_export_x{x_start}-{x_stop}_y{y_start}-{y_stop}_z{slice_index}"
        exporter = OpExportSlot(graph=graph)
        exporter.Input.connect(cache.Output)
        exporter.OutputFormat.setValue("png sequence")
        exporter.OutputFilenameFormat.setValue(pattern)
        exporter.CoordinateOffset.setValue((10, 20, 30, 0))

        exporter.run_export()

        # Turn the exported filename pattern into a glob matching every slice.
        export_pattern = exporter.ExportPath.value
        glob_pattern = export_pattern.format(slice_index=999)
        glob_pattern = glob_pattern.replace("999", "*")

        reader = OpStackLoader(graph=graph)
        try:
            reader.globstring.setValue(glob_pattern)

            assert reader.stack.meta.shape == volume.shape, "Exported files were of the wrong shape or number."
            assert (reader.stack[:].wait() == volume.view(numpy.ndarray)).all(), "Exported data was not correct"
        finally:
            reader.cleanUp()
Exemplo n.º 2
0
    def test_Writer(self):
        """Export the cached test volume with OpStackWriter, read the slice
        files back with OpStackLoader, and verify the round trip.

        Fixes vs. original: reuses the already-fetched ``readData`` instead of
        requesting ``Output[:].wait()`` a second time, removes commented-out
        dead code, and cleans up the reader operators in a ``finally`` block
        (matching the sibling tests in this file).
        """
        # OpStackWriter needs a real upstream slot, so route the test data
        # through a cache operator instead of using setValue on the writer.
        opData = OpArrayCache( graph=self.graph )
        opData.blockShape.setValue( self.testData.shape )
        opData.Input.setValue( self.testData )

        opWriter = OpStackWriter(graph=self.graph)
        opWriter.FilepathPattern.setValue( self._stack_filepattern )
        opWriter.Input.connect( opData.Output )
        opWriter.SliceIndexOffset.setValue(22)

        # Run the export
        opWriter.run_export()

        # Replace the {slice_index} placeholder with a '*' wildcard so the
        # loader can glob all exported slice files.
        globstring = self._stack_filepattern.format( slice_index=999 )
        globstring = globstring.replace('999', '*')

        opReader = OpStackLoader( graph=self.graph )
        opReorderAxes = OpReorderAxes( graph=self.graph )
        try:
            opReader.globstring.setValue( globstring )

            # (The OpStackLoader produces txyzc order.)
            opReorderAxes.AxisOrder.setValue( self._axisorder )
            opReorderAxes.Input.connect( opReader.stack )

            readData = opReorderAxes.Output[:].wait()
            logger.debug("Expected shape={}".format( self.testData.shape ) )
            logger.debug("Read shape={}".format( readData.shape ) )

            assert opReorderAxes.Output.meta.shape == self.testData.shape, "Exported files were of the wrong shape or number."
            assert (readData == self.testData.view( numpy.ndarray )).all(), "Exported data was not correct"
        finally:
            opReorderAxes.cleanUp()
            opReader.cleanUp()
Exemplo n.º 3
0
 def _attemptOpenAsStack(self, filePath):
     if '*' in filePath:
         stackReader = OpStackLoader(parent=self)
         stackReader.globstring.setValue(filePath)
         return (stackReader, stackReader.stack)
     else:
         return (None, None)
Exemplo n.º 4
0
    def test_zyxc_stack_c(self):
        """Stack a series of 3D+c tiff files along the channel axis.

        _prepare_data writes one 3D+c tiff per t-slice; with
        stack_existing_channels=True the expected volume is returned
        reordered as 'czyx' for comparison.
        """
        expected, glob_pattern = self._prepare_data(
            "rand_3dc_stack_c", (5, 22, 33, 44, 2),
            "tzyxc",
            "t",
            stack_existing_channels=True)

        loader = OpStackLoader(graph=Graph())
        loader.SequenceAxis.setValue("c")
        loader.globstring.setValue(glob_pattern)

        # Metadata must describe a 4-axis czyx volume of the source dtype.
        assert len(loader.stack.meta.axistags) == 4
        assert loader.stack.meta.getAxisKeys() == list("czyx")
        assert loader.stack.meta.dtype == expected.dtype

        stacked = vigra.taggedView(loader.stack[:].wait(), "czyx")
        assert (stacked == expected).all(
        ), "3D+c Volume stacked along c did not match expected data."
Exemplo n.º 5
0
 def _attemptOpenAsStack(self, filePath):
     if '*' in filePath or os.path.pathsep in filePath:
         stackReader = OpStackLoader(parent=self)
         stackReader.globstring.setValue(filePath)
         return ([stackReader], stackReader.stack)
     else:
         return ([], None)
    def testLotsOfOptions(self):
        """Run the headless ilastik entry script with a full set of batch
        export options, then glob the exported png stack and sanity-check
        its shape and channel count.

        Fixes vs. original: ``sys.argv`` is saved and restored (the original
        clobbered it permanently, leaking state into later tests), and the
        reader operators are cleaned up in a ``finally`` block — both
        matching the other ``testLotsOfOptions`` variant in this file.
        """
        # NOTE: In this test, cmd-line args to nosetests will also end up getting "parsed" by ilastik.
        #       That shouldn't be an issue, since the pixel classification workflow ignores unrecognized options.
        #       See if __name__ == __main__ section, below.
        args = []
        args.append("--project=" + self.PROJECT_FILE)
        args.append("--headless")

        # Batch export options
        args.append(
            '--output_format=png sequence'
        )  # If we were actually launching from the command line, 'png sequence' would be in quotes...
        args.append(
            "--output_filename_format={dataset_dir}/{nickname}_prediction_z{slice_index}.png"
        )
        args.append("--export_dtype=uint8")
        args.append("--output_axis_order=zxyc")

        args.append("--pipeline_result_drange=(0.0,1.0)")
        args.append("--export_drange=(0,255)")

        args.append("--cutout_subregion=[(0,50,50,0,0), (1, 150, 150, 50, 2)]")
        args.append(self.SAMPLE_DATA)

        # Swap in our argv so ilastik's parser sees only these options;
        # restore afterwards so other tests are unaffected.
        old_sys_argv = list(sys.argv)
        sys.argv = [
            'ilastik.py'
        ]  # Clear the existing commandline args so it looks like we're starting fresh.
        sys.argv += args

        # Start up the ilastik.py entry script as if we had launched it from the command line
        # This will execute the batch mode script
        try:
            self.ilastik_startup.main()
        finally:
            sys.argv = old_sys_argv

        # Build a glob matching every exported slice file.
        output_path = self.SAMPLE_DATA[:-4] + "_prediction_z{slice_index}.png"
        globstring = output_path.format(slice_index=999)
        globstring = globstring.replace('999', '*')

        opReader = OpStackLoader(graph=Graph())
        opReorderAxes = OpReorderAxes(graph=Graph())
        try:
            opReader.globstring.setValue(globstring)

            # (The OpStackLoader produces txyzc order.)
            opReorderAxes.AxisOrder.setValue('txyzc')
            opReorderAxes.Input.connect(opReader.stack)

            readData = opReorderAxes.Output[:].wait()

            # Check basic attributes
            assert readData.shape[:-1] == self.data[
                0:1, 50:150, 50:150, 0:50,
                0:2].shape[:-1]  # Assume channel is last axis
            assert readData.shape[
                -1] == 2, "Wrong number of channels.  Expected 2, got {}".format(
                    readData.shape[-1])
        finally:
            # Clean-up.
            opReorderAxes.cleanUp()
            opReader.cleanUp()
Exemplo n.º 7
0
 def _attemptOpenAsStack(self, filePath):
     if "*" in filePath or os.path.pathsep in filePath:
         stackReader = OpStackLoader(parent=self)
         stackReader.SequenceAxis.connect(self.SequenceAxis)
         stackReader.globstring.setValue(filePath)
         return ([stackReader], stackReader.stack)
     else:
         return ([], None)
Exemplo n.º 8
0
    def importStackAsLocalDataset(self, info, sequence_axis='t'):
        """
        Add the given stack data to the project file as a local dataset.
        Does not update the topLevelOperator.

        :param info: A DatasetInfo object.
                     Note: info.filePath must be a str which lists the stack files, delimited with os.path.pathsep
                     Note: info will be MODIFIED by this function.  Use the modified info when assigning it to a dataset.
        :param sequence_axis: Axis along which the stack files are concatenated.
        :return: The value of the writer's WriteImage slot on success.
        """
        self.progressSignal.emit(0)

        projectFileHdf5 = self.topLevelOperator.ProjectFile.value

        globstring = info.filePath
        info.location = DatasetInfo.Location.ProjectInternal
        firstPathParts = PathComponents(info.filePath.split(os.path.pathsep)[0])
        info.filePath = firstPathParts.externalDirectory + '/??' + firstPathParts.extension
        info.fromstack = True

        # Use absolute path
        cwd = self.topLevelOperator.WorkingDirectory
        if os.path.pathsep not in globstring and not os.path.isabs(globstring):
            globstring = os.path.normpath( os.path.join(cwd, globstring) )

        if firstPathParts.extension.lower() in OpTiffReader.TIFF_EXTS:
            # Special loader for TIFFs
            opLoader = OpTiffSequenceReader( parent=self.topLevelOperator.parent )
            opLoader.SequenceAxis.setValue(sequence_axis)
            opLoader.GlobString.setValue(globstring)
            data_slot = opLoader.Output
        else:
            # All other sequences (e.g. pngs, jpegs, etc.)
            opLoader = OpStackLoader( parent=self.topLevelOperator.parent )
            opLoader.SequenceAxis.setValue(sequence_axis)
            opLoader.globstring.setValue(globstring)
            data_slot = opLoader.stack

        # Construct the writer *before* entering the try-block: if the
        # constructor raised inside the try, the finally-clause would hit a
        # NameError on opWriter.cleanUp(), masking the original exception.
        opWriter = OpH5WriterBigDataset(parent=self.topLevelOperator.parent)
        try:
            opWriter.hdf5File.setValue(projectFileHdf5)
            opWriter.hdf5Path.setValue(self.topGroupName + '/local_data/' + info.datasetId)
            opWriter.CompressionEnabled.setValue(False)
            # We assume that the main bottleneck is the hard disk,
            #  so adding lots of threads to access it at once seems like a bad idea.
            opWriter.BatchSize.setValue(1)
            opWriter.Image.connect( data_slot )

            # Forward progress from the writer directly to our applet
            opWriter.progressSignal.subscribe( self.progressSignal.emit )

            success = opWriter.WriteImage.value
        finally:
            opWriter.cleanUp()
            opLoader.cleanUp()
            self.progressSignal.emit(100)

        return success
Exemplo n.º 9
0
    def __init__(self, graph, register=True):
        """Set up the internal operator pipeline on the given graph."""
        Operator.__init__(self, graph, register)
        self.graph = graph

        # Child operators, all created on this operator's graph.
        self.loader = OpStackLoader(graph)
        self.op5ifyer = Op5ifyer(graph)
        self.outpiper = OpArrayPiper(graph)
        self.inverter = OpGrayscaleInverter(graph)
        self.converter = OpRgbToGrayscale(graph)
Exemplo n.º 10
0
    def test_tzyxc(self):
        """Load a 4D+c tiff series stacked along t and compare it with the
        source volume."""
        expected, glob_pattern = self._prepare_data(
            "rand_4dc", (5, 10, 50, 100, 3), "tzyxc", "t")

        loader = OpStackLoader(graph=Graph())
        loader.globstring.setValue(glob_pattern)

        # The loader must report five axes in tzyxc order with the source dtype.
        assert len(loader.stack.meta.axistags) == 5
        assert loader.stack.meta.getAxisKeys() == list("tzyxc")
        assert loader.stack.meta.dtype == expected.dtype

        stacked = vigra.taggedView(loader.stack[:].wait(), "tzyxc")
        assert (stacked == expected).all(), \
            "4D+c Volume from stack did not match expected data."
    def test_zyxc(self):
        """Load a 3D+c tiff series stacked along z and compare it with the
        source volume."""
        expected, glob_pattern = self._prepare_data(
            'rand_3dc', (10, 50, 100, 3), 'zyxc', 'z')

        loader = OpStackLoader(graph=Graph())
        loader.globstring.setValue(glob_pattern)

        # The loader must report four axes in zyxc order with the source dtype.
        assert len(loader.stack.meta.axistags) == 4
        assert loader.stack.meta.getAxisKeys() == list('zyxc')
        assert loader.stack.meta.dtype == expected.dtype

        stacked = vigra.taggedView(loader.stack[:].wait(), 'zyxc')
        assert (stacked == expected).all(), \
            "3D+c Volume from stack did not match expected data."
Exemplo n.º 12
0
    def test_txyz(self):
        """Load a 4D (tzyx) series; the loader appends a singleton c axis
        which is dropped again before the content comparison."""
        expected_tzyx, glob_pattern = self._prepare_data_tzyx()

        loader = OpStackLoader(graph=Graph())
        loader.globstring.setValue(glob_pattern)

        assert len(loader.stack.meta.axistags) == 5
        assert loader.stack.meta.getAxisKeys() == list('tzyxc')
        assert loader.stack.meta.dtype == expected_tzyx.dtype

        stacked = vigra.taggedView(loader.stack[:].wait(), 'tzyxc')
        # Strip the channel axis to match the 4D source volume.
        assert (stacked.withAxes(*'tzyx') == expected_tzyx).all(), \
            "4D Volume from stack did not match expected data."
Exemplo n.º 13
0
    def test_xyz(self):
        """Load a 3D (zyx) series; the loader appends a singleton c axis
        which is dropped again before the content comparison."""
        expected, glob_pattern = self._prepare_data(
            "rand_3d", (11, 99, 98), "zyx", "z")

        loader = OpStackLoader(graph=Graph())
        loader.globstring.setValue(glob_pattern)

        assert len(loader.stack.meta.axistags) == 4
        assert loader.stack.meta.getAxisKeys() == list("zyxc")
        assert loader.stack.meta.dtype == expected.dtype

        stacked = vigra.taggedView(loader.stack[:].wait(), "zyxc")
        # Strip the channel axis to match the 3D source volume.
        assert (stacked.withAxes(*"zyx") == expected).all(), \
            "3D Volume from stack did not match expected data."
Exemplo n.º 14
0
    def test_xyz_stack_c(self):
        """Stack 3D files along c, then reorder back to the source's czxy
        order for the content comparison."""
        expected, glob_pattern = self._prepare_data(
            "rand_3d_stack_c", (2, 3, 5, 4), "czxy", "c")

        loader = OpStackLoader(graph=Graph())
        loader.SequenceAxis.setValue("c")
        loader.globstring.setValue(glob_pattern)

        # The loader yields czyx regardless of the source's axis order.
        assert len(loader.stack.meta.axistags) == 4
        assert loader.stack.meta.getAxisKeys() == list("czyx")
        assert loader.stack.meta.dtype == expected.dtype

        stacked = vigra.taggedView(loader.stack[:].wait(), "czyx")
        assert (stacked.withAxes(*"czxy") == expected).all(
        ), "3D Volume stacked along c did not match expected data."
Exemplo n.º 15
0
    def testBasic_MultipageTiffSequence(self):
        """Export a random 5D volume as a multipage-tiff sequence, read it
        back with OpStackLoader, and verify the round trip."""
        volume = (255 * numpy.random.random((5, 10, 50, 100, 3))).astype(numpy.uint8)
        volume = vigra.taggedView(volume, vigra.defaultAxistags('tzyxc'))

        # Feed the data through a cache operator: opExport.setValue() is not
        # usable here because OpStackWriter can't work with ValueRequests.
        graph = Graph()
        cache = OpArrayCache(graph=graph)
        cache.blockShape.setValue(volume.shape)
        cache.Input.setValue(volume)

        pattern = self._tmpdir + '/test_export_x{x_start}-{x_stop}_y{y_start}-{y_stop}_t{slice_index}'
        exporter = OpExportSlot(graph=graph)
        exporter.Input.connect(cache.Output)
        exporter.OutputFormat.setValue('multipage tiff sequence')
        exporter.OutputFilenameFormat.setValue(pattern)
        exporter.CoordinateOffset.setValue((7, 10, 20, 30, 0))

        exporter.run_export()

        # Turn the exported filename pattern into a glob matching every slice.
        export_pattern = exporter.ExportPath.value
        glob_pattern = export_pattern.format(slice_index=999)
        glob_pattern = glob_pattern.replace('999', '*')

        reader = OpStackLoader(graph=graph)
        reader.globstring.setValue(glob_pattern)

        # (The OpStackLoader produces txyzc order.)
        reorder = OpReorderAxes(graph=graph)
        reorder.AxisOrder.setValue('tzyxc')
        reorder.Input.connect(reader.stack)

        assert reorder.Output.meta.shape == volume.shape, "Exported files were of the wrong shape or number."
        assert (reorder.Output[:].wait() == volume.view(
            numpy.ndarray)).all(), "Exported data was not correct"

        # Cleanup
        reorder.cleanUp()
        reader.cleanUp()
Exemplo n.º 16
0
    def test_stack_pngs(self, inputdata_dir):
        """Stack three pngs along the channel axis and compare the result
        against the ground-truth volume stored in 3cRGB.h5.

        Fixes vs. original: the hdf5 file is opened in a context manager and
        its dataset is read eagerly, so the file handle is closed (the
        original leaked an open h5py.File).
        """
        graph = Graph()
        op = OpStackLoader(graph=graph)
        op.SequenceAxis.setValue("c")

        globstring = os.path.join(inputdata_dir, "3c[0-2].png")
        op.globstring.setValue(globstring)

        assert len(op.stack.meta.axistags) == 3
        assert op.stack.meta.getAxisKeys() == list("xyc")

        stack = op.stack[:].wait()

        # Read the expected volume into memory so the file can be closed.
        gt_path = os.path.join(inputdata_dir, "3cRGB.h5")
        with h5py.File(gt_path, "r") as h5File:
            expected = h5File["data"][:]

        assert stack.dtype == expected.dtype
        assert stack.shape == expected.shape

        assert (stack == expected
                ).all(), "stacked 2d images did not match expected data."
    def test_stack_pngs(self):
        """Stack three pngs along the channel axis and compare the result
        against the ground-truth volume in data/inputdata/3cRGB.h5.

        Fixes vs. original: the hdf5 file is opened in a context manager and
        its dataset is read eagerly, so the file handle is closed (the
        original leaked an open h5py.File).
        """
        graph = Graph()
        op = OpStackLoader(graph=graph)
        op.SequenceAxis.setValue('c')

        globstring = os.path.join('data', 'inputdata', '3c[0-2].png')
        op.globstring.setValue(globstring)

        assert len(op.stack.meta.axistags) == 3
        assert op.stack.meta.getAxisKeys() == list('xyc')

        stack = op.stack[:].wait()

        # Read the expected volume into memory so the file can be closed.
        gt_path = os.path.join('data', 'inputdata', '3cRGB.h5')
        with h5py.File(gt_path, 'r') as h5File:
            expected = h5File['data'][:]

        assert stack.dtype == expected.dtype
        assert stack.shape == expected.shape

        assert (stack == expected).all(), \
            "stacked 2d images did not match expected data."
    def testLotsOfOptions(self):
        """Run the headless ilastik entry script with a full set of batch
        export options, then glob the exported segmentation stack and check
        its shape and channel count.
        """
        # OLD_LAZYFLOW_STATUS_MONITOR_SECONDS = os.getenv("LAZYFLOW_STATUS_MONITOR_SECONDS", None)
        # os.environ["LAZYFLOW_STATUS_MONITOR_SECONDS"] = "1"

        # NOTE: In this test, cmd-line args to tests will also end up getting "parsed" by ilastik.
        #       That shouldn't be an issue, since the pixel classification workflow ignores unrecognized options.
        #       See if __name__ == __main__ section, below.
        args = []
        args.append("--project=" + self.PROJECT_FILE)
        args.append("--headless")
        # args.append( "--sys_tmp_dir=/tmp" )

        # Batch export options
        args.append("--export_source=Simple Segmentation")
        args.append(
            "--output_format=png sequence"
        )  # If we were actually launching from the command line, 'png sequence' would be in quotes...
        args.append("--output_filename_format={dataset_dir}/{nickname}_segmentation_z{slice_index}.png")
        args.append("--export_dtype=uint8")
        args.append("--output_axis_order=zxyc")

        args.append("--pipeline_result_drange=(0,2)")
        args.append("--export_drange=(0,255)")

        args.append("--cutout_subregion=[(0,10,10,0,0), (1, 20, 20, 5, 1)]")
        args.append(self.SAMPLE_DATA)

        # Swap in our argv; restored in the finally below so other tests
        # are not affected by the mutation.
        old_sys_argv = list(sys.argv)
        sys.argv = ["ilastik.py"]  # Clear the existing commandline args so it looks like we're starting fresh.
        sys.argv += args

        # Start up the ilastik.py entry script as if we had launched it from the command line
        # This will execute the batch mode script
        try:
            self.ilastik_startup.main()
        finally:
            sys.argv = old_sys_argv
        #             if OLD_LAZYFLOW_STATUS_MONITOR_SECONDS:
        #                 os.environ["LAZYFLOW_STATUS_MONITOR_SECONDS"] = OLD_LAZYFLOW_STATUS_MONITOR_SECONDS

        # Build a glob matching every exported slice file by substituting
        # a '*' wildcard for the {slice_index} placeholder.
        output_path = self.SAMPLE_DATA[:-4] + "_segmentation_z{slice_index}.png"
        globstring = output_path.format(slice_index=999)
        globstring = globstring.replace("999", "*")

        opReader = OpStackLoader(graph=Graph())
        opReader.globstring.setValue(globstring)

        # (The OpStackLoader produces txyzc order.)
        opReorderAxes = OpReorderAxes(graph=Graph())
        opReorderAxes.AxisOrder.setValue("tzyxc")
        opReorderAxes.Input.connect(opReader.stack)

        try:
            readData = opReorderAxes.Output[:].wait()

            # Check basic attributes
            assert readData.shape[:-1] == (1, 10, 10, 5), readData.shape[:-1]  # Assume channel is last axis
            assert readData.shape[-1] == 1, "Wrong number of channels.  Expected 1, got {}".format(readData.shape[-1])
        finally:
            # Clean-up.
            opReorderAxes.cleanUp()
            opReader.cleanUp()
Exemplo n.º 19
0
    def importStackAsLocalDataset(self, info, sequence_axis='t'):
        """
        Add the given stack data to the project file as a local dataset.
        Does not update the topLevelOperator.

        :param info: A DatasetInfo object.
                     Note: info.filePath must be a str which lists the stack files, delimited with os.path.pathsep
                     Note: info will be MODIFIED by this function.  Use the modified info when assigning it to a dataset.
        :param sequence_axis: Axis along which the stack files are concatenated.
        :return: The value of the writer's WriteImage slot on success.
        """
        self.progressSignal(0)

        projectFileHdf5 = self.topLevelOperator.ProjectFile.value

        globstring = info.filePath
        info.location = DatasetInfo.Location.ProjectInternal
        firstPathParts = PathComponents(
            info.filePath.split(os.path.pathsep)[0])
        info.filePath = firstPathParts.externalDirectory + '/??' + firstPathParts.extension
        info.fromstack = True

        # Use absolute path
        cwd = self.topLevelOperator.WorkingDirectory
        if os.path.pathsep not in globstring and not os.path.isabs(globstring):
            globstring = os.path.normpath(os.path.join(cwd, globstring))

        if firstPathParts.extension.lower() in OpTiffReader.TIFF_EXTS:
            # Special loader for TIFFs
            opLoader = OpTiffSequenceReader(
                parent=self.topLevelOperator.parent)
            opLoader.SequenceAxis.setValue(sequence_axis)
            opLoader.GlobString.setValue(globstring)
            data_slot = opLoader.Output
        elif firstPathParts.extension.lower() in (
                OpStreamingH5N5SequenceReaderM.H5EXTS +
                OpStreamingH5N5SequenceReaderM.N5EXTS):
            # Now use the .checkGlobString method of the stack readers
            # to decide between the single-file and multi-file variants.
            isSingleFile = True
            try:
                OpStreamingH5N5SequenceReaderS.checkGlobString(globstring)
            except (OpStreamingH5N5SequenceReaderS.NoInternalPlaceholderError,
                    OpStreamingH5N5SequenceReaderS.NotTheSameFileError,
                    OpStreamingH5N5SequenceReaderS.ExternalPlaceholderError):
                isSingleFile = False

            isMultiFile = True
            try:
                OpStreamingH5N5SequenceReaderM.checkGlobString(globstring)
            except (OpStreamingH5N5SequenceReaderM.NoExternalPlaceholderError,
                    OpStreamingH5N5SequenceReaderM.SameFileError,
                    OpStreamingH5N5SequenceReaderM.InternalPlaceholderError):
                isMultiFile = False

            assert (not (isMultiFile and isSingleFile)), (
                "Something is wrong, glob string shouldn't allow both")
            assert (isMultiFile or isSingleFile), (
                "Glob string doesn't conform to h5 stack glob string rules")

            if isSingleFile:
                opLoader = OpStreamingH5N5SequenceReaderS(
                    parent=self.topLevelOperator.parent)
            elif isMultiFile:
                opLoader = OpStreamingH5N5SequenceReaderM(
                    parent=self.topLevelOperator.parent)

            opLoader.SequenceAxis.setValue(sequence_axis)
            opLoader.GlobString.setValue(globstring)
            data_slot = opLoader.OutputImage
        else:
            # All other sequences (e.g. pngs, jpegs, etc.)
            opLoader = OpStackLoader(parent=self.topLevelOperator.parent)
            opLoader.SequenceAxis.setValue(sequence_axis)
            opLoader.globstring.setValue(globstring)
            data_slot = opLoader.stack

        # Construct the writer *before* entering the try-block: if the
        # constructor raised inside the try, the finally-clause would hit a
        # NameError on opWriter.cleanUp(), masking the original exception.
        opWriter = OpH5N5WriterBigDataset(
            parent=self.topLevelOperator.parent)
        try:
            opWriter.h5N5File.setValue(projectFileHdf5)
            opWriter.h5N5Path.setValue(self.topGroupName + '/local_data/' +
                                       info.datasetId)
            opWriter.CompressionEnabled.setValue(False)
            # We assume that the main bottleneck is the hard disk,
            #  so adding lots of threads to access it at once seems like a bad idea.
            opWriter.BatchSize.setValue(1)
            opWriter.Image.connect(data_slot)

            # Forward progress from the writer directly to our applet
            opWriter.progressSignal.subscribe(self.progressSignal)

            success = opWriter.WriteImage.value
        finally:
            opWriter.cleanUp()
            opLoader.cleanUp()
            self.progressSignal(100)

        return success