def test_Writer(self):
    """Round-trip test: export cached test data as a multipage-tiff sequence,
    then read it back with OpStackLoader and compare against the original."""
    opData = OpArrayCache(graph=self.graph)
    opData.blockShape.setValue(self.testData.shape)
    opData.Input.setValue(self.testData)

    opExport = OpExportMultipageTiffSequence(graph=self.graph)
    opExport.FilepathPattern.setValue(self._stack_filepattern)
    opExport.Input.connect(opData.Output)
    # Offset is folded into the exported filenames' {slice_index}.
    opExport.SliceIndexOffset.setValue(22)

    # Run the export
    opExport.run_export()

    # Turn the filename pattern into a glob by substituting a sentinel
    # slice index and replacing it with '*'.
    globstring = self._stack_filepattern.format(slice_index=999)
    globstring = globstring.replace('999', '*')

    opReader = OpStackLoader(graph=self.graph)
    opReader.globstring.setValue(globstring)

    # (The OpStackLoader produces txyzc order.)
    opReorderAxes = OpReorderAxes(graph=self.graph)
    opReorderAxes.AxisOrder.setValue(self._axisorder)
    opReorderAxes.Input.connect(opReader.stack)

    readData = opReorderAxes.Output[:].wait()
    logger.debug("Expected shape={}".format(self.testData.shape))
    logger.debug("Read shape={}".format(readData.shape))

    assert opReorderAxes.Output.meta.shape == self.testData.shape, "Exported files were of the wrong shape or number."
    assert (opReorderAxes.Output[:].wait() == self.testData.view(
        numpy.ndarray)).all(), "Exported data was not correct"
    opReorderAxes.cleanUp()
    opReader.cleanUp()
def testBasic_2d_Sequence(self):
    """Export a random zyxc volume as a 2D png sequence and verify by reading the stack back."""
    data = 255 * numpy.random.random((10, 50, 100, 3))
    data = data.astype(numpy.uint8)
    data = vigra.taggedView(data, vigra.defaultAxistags("zyxc"))

    # Must run this through an operator
    # Can't use opExport.setValue() because because OpStackWriter can't work with ValueRequests
    graph = Graph()
    opData = OpBlockedArrayCache(graph=graph)
    opData.BlockShape.setValue(data.shape)
    opData.Input.setValue(data)

    filepattern = self._tmpdir + "/test_export_x{x_start}-{x_stop}_y{y_start}-{y_stop}_z{slice_index}"
    opExport = OpExportSlot(graph=graph)
    opExport.Input.connect(opData.Output)
    opExport.OutputFormat.setValue("png sequence")
    opExport.OutputFilenameFormat.setValue(filepattern)
    opExport.CoordinateOffset.setValue((10, 20, 30, 0))

    opExport.run_export()

    # Build a glob for the exported files from the actual export path.
    export_pattern = opExport.ExportPath.value
    globstring = export_pattern.format(slice_index=999)
    globstring = globstring.replace("999", "*")

    opReader = OpStackLoader(graph=graph)
    try:
        opReader.globstring.setValue(globstring)

        assert opReader.stack.meta.shape == data.shape, "Exported files were of the wrong shape or number."
        assert (opReader.stack[:].wait() == data.view(numpy.ndarray)).all(), "Exported data was not correct"
    finally:
        opReader.cleanUp()
def test_Writer(self):
    """Round-trip test: export cached test data as a multipage-tiff sequence,
    then read it back with OpStackLoader and compare against the original."""
    opData = OpArrayCache( graph=self.graph )
    opData.blockShape.setValue( self.testData.shape )
    opData.Input.setValue( self.testData )

    opExport = OpExportMultipageTiffSequence(graph=self.graph)
    opExport.FilepathPattern.setValue( self._stack_filepattern )
    opExport.Input.connect( opData.Output )
    # Offset is folded into the exported filenames' {slice_index}.
    opExport.SliceIndexOffset.setValue(22)

    # Run the export
    opExport.run_export()

    # Substitute a sentinel slice index, then wildcard it for globbing.
    globstring = self._stack_filepattern.format( slice_index=999 )
    globstring = globstring.replace('999', '*')

    opReader = OpStackLoader( graph=self.graph )
    opReader.globstring.setValue( globstring )

    # (The OpStackLoader produces txyzc order.)
    opReorderAxes = OpReorderAxes( graph=self.graph )
    opReorderAxes.AxisOrder.setValue( self._axisorder )
    opReorderAxes.Input.connect( opReader.stack )

    readData = opReorderAxes.Output[:].wait()
    logger.debug("Expected shape={}".format( self.testData.shape ) )
    logger.debug("Read shape={}".format( readData.shape ) )

    assert opReorderAxes.Output.meta.shape == self.testData.shape, "Exported files were of the wrong shape or number."
    assert (opReorderAxes.Output[:].wait() == self.testData.view( numpy.ndarray )).all(), "Exported data was not correct"
    opReorderAxes.cleanUp()
    opReader.cleanUp()
def testLotsOfOptions(self):
    """Run ilastik headless with many batch-export options, then verify the exported png sequence."""
    #OLD_LAZYFLOW_STATUS_MONITOR_SECONDS = os.getenv("LAZYFLOW_STATUS_MONITOR_SECONDS", None)
    #os.environ["LAZYFLOW_STATUS_MONITOR_SECONDS"] = "1"

    # NOTE: In this test, cmd-line args to nosetests will also end up getting "parsed" by ilastik.
    #       That shouldn't be an issue, since the pixel classification workflow ignores unrecognized options.
    #       See if __name__ == __main__ section, below.
    args = []
    args.append( "--project=" + self.PROJECT_FILE )
    args.append( "--headless" )
    #args.append( "--sys_tmp_dir=/tmp" )

    # Batch export options
    args.append( '--export_source=Simple Segmentation' )
    args.append( '--output_format=png sequence' ) # If we were actually launching from the command line, 'png sequence' would be in quotes...
    args.append( "--output_filename_format={dataset_dir}/{nickname}_segmentation_z{slice_index}.png" )
    args.append( "--export_dtype=uint8" )
    args.append( "--output_axis_order=zxyc" )

    args.append( "--pipeline_result_drange=(0,2)" )
    args.append( "--export_drange=(0,255)" )

    args.append( "--cutout_subregion=[(0,50,50,0,0), (1, 150, 150, 50, 1)]" )
    args.append( self.SAMPLE_DATA )

    old_sys_argv = list(sys.argv)
    sys.argv = ['ilastik.py'] # Clear the existing commandline args so it looks like we're starting fresh.
    sys.argv += args

    # Start up the ilastik.py entry script as if we had launched it from the command line
    # This will execute the batch mode script
    try:
        self.ilastik_startup.main()
    finally:
        # Always restore the real command line for subsequent tests.
        sys.argv = old_sys_argv

    #     if OLD_LAZYFLOW_STATUS_MONITOR_SECONDS:
    #         os.environ["LAZYFLOW_STATUS_MONITOR_SECONDS"] = OLD_LAZYFLOW_STATUS_MONITOR_SECONDS

    # Examine the exported stack.
    output_path = self.SAMPLE_DATA[:-4] + "_segmentation_z{slice_index}.png"
    globstring = output_path.format( slice_index=999 )
    globstring = globstring.replace('999', '*')

    opReader = OpStackLoader( graph=Graph() )
    opReader.globstring.setValue( globstring )

    # (The OpStackLoader produces txyzc order.)
    opReorderAxes = OpReorderAxes( graph=Graph() )
    opReorderAxes.AxisOrder.setValue( 'tzyxc' )
    opReorderAxes.Input.connect( opReader.stack )

    try:
        readData = opReorderAxes.Output[:].wait()

        # Check basic attributes
        assert readData.shape[:-1] == self.data[0:1, 50:150, 50:150, 0:50, 0:1].shape[:-1] # Assume channel is last axis
        assert readData.shape[-1] == 1, "Wrong number of channels. Expected 1, got {}".format( readData.shape[-1] )
    finally:
        # Clean-up.
        opReorderAxes.cleanUp()
        opReader.cleanUp()
def testLotsOfOptions(self):
    """
    Run ilastik headless with a full set of batch-export options and
    verify the exported png prediction sequence.
    """
    # NOTE: In this test, cmd-line args to nosetests will also end up getting "parsed" by ilastik.
    #       That shouldn't be an issue, since the pixel classification workflow ignores unrecognized options.
    #       See if __name__ == __main__ section, below.
    args = []
    args.append("--project=" + self.PROJECT_FILE)
    args.append("--headless")
    #args.append( "--sys_tmp_dir=/tmp" )

    # Batch export options
    args.append( '--output_format=png sequence' ) # If we were actually launching from the command line, 'png sequence' would be in quotes...
    args.append( "--output_filename_format={dataset_dir}/{nickname}_prediction_z{slice_index}.png" )
    args.append("--export_dtype=uint8")
    args.append("--output_axis_order=zxyc")

    args.append("--pipeline_result_drange=(0.0,1.0)")
    args.append("--export_drange=(0,255)")

    args.append("--cutout_subregion=[(0,50,50,0,0), (1, 150, 150, 50, 2)]")
    args.append(self.SAMPLE_DATA)

    # BUGFIX: save and restore sys.argv so this test's fake command line
    # doesn't leak into other tests run in the same process (the sibling
    # variants of this test already do this).
    old_sys_argv = list(sys.argv)
    sys.argv = ['ilastik.py'] # Clear the existing commandline args so it looks like we're starting fresh.
    sys.argv += args

    # Start up the ilastik.py entry script as if we had launched it from the command line
    # This will execute the batch mode script
    try:
        self.ilastik_startup.main()
    finally:
        sys.argv = old_sys_argv

    # Examine the exported stack.
    output_path = self.SAMPLE_DATA[:-4] + "_prediction_z{slice_index}.png"
    globstring = output_path.format(slice_index=999)
    globstring = globstring.replace('999', '*')

    opReader = OpStackLoader(graph=Graph())
    opReader.globstring.setValue(globstring)

    # (The OpStackLoader produces txyzc order.)
    opReorderAxes = OpReorderAxes(graph=Graph())
    opReorderAxes.AxisOrder.setValue('txyzc')
    opReorderAxes.Input.connect(opReader.stack)

    # BUGFIX: tear the operators down even when an assertion fails.
    try:
        readData = opReorderAxes.Output[:].wait()

        # Check basic attributes
        assert readData.shape[:-1] == self.data[0:1, 50:150, 50:150, 0:50, 0:2].shape[:-1] # Assume channel is last axis
        assert readData.shape[-1] == 2, "Wrong number of channels. Expected 2, got {}".format(readData.shape[-1])
    finally:
        # Clean-up.
        opReorderAxes.cleanUp()
        opReader.cleanUp()
def testLotsOfOptions(self):
    """
    Launch the ilastik.py entry script headless with a full set of
    batch-export options, then read back and sanity-check the exported
    png prediction sequence.
    """
    # NOTE: In this test, cmd-line args to nosetests will also end up getting "parsed" by ilastik.
    #       That shouldn't be an issue, since the pixel classification workflow ignores unrecognized options.
    #       See if __name__ == __main__ section, below.
    args = []
    args.append( "--project=" + self.PROJECT_FILE )
    args.append( "--headless" )
    args.append( "--sys_tmp_dir=/tmp" )

    # Batch export options
    args.append( '--output_format=png sequence' ) # If we were actually launching from the command line, 'png sequence' would be in quotes...
    args.append( "--output_filename_format={dataset_dir}/{nickname}_prediction_z{slice_index}.png" )
    args.append( "--export_dtype=uint8" )
    args.append( "--output_axis_order=zxyc" )

    args.append( "--pipeline_result_drange=(0.0,1.0)" )
    args.append( "--export_drange=(0,255)" )

    args.append( "--cutout_subregion=[(0,50,50,0,0), (1, 150, 150, 50, 2)]" )
    args.append( self.SAMPLE_DATA )

    # BUGFIX: save/restore sys.argv so the fake command line doesn't leak
    # into other tests running in the same process.
    old_sys_argv = list(sys.argv)
    sys.argv = ['ilastik.py'] # Clear the existing commandline args so it looks like we're starting fresh.
    sys.argv += args

    # Start up the ilastik.py entry script as if we had launched it from the command line
    # This will execute the batch mode script
    # NOTE(review): 'imp' is deprecated in favor of importlib; kept here to
    # avoid changing the file's import surface.
    try:
        ilastik_entry_file_path = os.path.join( os.path.split( ilastik.__file__ )[0], "../ilastik.py" )
        imp.load_source( 'main', ilastik_entry_file_path )
    finally:
        sys.argv = old_sys_argv

    # Examine the exported stack.
    output_path = self.SAMPLE_DATA[:-4] + "_prediction_z{slice_index}.png"
    globstring = output_path.format( slice_index=999 )
    globstring = globstring.replace('999', '*')

    opReader = OpStackLoader( graph=Graph() )
    opReader.globstring.setValue( globstring )

    # (The OpStackLoader produces txyzc order.)
    opReorderAxes = OpReorderAxes( graph=Graph() )
    opReorderAxes.AxisOrder.setValue( 'txyzc' )
    opReorderAxes.Input.connect( opReader.stack )

    try:
        readData = opReorderAxes.Output[:].wait()

        # Check basic attributes
        assert readData.shape[:-1] == self.data[0:1, 50:150, 50:150, 0:50, 0:2].shape[:-1] # Assume channel is last axis
        # BUGFIX: this assertion's message string was broken across a
        # physical line (a syntax error); re-joined into one literal.
        assert readData.shape[-1] == 2, \
            "Wrong number of channels. Expected 2, got {}".format( readData.shape[-1] )
    finally:
        # Clean-up.
        opReorderAxes.cleanUp()
        opReader.cleanUp()
def _chooseDirectory(self):
    """Prompt the user for a stack directory, remember it, and populate the
    file list and glob-pattern field from its contents."""
    # Find the directory of the most recently opened image file
    mostRecentStackDirectory = PreferencesManager().get(
        'DataSelection', 'recent stack directory')
    if mostRecentStackDirectory is not None:
        defaultDirectory = os.path.split(mostRecentStackDirectory)[0]
    else:
        defaultDirectory = os.path.expanduser('~')

    options = QFileDialog.Options(QFileDialog.ShowDirsOnly)
    if ilastik.config.cfg.getboolean("ilastik", "debug"):
        # Native dialogs can misbehave under debuggers; use Qt's own.
        options |= QFileDialog.DontUseNativeDialog

    # Launch the "Open File" dialog
    directory = QFileDialog.getExistingDirectory(
        self, "Image Stack Directory", defaultDirectory, options=options)

    if directory.isNull():
        # User cancelled
        return

    directory = encode_from_qstring(directory)
    # Remember this choice for next time.
    PreferencesManager().set('DataSelection', 'recent stack directory', directory)

    self.directoryEdit.setText(decode_to_qstring(directory))
    globstring = self._getGlobString(directory)
    if globstring:
        filenames = OpStackLoader.expandGlobStrings(globstring)
        self._updateFileList(sorted(filenames))

        # As a convenience, also show the glob string in the pattern field
        self.patternEdit.setText(decode_to_qstring(globstring))
def _attemptOpenAsStack(self, filePath):
    """Return (reader, stack_slot) if filePath is a glob pattern, else (None, None)."""
    if '*' not in filePath:
        return (None, None)
    reader = OpStackLoader(parent=self)
    reader.globstring.setValue(filePath)
    return (reader, reader.stack)
def test_zyxc_stack_c(self):
    """Stack 3D+c tiff slices along the existing channel axis."""
    # Data preparation only uses the t axis to create a tiff series of
    # 3D+c data; with stack_existing_channels=True the expected volume
    # is corrected to 'czyx'.
    expected, pattern = self._prepare_data(
        "rand_3dc_stack_c", (5, 22, 33, 44, 2), "tzyxc", "t",
        stack_existing_channels=True)

    loader = OpStackLoader(graph=Graph())
    loader.SequenceAxis.setValue("c")
    loader.globstring.setValue(pattern)

    meta = loader.stack.meta
    assert len(meta.axistags) == 4
    assert meta.getAxisKeys() == list("czyx")
    assert meta.dtype == expected.dtype

    stacked = vigra.taggedView(loader.stack[:].wait(), "czyx")
    assert (stacked == expected).all(), \
        "3D+c Volume stacked along c did not match expected data."
def _attemptOpenAsStack(self, filePath):
    """Return ([reader], stack_slot) if filePath denotes a file stack, else ([], None)."""
    looks_like_stack = '*' in filePath or os.path.pathsep in filePath
    if not looks_like_stack:
        return ([], None)
    reader = OpStackLoader(parent=self)
    reader.globstring.setValue(filePath)
    return ([reader], reader.stack)
def test_Writer(self):
    """Export cached test data via OpStackWriter, then read the stack back and verify it."""
    opData = OpBlockedArrayCache(graph=self.graph)
    opData.BlockShape.setValue(self.testData.shape)
    opData.Input.setValue(self.testData)

    opWriter = OpStackWriter(graph=self.graph)
    opWriter.FilepathPattern.setValue(self._stack_filepattern)
    opWriter.Input.connect(opData.Output)
    # opWriter.Input.setValue( self.testData )
    opWriter.SliceIndexOffset.setValue(22)

    # Run the export
    opWriter.run_export()

    globstring = self._stack_filepattern.format(slice_index=999)
    globstring = globstring.replace("999", "*")

    opReader = OpStackLoader(graph=self.graph)
    opReader.globstring.setValue(globstring)

    # (The OpStackLoader might produce different order.)
    opReorderAxes = OpReorderAxes(graph=self.graph)
    opReorderAxes.AxisOrder.setValue(self._axisorder)
    opReorderAxes.Input.connect(opReader.stack)

    # BUGFIX: clean up the reader operators (sibling tests do this; here
    # they were previously leaked), and do so even on assertion failure.
    try:
        readData = opReorderAxes.Output[:].wait()
        logger.debug("Expected shape={}".format(self.testData.shape))
        logger.debug("Read shape={}".format(readData.shape))

        assert (opReorderAxes.Output.meta.shape == self.testData.shape
                ), "Exported files were of the wrong shape or number."
        assert (opReorderAxes.Output[:].wait() == self.testData.view(
            numpy.ndarray)).all(), "Exported data was not correct"
    finally:
        opReorderAxes.cleanUp()
        opReader.cleanUp()
def _chooseDirectory(self):
    """Prompt the user for a stack directory, remember it, and populate the
    file list and glob-pattern field from its contents."""
    # Find the directory of the most recently opened image file
    mostRecentStackDirectory = PreferencesManager().get("DataSelection", "recent stack directory")
    if mostRecentStackDirectory is not None:
        defaultDirectory = os.path.split(mostRecentStackDirectory)[0]
    else:
        defaultDirectory = os.path.expanduser("~")

    options = QFileDialog.Options(QFileDialog.ShowDirsOnly)
    if ilastik.config.cfg.getboolean("ilastik", "debug"):
        # Native dialogs can misbehave under debuggers; use Qt's own.
        options |= QFileDialog.DontUseNativeDialog

    # Launch the "Open File" dialog
    directory = QFileDialog.getExistingDirectory(
        self, "Image Stack Directory", defaultDirectory, options=options)

    if directory.isNull():
        # User cancelled
        return

    directory = encode_from_qstring(directory)
    # Remember this choice for next time.
    PreferencesManager().set("DataSelection", "recent stack directory", directory)

    self.directoryEdit.setText(decode_to_qstring(directory))
    globstring = self._getGlobString(directory)
    if globstring:
        filenames = OpStackLoader.expandGlobStrings(globstring)
        self._updateFileList(sorted(filenames))

        # As a convenience, also show the glob string in the pattern field
        self.patternEdit.setText(decode_to_qstring(globstring))
def _attemptOpenAsStack(self, filePath):
    """Return ([reader], stack_slot) if filePath denotes a file stack, else ([], None)."""
    is_stack_spec = "*" in filePath or os.path.pathsep in filePath
    if not is_stack_spec:
        return ([], None)
    reader = OpStackLoader(parent=self)
    reader.SequenceAxis.connect(self.SequenceAxis)
    reader.globstring.setValue(filePath)
    return ([reader], reader.stack)
def __init__(self, graph, register=True):
    """
    Construct the fixed child-operator pipeline on the shared graph.

    :param graph: the lazyflow graph all child operators are created on
    :param register: forwarded to Operator.__init__
    """
    Operator.__init__(self, graph, register)
    self.graph = graph
    # Child operators, all created on the same graph.
    self.loader = OpStackLoader(self.graph)
    self.op5ifyer = Op5ifyer(self.graph)
    self.outpiper = OpArrayPiper(self.graph)
    self.inverter = OpGrayscaleInverter(self.graph)
    self.converter = OpRgbToGrayscale(self.graph)
def importStackAsLocalDataset(self, info, sequence_axis='t'):
    """
    Add the given stack data to the project file as a local dataset.
    Does not update the topLevelOperator.

    :param info: A DatasetInfo object.
                 Note: info.filePath must be a str which lists the stack files, delimited with os.path.pathsep
                 Note: info will be MODIFIED by this function.  Use the modified info when assigning it to a dataset.
    """
    self.progressSignal.emit(0)

    projectFileHdf5 = self.topLevelOperator.ProjectFile.value

    globstring = info.filePath
    info.location = DatasetInfo.Location.ProjectInternal
    firstPathParts = PathComponents(info.filePath.split(os.path.pathsep)[0])
    # Replace the file list with a short display pattern; the real data
    # is copied into the project file below.
    info.filePath = firstPathParts.externalDirectory + '/??' + firstPathParts.extension
    info.fromstack = True

    # Use absolute path
    cwd = self.topLevelOperator.WorkingDirectory
    if os.path.pathsep not in globstring and not os.path.isabs(globstring):
        globstring = os.path.normpath( os.path.join(cwd, globstring) )

    if firstPathParts.extension.lower() in OpTiffReader.TIFF_EXTS:
        # Special loader for TIFFs
        opLoader = OpTiffSequenceReader( parent=self.topLevelOperator.parent )
        opLoader.SequenceAxis.setValue(sequence_axis)
        opLoader.GlobString.setValue(globstring)
        data_slot = opLoader.Output
    else:
        # All other sequences (e.g. pngs, jpegs, etc.)
        opLoader = OpStackLoader( parent=self.topLevelOperator.parent )
        opLoader.SequenceAxis.setValue(sequence_axis)
        opLoader.globstring.setValue(globstring)
        data_slot = opLoader.stack

    try:
        # Stream the loaded stack into the project's hdf5 file.
        opWriter = OpH5WriterBigDataset(parent=self.topLevelOperator.parent)
        opWriter.hdf5File.setValue(projectFileHdf5)
        opWriter.hdf5Path.setValue(self.topGroupName + '/local_data/' + info.datasetId)
        opWriter.CompressionEnabled.setValue(False)
        # We assume that the main bottleneck is the hard disk,
        # so adding lots of threads to access it at once seems like a bad idea.
        opWriter.BatchSize.setValue(1)
        opWriter.Image.connect( data_slot )

        # Forward progress from the writer directly to our applet
        opWriter.progressSignal.subscribe( self.progressSignal.emit )

        success = opWriter.WriteImage.value
    finally:
        opWriter.cleanUp()
        opLoader.cleanUp()

    self.progressSignal.emit(100)

    return success
def testBasic_MultipageTiffSequence(self):
    """Export a random tzyxc volume as a multipage-tiff sequence and verify by reading it back."""
    data = 255 * numpy.random.random((5, 10, 50, 100, 3))
    data = data.astype(numpy.uint8)
    data = vigra.taggedView(data, vigra.defaultAxistags('tzyxc'))

    # Must run this through an operator
    # Can't use opExport.setValue() because because OpStackWriter can't work with ValueRequests
    graph = Graph()
    opData = OpArrayCache(graph=graph)
    opData.blockShape.setValue(data.shape)
    opData.Input.setValue(data)

    filepattern = self._tmpdir + '/test_export_x{x_start}-{x_stop}_y{y_start}-{y_stop}_t{slice_index}'
    opExport = OpExportSlot(graph=graph)
    opExport.Input.connect(opData.Output)
    opExport.OutputFormat.setValue('multipage tiff sequence')
    opExport.OutputFilenameFormat.setValue(filepattern)
    opExport.CoordinateOffset.setValue((7, 10, 20, 30, 0))

    opExport.run_export()

    # Build a glob for the exported files from the actual export path.
    export_pattern = opExport.ExportPath.value
    globstring = export_pattern.format(slice_index=999)
    globstring = globstring.replace('999', '*')

    opReader = OpStackLoader(graph=graph)
    opReader.globstring.setValue(globstring)

    # (The OpStackLoader produces txyzc order.)
    opReorderAxes = OpReorderAxes(graph=graph)
    opReorderAxes.AxisOrder.setValue('tzyxc')
    opReorderAxes.Input.connect(opReader.stack)

    assert opReorderAxes.Output.meta.shape == data.shape, "Exported files were of the wrong shape or number."
    assert (opReorderAxes.Output[:].wait() == data.view(
        numpy.ndarray)).all(), "Exported data was not correct"

    # Cleanup
    opReorderAxes.cleanUp()
    opReader.cleanUp()
def testBasic_MultipageTiffSequence(self):
    """Export a random tzyxc volume as a multipage-tiff sequence and verify by reading it back."""
    data = 255 * numpy.random.random( (5, 10, 50,100, 3) )
    data = data.astype( numpy.uint8 )
    data = vigra.taggedView( data, vigra.defaultAxistags('tzyxc') )

    # Must run this through an operator
    # Can't use opExport.setValue() because because OpStackWriter can't work with ValueRequests
    graph = Graph()
    opData = OpArrayCache( graph=graph )
    opData.blockShape.setValue( data.shape )
    opData.Input.setValue( data )

    filepattern = self._tmpdir + '/test_export_x{x_start}-{x_stop}_y{y_start}-{y_stop}_t{slice_index}'
    opExport = OpExportSlot(graph=graph)
    opExport.Input.connect( opData.Output )
    opExport.OutputFormat.setValue( 'multipage tiff sequence' )
    opExport.OutputFilenameFormat.setValue( filepattern )
    opExport.CoordinateOffset.setValue( (7, 10, 20, 30, 0) )

    opExport.run_export()

    # Build a glob for the exported files from the actual export path.
    export_pattern = opExport.ExportPath.value
    globstring = export_pattern.format( slice_index=999 )
    globstring = globstring.replace('999', '*')

    opReader = OpStackLoader( graph=graph )
    opReader.globstring.setValue( globstring )

    # (The OpStackLoader produces txyzc order.)
    opReorderAxes = OpReorderAxes( graph=graph )
    opReorderAxes.AxisOrder.setValue( 'tzyxc' )
    opReorderAxes.Input.connect( opReader.stack )

    assert opReorderAxes.Output.meta.shape == data.shape, "Exported files were of the wrong shape or number."
    assert (opReorderAxes.Output[:].wait() == data.view( numpy.ndarray )).all(), "Exported data was not correct"

    # Cleanup
    opReorderAxes.cleanUp()
    opReader.cleanUp()
def _applyPattern(self):
    """Expand the user-entered glob string(s) into a file list, handling
    plain image globs and HDF5/N5 sequence globs separately."""
    globStrings = self.patternEdit.text()
    H5EXTS = OpStreamingH5N5SequenceReaderM.H5EXTS
    N5EXTS = OpStreamingH5N5SequenceReaderM.N5EXTS
    filenames = []
    # see if some glob strings include HDF5 and/or N5 files
    globStrings = globStrings.split(os.path.pathsep)
    pcs = [PathComponents(x) for x in globStrings]
    is_h5_n5 = [x.extension in (H5EXTS + N5EXTS) for x in pcs]

    # Partition the globs: h5/n5 entries vs. everything else.
    h5GlobStrings = os.path.pathsep.join(
        [x for x, y in zip(globStrings, is_h5_n5) if y is True])
    globStrings = os.path.pathsep.join(
        [x for x, y in zip(globStrings, is_h5_n5) if y is False])

    filenames.extend(OpStackLoader.expandGlobStrings(globStrings))

    # Try the single-file h5/n5 reader first; checkGlobString raises if
    # the glob doesn't match that reader's rules.
    try:
        OpStreamingH5N5SequenceReaderS.checkGlobString(h5GlobStrings)
        # OK, if nothing raised there is a single h5 file in h5GlobStrings:
        pathComponents = PathComponents(
            h5GlobStrings.split(os.path.pathsep)[0])
        h5file = OpStreamingH5N5Reader.get_h5_n5_file(
            pathComponents.externalPath, mode="r")
        filenames.extend(
            "{}/{}".format(pathComponents.externalPath, internal)
            for internal in OpStreamingH5N5SequenceReaderS.
            expandGlobStrings(h5file, h5GlobStrings))
    except (
            OpStreamingH5N5SequenceReaderS.WrongFileTypeError,
            OpStreamingH5N5SequenceReaderS.NotTheSameFileError,
            OpStreamingH5N5SequenceReaderS.NoInternalPlaceholderError,
            OpStreamingH5N5SequenceReaderS.ExternalPlaceholderError,
    ):
        pass

    # Then the multi-file h5/n5 reader.
    try:
        OpStreamingH5N5SequenceReaderM.checkGlobString(h5GlobStrings)
        filenames.extend(
            "{}/{}".format(external, internal)
            for external, internal in zip(
                *OpStreamingH5N5SequenceReaderM.expandGlobStrings(
                    h5GlobStrings)))
    except (
            OpStreamingH5N5SequenceReaderM.WrongFileTypeError,
            OpStreamingH5N5SequenceReaderM.SameFileError,
            OpStreamingH5N5SequenceReaderM.NoExternalPlaceholderError,
            OpStreamingH5N5SequenceReaderM.InternalPlaceholderError,
    ):
        pass

    self._updateFileList(filenames)
def test_zyxc(self):
    """Load a stack of 2D+c slices back into a 3D+c volume and verify it."""
    expected, pattern = self._prepare_data(
        'rand_3dc', (10, 50, 100, 3), 'zyxc', 'z')

    loader = OpStackLoader(graph=Graph())
    loader.globstring.setValue(pattern)

    meta = loader.stack.meta
    assert len(meta.axistags) == 4
    assert meta.getAxisKeys() == list('zyxc')
    assert meta.dtype == expected.dtype

    loaded = vigra.taggedView(loader.stack[:].wait(), 'zyxc')
    assert (loaded == expected).all(), \
        "3D+c Volume from stack did not match expected data."
def test_txyz(self):
    """Load a time series of 3D volumes and compare against the expected tzyx data."""
    expected_tzyx, pattern = self._prepare_data_tzyx()

    loader = OpStackLoader(graph=Graph())
    loader.globstring.setValue(pattern)

    meta = loader.stack.meta
    assert len(meta.axistags) == 5
    assert meta.getAxisKeys() == list('tzyxc')
    assert meta.dtype == expected_tzyx.dtype

    loaded = vigra.taggedView(loader.stack[:].wait(), 'tzyxc')
    loaded_tzyx = loaded.withAxes(*'tzyx')
    assert (loaded_tzyx == expected_tzyx).all(), \
        "4D Volume from stack did not match expected data."
def test_tzyxc(self):
    """Load a time series of 3D+c volumes back into a 4D+c volume."""
    expected, pattern = self._prepare_data(
        "rand_4dc", (5, 10, 50, 100, 3), "tzyxc", "t")

    loader = OpStackLoader(graph=Graph())
    loader.globstring.setValue(pattern)

    meta = loader.stack.meta
    assert len(meta.axistags) == 5
    assert meta.getAxisKeys() == list("tzyxc")
    assert meta.dtype == expected.dtype

    loaded = vigra.taggedView(loader.stack[:].wait(), "tzyxc")
    assert (loaded == expected).all(), \
        "4D+c Volume from stack did not match expected data."
def test_xyz(self):
    """Load a stack of 2D slices (no channel axis) into a 3D volume."""
    expected, pattern = self._prepare_data(
        "rand_3d", (11, 99, 98), "zyx", "z")

    loader = OpStackLoader(graph=Graph())
    loader.globstring.setValue(pattern)

    meta = loader.stack.meta
    # The loader reports a singleton channel axis.
    assert len(meta.axistags) == 4
    assert meta.getAxisKeys() == list("zyxc")
    assert meta.dtype == expected.dtype

    loaded = vigra.taggedView(loader.stack[:].wait(), "zyxc")
    loaded = loaded.withAxes(*"zyx")
    assert (loaded == expected).all(), \
        "3D Volume from stack did not match expected data."
def _applyPattern(self):
    """Expand the user-entered glob string(s) into a file list, handling
    plain image globs and HDF5/N5 sequence globs separately."""
    globStrings = self.patternEdit.text()
    H5EXTS = OpStreamingH5N5SequenceReaderM.H5EXTS
    N5EXTS = OpStreamingH5N5SequenceReaderM.N5EXTS
    filenames = []
    # see if some glob strings include HDF5 and/or N5 files
    globStrings = globStrings.split(os.path.pathsep)
    pcs = [PathComponents(x) for x in globStrings]
    is_h5_n5 = [x.extension in (H5EXTS + N5EXTS) for x in pcs]

    # Partition the globs: h5/n5 entries vs. everything else.
    h5GlobStrings = os.path.pathsep.join([x for x, y in zip(globStrings, is_h5_n5) if y is True])
    globStrings = os.path.pathsep.join([x for x, y in zip(globStrings, is_h5_n5) if y is False])

    filenames.extend(OpStackLoader.expandGlobStrings(globStrings))

    # Try the single-file h5/n5 reader first; checkGlobString raises if
    # the glob doesn't match that reader's rules.
    try:
        OpStreamingH5N5SequenceReaderS.checkGlobString(h5GlobStrings)
        # OK, if nothing raised there is a single h5 file in h5GlobStrings:
        pathComponents = PathComponents(h5GlobStrings.split(os.path.pathsep)[0])
        h5file = OpStreamingH5N5Reader.get_h5_n5_file(pathComponents.externalPath, mode='r')
        filenames.extend(
            "{}/{}".format(pathComponents.externalPath, internal)
            for internal in OpStreamingH5N5SequenceReaderS.expandGlobStrings(h5file, h5GlobStrings))
    except (
            OpStreamingH5N5SequenceReaderS.WrongFileTypeError,
            OpStreamingH5N5SequenceReaderS.NotTheSameFileError,
            OpStreamingH5N5SequenceReaderS.NoInternalPlaceholderError,
            OpStreamingH5N5SequenceReaderS.ExternalPlaceholderError):
        pass

    # Then the multi-file h5/n5 reader.
    try:
        OpStreamingH5N5SequenceReaderM.checkGlobString(h5GlobStrings)
        filenames.extend(
            "{}/{}".format(external, internal)
            for external, internal in zip(*OpStreamingH5N5SequenceReaderM.expandGlobStrings(h5GlobStrings))
        )
    except (
            OpStreamingH5N5SequenceReaderM.WrongFileTypeError,
            OpStreamingH5N5SequenceReaderM.SameFileError,
            OpStreamingH5N5SequenceReaderM.NoExternalPlaceholderError,
            OpStreamingH5N5SequenceReaderM.InternalPlaceholderError):
        pass

    self._updateFileList(filenames)
def test_xyz_stack_c(self):
    """Stack single-channel 3D slices along a new channel axis."""
    expected, pattern = self._prepare_data(
        "rand_3d_stack_c", (2, 3, 5, 4), "czxy", "c")

    loader = OpStackLoader(graph=Graph())
    loader.SequenceAxis.setValue("c")
    loader.globstring.setValue(pattern)

    meta = loader.stack.meta
    assert len(meta.axistags) == 4
    assert meta.getAxisKeys() == list("czyx")
    assert meta.dtype == expected.dtype

    loaded = vigra.taggedView(loader.stack[:].wait(), "czyx")
    loaded = loaded.withAxes(*"czxy")
    assert (loaded == expected).all(), \
        "3D Volume stacked along c did not match expected data."
def test_stack_pngs(self):
    """Stack three 2D png images along a new channel axis and compare to hdf5 ground truth."""
    graph = Graph()
    op = OpStackLoader(graph=graph)
    op.SequenceAxis.setValue('c')

    globstring = os.path.join('data', 'inputdata', '3c[0-2].png')
    op.globstring.setValue(globstring)

    assert len(op.stack.meta.axistags) == 3
    assert op.stack.meta.getAxisKeys() == list('xyc')
    stack = op.stack[:].wait()

    # BUGFIX: use a context manager so the hdf5 file handle is always
    # closed (previously the file was left open).
    gt_path = os.path.join('data', 'inputdata', '3cRGB.h5')
    with h5py.File(gt_path, 'r') as h5File:
        expected = h5File['data']
        assert stack.dtype == expected.dtype
        assert stack.shape == expected.shape
        assert (stack == expected).all(), \
            "stacked 2d images did not match expected data."
def test_stack_pngs(self, inputdata_dir):
    """Stack three 2D png images along a new channel axis and compare to hdf5 ground truth.

    :param inputdata_dir: pytest fixture pointing at the test-data directory
    """
    graph = Graph()
    op = OpStackLoader(graph=graph)
    op.SequenceAxis.setValue("c")

    globstring = os.path.join(inputdata_dir, "3c[0-2].png")
    op.globstring.setValue(globstring)

    assert len(op.stack.meta.axistags) == 3
    assert op.stack.meta.getAxisKeys() == list("xyc")
    stack = op.stack[:].wait()

    # BUGFIX: use a context manager so the hdf5 file handle is always
    # closed (previously the file was left open).
    gt_path = os.path.join(inputdata_dir, "3cRGB.h5")
    with h5py.File(gt_path, "r") as h5File:
        expected = h5File["data"]
        assert stack.dtype == expected.dtype
        assert stack.shape == expected.shape
        assert (stack == expected
                ).all(), "stacked 2d images did not match expected data."
def _applyPattern(self):
    """Expand the user-entered glob pattern and refresh the file list."""
    pattern = encode_from_qstring(self.patternEdit.text())
    self._updateFileList(OpStackLoader.expandGlobStrings(pattern))
def importStackAsLocalDataset(self, info, sequence_axis='t'):
    """
    Add the given stack data to the project file as a local dataset.
    Does not update the topLevelOperator.

    :param info: A DatasetInfo object.
                 Note: info.filePath must be a str which lists the stack files, delimited with os.path.pathsep
                 Note: info will be MODIFIED by this function.  Use the modified info when assigning it to a dataset.
    """
    self.progressSignal(0)

    projectFileHdf5 = self.topLevelOperator.ProjectFile.value

    globstring = info.filePath
    info.location = DatasetInfo.Location.ProjectInternal
    firstPathParts = PathComponents(
        info.filePath.split(os.path.pathsep)[0])
    # Replace the file list with a short display pattern; the real data
    # is copied into the project file below.
    info.filePath = firstPathParts.externalDirectory + '/??' + firstPathParts.extension
    info.fromstack = True

    # Use absolute path
    cwd = self.topLevelOperator.WorkingDirectory
    if os.path.pathsep not in globstring and not os.path.isabs(globstring):
        globstring = os.path.normpath(os.path.join(cwd, globstring))

    if firstPathParts.extension.lower() in OpTiffReader.TIFF_EXTS:
        # Special loader for TIFFs
        opLoader = OpTiffSequenceReader(
            parent=self.topLevelOperator.parent)
        opLoader.SequenceAxis.setValue(sequence_axis)
        opLoader.GlobString.setValue(globstring)
        data_slot = opLoader.Output
    elif firstPathParts.extension.lower() in (
            OpStreamingH5N5SequenceReaderM.H5EXTS +
            OpStreamingH5N5SequenceReaderM.N5EXTS):
        # Now use the .checkGlobString method of the stack readers
        # (each raises if the glob doesn't match its rules).
        isSingleFile = True
        try:
            OpStreamingH5N5SequenceReaderS.checkGlobString(globstring)
        except (OpStreamingH5N5SequenceReaderS.NoInternalPlaceholderError,
                OpStreamingH5N5SequenceReaderS.NotTheSameFileError,
                OpStreamingH5N5SequenceReaderS.ExternalPlaceholderError):
            isSingleFile = False

        isMultiFile = True
        try:
            OpStreamingH5N5SequenceReaderM.checkGlobString(globstring)
        except (OpStreamingH5N5SequenceReaderM.NoExternalPlaceholderError,
                OpStreamingH5N5SequenceReaderM.SameFileError,
                OpStreamingH5N5SequenceReaderM.InternalPlaceholderError):
            isMultiFile = False

        assert (not (isMultiFile and isSingleFile)), (
            "Something is wrong, glob string shouldn't allow both")
        assert (isMultiFile or isSingleFile), (
            "Glob string doesn't conform to h5 stack glob string rules")

        if isSingleFile:
            opLoader = OpStreamingH5N5SequenceReaderS(
                parent=self.topLevelOperator.parent)
        elif isMultiFile:
            opLoader = OpStreamingH5N5SequenceReaderM(
                parent=self.topLevelOperator.parent)

        opLoader.SequenceAxis.setValue(sequence_axis)
        opLoader.GlobString.setValue(globstring)
        data_slot = opLoader.OutputImage
    else:
        # All other sequences (e.g. pngs, jpegs, etc.)
        opLoader = OpStackLoader(parent=self.topLevelOperator.parent)
        opLoader.SequenceAxis.setValue(sequence_axis)
        opLoader.globstring.setValue(globstring)
        data_slot = opLoader.stack

    try:
        # Stream the loaded stack into the project's hdf5/n5 file.
        opWriter = OpH5N5WriterBigDataset(
            parent=self.topLevelOperator.parent)
        opWriter.h5N5File.setValue(projectFileHdf5)
        opWriter.h5N5Path.setValue(self.topGroupName + '/local_data/' +
                                   info.datasetId)
        opWriter.CompressionEnabled.setValue(False)
        # We assume that the main bottleneck is the hard disk,
        # so adding lots of threads to access it at once seems like a bad idea.
        opWriter.BatchSize.setValue(1)
        opWriter.Image.connect(data_slot)

        # Forward progress from the writer directly to our applet
        opWriter.progressSignal.subscribe(self.progressSignal)

        success = opWriter.WriteImage.value
    finally:
        opWriter.cleanUp()
        opLoader.cleanUp()

    self.progressSignal(100)

    return success
def testLotsOfOptions(self):
    """Run ilastik headless exporting 'Simple Segmentation' as a png sequence, then verify the exported stack."""
    # OLD_LAZYFLOW_STATUS_MONITOR_SECONDS = os.getenv("LAZYFLOW_STATUS_MONITOR_SECONDS", None)
    # os.environ["LAZYFLOW_STATUS_MONITOR_SECONDS"] = "1"

    # NOTE: In this test, cmd-line args to tests will also end up getting "parsed" by ilastik.
    #       That shouldn't be an issue, since the pixel classification workflow ignores unrecognized options.
    #       See if __name__ == __main__ section, below.
    args = []
    args.append("--project=" + self.PROJECT_FILE)
    args.append("--headless")
    # args.append( "--sys_tmp_dir=/tmp" )

    # Batch export options
    args.append("--export_source=Simple Segmentation")
    args.append(
        "--output_format=png sequence"
    )  # If we were actually launching from the command line, 'png sequence' would be in quotes...
    args.append("--output_filename_format={dataset_dir}/{nickname}_segmentation_z{slice_index}.png")
    args.append("--export_dtype=uint8")
    args.append("--output_axis_order=zxyc")

    args.append("--pipeline_result_drange=(0,2)")
    args.append("--export_drange=(0,255)")

    args.append("--cutout_subregion=[(0,10,10,0,0), (1, 20, 20, 5, 1)]")
    args.append(self.SAMPLE_DATA)

    old_sys_argv = list(sys.argv)
    sys.argv = ["ilastik.py"]  # Clear the existing commandline args so it looks like we're starting fresh.
    sys.argv += args

    # Start up the ilastik.py entry script as if we had launched it from the command line
    # This will execute the batch mode script
    try:
        self.ilastik_startup.main()
    finally:
        # Always restore the real command line for subsequent tests.
        sys.argv = old_sys_argv

    #     if OLD_LAZYFLOW_STATUS_MONITOR_SECONDS:
    #         os.environ["LAZYFLOW_STATUS_MONITOR_SECONDS"] = OLD_LAZYFLOW_STATUS_MONITOR_SECONDS

    # Examine the exported stack.
    output_path = self.SAMPLE_DATA[:-4] + "_segmentation_z{slice_index}.png"
    globstring = output_path.format(slice_index=999)
    globstring = globstring.replace("999", "*")

    opReader = OpStackLoader(graph=Graph())
    opReader.globstring.setValue(globstring)

    # (The OpStackLoader produces txyzc order.)
    opReorderAxes = OpReorderAxes(graph=Graph())
    opReorderAxes.AxisOrder.setValue("tzyxc")
    opReorderAxes.Input.connect(opReader.stack)
    try:
        readData = opReorderAxes.Output[:].wait()

        # Check basic attributes
        assert readData.shape[:-1] == (1, 10, 10, 5), readData.shape[:-1]  # Assume channel is last axis
        assert readData.shape[-1] == 1, "Wrong number of channels. Expected 1, got {}".format(readData.shape[-1])
    finally:
        # Clean-up.
        opReorderAxes.cleanUp()
        opReader.cleanUp()