def debug_with_new(shell):
    """
    (Function for debug and testing.)
    """
    #projFilePath = "/magnetic/synapse_debug_data/object_prediction.ilp"
    projFilePath = "/magnetic/stuart_object_predictions.ilp"

    # New project
    shell.createAndLoadNewProject(projFilePath)
    workflow = shell.projectManager.workflow

    # Add a file
    from ilastik.applets.dataSelection.opDataSelection import DatasetInfo

    rawInfo = DatasetInfo()
    #rawInfo.filePath = '/magnetic/synapse_debug_data/block256.h5/cube'
    #rawInfo.filePath = '/magnetic/synapse_small_4d.h5/volume/data'
    rawInfo.filePath = '/magnetic/validation_slices_20_40_3200_4000_1200_2000.h5/volume/data'
    opRawDataSelection = workflow.rawDataSelectionApplet.topLevelOperator
    opRawDataSelection.Dataset.resize(1)
    opRawDataSelection.Dataset[0].setValue(rawInfo)

    predictionInfo = DatasetInfo()
    #predictionInfo.filePath = '/magnetic/synapse_debug_data/block256_spots_predictions.h5/cube'
    #predictionInfo.filePath = '/magnetic/synapse_small_4d_synapse_predictions.h5/volume/data'
    predictionInfo.filePath = '/magnetic/validation_slices_20_40_3200_4000_1200_2000_pred.h5/volume/data'
    opPredDataSelection = workflow.predictionSelectionApplet.topLevelOperator
    opPredDataSelection.Dataset.resize(1)
    opPredDataSelection.Dataset[0].setValue(predictionInfo)

    # Select the feature drawer
    shell.setSelectedAppletDrawer(2)

def testBasic(self):
    graph = lazyflow.graph.Graph()
    reader = OperatorWrapper(OpDataSelection, graph=graph)
    reader.ProjectFile.setValue(self.projectFile)
    reader.WorkingDirectory.setValue(os.getcwd())
    reader.ProjectDataGroup.setValue('DataSelection/local_data')

    # Create a list of dataset infos . . .
    datasetInfos = []

    # npy
    info = DatasetInfo()
    # Will be read from the filesystem since the data won't be found in the project file.
    info.location = DatasetInfo.Location.ProjectInternal
    info.filePath = self.testNpyFileName
    info.internalPath = ""
    info.invertColors = False
    info.convertToGrayscale = False
    datasetInfos.append(info)

    # png
    info = DatasetInfo()
    info.location = DatasetInfo.Location.FileSystem
    info.filePath = self.testPngFileName
    info.internalPath = ""
    info.invertColors = False
    info.convertToGrayscale = False
    datasetInfos.append(info)

    reader.Dataset.setValues(datasetInfos)

    # Read the test files using the data selection operator and verify the contents
    npyData = reader.Image[0][...].wait()
    pngData = reader.Image[1][...].wait()

    # Check the file name output
    print(reader.ImageName[0].value)
    assert reader.ImageName[0].value == self.testNpyFileName
    assert reader.ImageName[1].value == self.testPngFileName

    # Check raw images
    assert npyData.shape == (10, 11, 1)
    for x in range(npyData.shape[0]):
        for y in range(npyData.shape[1]):
            assert npyData[x, y, 0] == x + y

    assert pngData.shape == (100, 200, 3)
    for x in range(pngData.shape[0]):
        for y in range(pngData.shape[1]):
            for c in range(pngData.shape[2]):
                assert pngData[x, y, c] == (x + y) % 256

def impl():
    projFilePath = self.PROJECT_FILE
    shell = self.shell

    # New project
    shell.createAndLoadNewProject(projFilePath, self.workflowClass())
    workflow = shell.projectManager.workflow

    # Add a file
    from ilastik.applets.dataSelection.opDataSelection import DatasetInfo
    info = DatasetInfo()
    info.filePath = self.SAMPLE_DATA
    opDataSelection = workflow.dataSelectionApplet.topLevelOperator
    opDataSelection.DatasetGroup.resize(1)
    opDataSelection.DatasetGroup[0][0].setValue(info)

    # Set some features
    opFeatures = workflow.featureSelectionApplet.topLevelOperator
    # sigma: 0.3 0.7 1.0 1.6 3.5 5.0 10.0
    selections = numpy.array([[True, False, False, False, False, False, False],
                              [True, False, False, False, False, False, False],
                              [True, False, False, False, False, False, False],
                              [False, False, False, False, False, False, False],
                              [False, False, False, False, False, False, False],
                              [False, False, False, False, False, False, False]])
    opFeatures.SelectionMatrix.setValue(selections)

    # Save and close
    shell.projectManager.saveProject()
    shell.ensureNoCurrentProject(assertClean=True)

def testWeirdAxisInfos(self):
    """
    If we add a dataset whose channel axis is in an unusual place and axis
    reordering is disabled (forceAxisOrder=False), the operator should read
    the data back unchanged.
    """
    weirdAxisFilename = os.path.join(self.workingDir, 'WeirdAxes.npy')
    expected_data = numpy.random.random((3, 100, 100))
    numpy.save(weirdAxisFilename, expected_data)

    info = DatasetInfo()
    info.filePath = weirdAxisFilename
    info.axistags = vigra.defaultAxistags('cxy')

    graph = Graph()
    op = OpDataSelectionGroup(graph=graph, forceAxisOrder=False)
    op.WorkingDirectory.setValue(self.workingDir)
    op.DatasetRoles.setValue(['RoleA'])

    op.DatasetGroup.resize(1)
    op.DatasetGroup[0].setValue(info)
    assert op.ImageGroup[0].ready()

    data_from_op = op.ImageGroup[0][:].wait()
    assert data_from_op.dtype == expected_data.dtype
    assert data_from_op.shape == expected_data.shape, (data_from_op.shape, expected_data.shape)
    assert (data_from_op == expected_data).all()

    # op.Image is a synonym for op.ImageGroup[0]
    assert op.Image.ready()
    assert (op.Image[:].wait() == expected_data).all()

    # Ensure that files opened by the inner operators are closed before we exit.
    op.DatasetGroup.resize(0)

def testBasic3DWrongAxes(self):
    """Test if a 3D file with intentionally wrong axes is rejected"""
    for fileName in self.imgFileNames3D:
        graph = lazyflow.graph.Graph()
        reader = OperatorWrapper(OpDataSelection, graph=graph)
        reader.ProjectFile.setValue(self.projectFile)
        reader.WorkingDirectory.setValue(os.getcwd())
        reader.ProjectDataGroup.setValue('DataSelection/local_data')

        info = DatasetInfo()
        # Will be read from the filesystem since the data won't be found in the project file.
        info.location = DatasetInfo.Location.ProjectInternal
        info.filePath = fileName
        info.internalPath = ""
        info.invertColors = False
        info.convertToGrayscale = False
        info.axistags = vigra.defaultAxistags('tzyc')

        try:
            reader.Dataset.setValues([info])
            assert False, "Should have thrown an exception!"
        except DatasetConstraintError:
            pass
        except:
            assert False, "Should have thrown a DatasetConstraintError!"

def test_fake_data_source(self):
    graph = lazyflow.graph.Graph()
    reader = OperatorWrapper(OpDataSelection, graph=graph,
                             operator_kwargs={'forceAxisOrder': False})
    reader.ProjectFile.setValue(self.projectFile)
    reader.WorkingDirectory.setValue(os.getcwd())
    reader.ProjectDataGroup.setValue('DataSelection/local_data')

    info = DatasetInfo()
    # Will be read from the filesystem since the data won't be found in the project file.
    info.location = DatasetInfo.Location.ProjectInternal
    info.filePath = self.testRawDataFileName
    info.internalPath = ""
    info.invertColors = False
    info.convertToGrayscale = False
    # Use *fake* data source
    info.realDataSource = False
    info.axistags = vigra.defaultAxistags('tczyx')
    info.laneShape = self.imgData.shape
    info.laneDtype = self.imgData.dtype

    reader.Dataset.setValues([info])

    # Verify that the data selection operator now returns fake data
    # with the expected shape and type
    imgData = reader.Image[0][...].wait()

    assert imgData.shape == self.imgData.shape
    assert imgData.dtype == self.imgData.dtype
    expected_fake_data = numpy.zeros(info.laneShape, dtype=info.laneDtype)
    numpy.testing.assert_array_equal(imgData, expected_fake_data)

def testBasic3DcStackFromGlobString(self):
    """Test if stacked 2d 3-channel files are loaded correctly"""
    # For some reason vigra saves 2D+c data compressed in gifs, so skip!
    self.compressedExtensions.append('.gif')
    for fileName in self.imgFileNameGlobs2Dc:
        graph = lazyflow.graph.Graph()
        reader = OperatorWrapper(OpDataSelection, graph=graph)
        reader.ProjectFile.setValue(self.projectFile)
        reader.WorkingDirectory.setValue(os.getcwd())
        reader.ProjectDataGroup.setValue('DataSelection/local_data')

        info = DatasetInfo()
        # Will be read from the filesystem since the data won't be found in the project file.
        info.location = DatasetInfo.Location.ProjectInternal
        info.filePath = fileName
        info.internalPath = ""
        info.invertColors = False
        info.convertToGrayscale = False

        reader.Dataset.setValues([info])

        # Read the test files using the data selection operator and verify the contents
        imgData3Dc = reader.Image[0][...].wait()

        # Check the file name output
        assert reader.ImageName[0].value == fileName
        # Check raw images
        assert imgData3Dc.shape == self.imgData3Dc.shape
        # skip this if image was saved compressed:
        if any(x in fileName.lower() for x in self.compressedExtensions):
            print("Skipping raw comparison for compressed data: {}".format(fileName))
            continue
        numpy.testing.assert_array_equal(imgData3Dc, self.imgData3Dc)

def testBasic3DcStackFromGlobString(self):
    """Test if stacked 2d 3-channel files are loaded correctly"""
    # For some reason vigra saves 2D+c data compressed in gifs, so skip!
    self.compressedExtensions.append('.gif')
    for fileName in self.imgFileNameGlobs2Dc:
        graph = lazyflow.graph.Graph()
        reader = OperatorWrapper(OpDataSelection, graph=graph,
                                 operator_kwargs={'forceAxisOrder': False})
        reader.ProjectFile.setValue(self.projectFile)
        reader.WorkingDirectory.setValue(os.getcwd())
        reader.ProjectDataGroup.setValue('DataSelection/local_data')

        info = DatasetInfo()
        # Will be read from the filesystem since the data won't be found in the project file.
        info.location = DatasetInfo.Location.ProjectInternal
        info.filePath = fileName
        info.internalPath = ""
        info.invertColors = False
        info.convertToGrayscale = False

        reader.Dataset.setValues([info])

        # Read the test files using the data selection operator and verify the contents
        imgData3Dc = reader.Image[0][...].wait()

        # Check the file name output
        assert reader.ImageName[0].value == fileName
        # Check raw images
        assert imgData3Dc.shape == self.imgData3Dc.shape, (imgData3Dc.shape, self.imgData3Dc.shape)
        # skip this if image was saved compressed:
        if any(x in fileName.lower() for x in self.compressedExtensions):
            print("Skipping raw comparison for compressed data: {}".format(fileName))
            continue
        numpy.testing.assert_array_equal(imgData3Dc, self.imgData3Dc)

def test_real_data_source(self):
    graph = lazyflow.graph.Graph()
    reader = OperatorWrapper(OpDataSelection, graph=graph,
                             operator_kwargs={'forceAxisOrder': False})
    reader.ProjectFile.setValue(self.projectFile)
    reader.WorkingDirectory.setValue(os.getcwd())
    reader.ProjectDataGroup.setValue('DataSelection/local_data')

    info = DatasetInfo()
    # Will be read from the filesystem since the data won't be found in the project file.
    info.location = DatasetInfo.Location.ProjectInternal
    info.filePath = self.testRawDataFileName
    info.internalPath = ""
    info.invertColors = False
    info.convertToGrayscale = False
    # Use real data source
    info.realDataSource = True

    reader.Dataset.setValues([info])

    # Read the test file using the data selection operator and verify the contents
    imgData = reader.Image[0][...].wait()
    assert imgData.shape == self.imgData.shape
    numpy.testing.assert_array_equal(imgData, self.imgData)

def testBasic3DWrongAxes(self):
    """Test if a 3D file with intentionally wrong axes is rejected"""
    for fileName in self.imgFileNames3D:
        graph = lazyflow.graph.Graph()
        reader = OperatorWrapper(OpDataSelection, graph=graph,
                                 operator_kwargs={'forceAxisOrder': False})
        reader.ProjectFile.setValue(self.projectFile)
        reader.WorkingDirectory.setValue(os.getcwd())
        reader.ProjectDataGroup.setValue('DataSelection/local_data')

        info = DatasetInfo()
        # Will be read from the filesystem since the data won't be found in the project file.
        info.location = DatasetInfo.Location.ProjectInternal
        info.filePath = fileName
        info.internalPath = ""
        info.invertColors = False
        info.convertToGrayscale = False
        info.axistags = vigra.defaultAxistags('tzyc')

        try:
            reader.Dataset.setValues([info])
            assert False, "Should have thrown an exception!"
        except DatasetConstraintError:
            pass
        except:
            assert False, "Should have thrown a DatasetConstraintError!"

def testBasic3D(self):
    """Test if 3d files are loaded correctly"""
    for fileName in self.imgFileNames3D:
        graph = lazyflow.graph.Graph()
        reader = OperatorWrapper(OpDataSelection, graph=graph,
                                 operator_kwargs={'forceAxisOrder': False})
        reader.ProjectFile.setValue(self.projectFile)
        reader.WorkingDirectory.setValue(os.getcwd())
        reader.ProjectDataGroup.setValue('DataSelection/local_data')

        info = DatasetInfo()
        # Will be read from the filesystem since the data won't be found in the project file.
        info.location = DatasetInfo.Location.ProjectInternal
        info.filePath = fileName
        info.internalPath = ""
        info.invertColors = False
        info.convertToGrayscale = False

        reader.Dataset.setValues([info])

        # Read the test files using the data selection operator and verify the contents
        imgData3D = reader.Image[0][...].wait()

        # Check the file name output
        assert reader.ImageName[0].value == fileName
        # Check raw images
        assert imgData3D.shape == self.imgData3D.shape, (imgData3D.shape, self.imgData3D.shape)
        numpy.testing.assert_array_equal(imgData3D, self.imgData3D)

def impl():
    projFilePath = self.PROJECT_FILE
    shell = self.shell

    # New project
    shell.createAndLoadNewProject(projFilePath)
    workflow = shell.projectManager.workflow

    # Add a file
    from ilastik.applets.dataSelection.opDataSelection import DatasetInfo
    info = DatasetInfo()
    info.filePath = self.SAMPLE_DATA
    opDataSelection = workflow.dataSelectionApplet.topLevelOperator
    opDataSelection.Dataset.resize(1)
    opDataSelection.Dataset[0].setValue(info)

    # Set some features
    opFeatures = workflow.featureSelectionApplet.topLevelOperator
    opFeatures.FeatureIds.setValue(OpPixelFeaturesPresmoothed.DefaultFeatureIds)
    opFeatures.Scales.setValue([0.3, 0.7, 1, 1.6, 3.5, 5.0, 10.0])
    # sigma: 0.3 0.7 1.0 1.6 3.5 5.0 10.0
    selections = numpy.array([[True, True, True, True, True, True, False],
                              [True, True, True, True, True, True, False],
                              [True, True, True, True, True, True, False],
                              [True, True, True, True, True, True, False],
                              [True, True, True, True, True, True, False],
                              [True, True, True, True, True, True, False]])
    opFeatures.SelectionMatrix.setValue(selections)

def impl():
    projFilePath = self.PROJECT_FILE
    shell = self.shell

    # New project
    shell.createAndLoadNewProject(projFilePath, self.workflowClass())
    workflow = shell.projectManager.workflow

    from ilastik.applets.dataSelection.opDataSelection import DatasetInfo
    opDataSelection = workflow.dataSelectionApplet.topLevelOperator
    for i, dataFile in enumerate(self.SAMPLE_DATA):
        # Add a file
        info = DatasetInfo()
        info.filePath = dataFile
        opDataSelection.DatasetGroup.resize(i + 1)
        opDataSelection.DatasetGroup[i][0].setValue(info)

    # Set some features
    opFeatures = workflow.featureSelectionApplet.topLevelOperator
    opFeatures.FeatureIds.setValue(OpPixelFeaturesPresmoothed.DefaultFeatureIds)
    opFeatures.Scales.setValue([0.3, 0.7, 1, 1.6, 3.5, 5.0, 10.0])
    # sigma: 0.3 0.7 1.0 1.6 3.5 5.0 10.0
    selections = numpy.array([[True, False, False, False, False, False, False],
                              [True, False, False, False, False, False, False],
                              [True, False, False, False, False, False, False],
                              [False, False, False, False, False, False, False],
                              [False, False, False, False, False, False, False],
                              [False, False, False, False, False, False, False]])
    opFeatures.SelectionMatrix.setValue(selections)

    # Save and close
    shell.projectManager.saveProject()
    shell.ensureNoCurrentProject(assertClean=True)

def debug_with_new(shell, workflow):
    """
    (Function for debug and testing.)
    """
    projFilePath = "/magnetic/test_watershed_project.ilp"

    # New project
    shell.createAndLoadNewProject(projFilePath)

    # Add a file
    from ilastik.applets.dataSelection.opDataSelection import DatasetInfo
    info = DatasetInfo()
    #info.filePath = '/magnetic/gigacube.h5'
    #info.filePath = '/magnetic/synapse_small.npy'
    info.filePath = '/magnetic/synapse_small.npy_results.h5'
    #info.filePath = '/magnetic/singleslice.h5'
    opDataSelection = workflow.dataSelectionApplet.topLevelOperator
    opDataSelection.Dataset.resize(1)
    opDataSelection.Dataset[0].setValue(info)

    # Select the watershed drawer
    shell.setSelectedAppletDrawer(1)

    # Save the project
    shell.onSaveProjectActionTriggered()

def impl():
    projFilePath = self.PROJECT_FILE
    shell = self.shell

    # New project
    shell.createAndLoadNewProject(projFilePath, self.workflowClass())
    workflow = shell.projectManager.workflow

    from ilastik.applets.dataSelection.opDataSelection import DatasetInfo
    opDataSelection = workflow.dataSelectionApplet.topLevelOperator
    for i, dataFile in enumerate(self.SAMPLE_DATA):
        # Add a file
        info = DatasetInfo()
        info.filePath = dataFile
        opDataSelection.DatasetGroup.resize(i + 1)
        opDataSelection.DatasetGroup[i][0].setValue(info)

    # Set some features
    opFeatures = workflow.featureSelectionApplet.topLevelOperator
    # sigma: 0.3 0.7 1.0 1.6 3.5 5.0 10.0
    selections = numpy.array([[True, False, False, False, False, False, False],
                              [True, False, False, False, False, False, False],
                              [True, False, False, False, False, False, False],
                              [False, False, False, False, False, False, False],
                              [False, False, False, False, False, False, False],
                              [False, False, False, False, False, False, False]])
    opFeatures.SelectionMatrix.setValue(selections)

def testProjectLocalData(self):
    graph = lazyflow.graph.Graph()
    reader = OperatorWrapper(OpDataSelection, graph=graph)
    reader.ProjectFile.setValue(self.projectFile)
    reader.WorkingDirectory.setValue(os.getcwd())
    reader.ProjectDataGroup.setValue('DataSelection/local_data')

    # Create a list of dataset infos . . .
    datasetInfos = []

    # From project
    info = DatasetInfo()
    info.location = DatasetInfo.Location.ProjectInternal
    info.filePath = "This string should be ignored..."
    info._datasetId = 'dataset1'  # (Cheating a bit here...)
    info.invertColors = False
    info.convertToGrayscale = False
    datasetInfos.append(info)

    reader.Dataset.setValues(datasetInfos)

    projectInternalData = reader.Image[0][...].wait()
    assert projectInternalData.shape == self.imgData3Dc.shape
    assert (projectInternalData == self.imgData3Dc).all()

def impl():
    projFilePath = self.PROJECT_FILE
    shell = self.shell

    # New project
    shell.createAndLoadNewProject(projFilePath)
    workflow = shell.projectManager.workflow

    from ilastik.applets.dataSelection.opDataSelection import DatasetInfo
    opDataSelection = workflow.dataSelectionApplet.topLevelOperator
    for i, dataFile in enumerate(self.SAMPLE_DATA):
        # Add a file
        info = DatasetInfo()
        info.filePath = dataFile
        opDataSelection.Dataset.resize(i + 1)
        opDataSelection.Dataset[i].setValue(info)

    # Set some features
    opFeatures = workflow.featureSelectionApplet.topLevelOperator
    opFeatures.FeatureIds.setValue(OpPixelFeaturesPresmoothed.DefaultFeatureIds)
    opFeatures.Scales.setValue([0.3, 0.7, 1, 1.6, 3.5, 5.0, 10.0])
    # sigma: 0.3 0.7 1.0 1.6 3.5 5.0 10.0
    selections = numpy.array([[True, False, False, False, False, False, False],
                              [True, False, False, False, False, False, False],
                              [True, False, False, False, False, False, False],
                              [False, False, False, False, False, False, False],
                              [False, False, False, False, False, False, False],
                              [False, False, False, False, False, False, False]])
    opFeatures.SelectionMatrix.setValue(selections)

    # Save and close
    shell.projectManager.saveProject()
    shell.ensureNoCurrentProject(assertClean=True)

def testProjectLocalData(self):
    graph = lazyflow.graph.Graph()
    reader = OperatorWrapper(OpDataSelection, graph=graph)
    reader.ProjectFile.setValue(self.projectFile)
    reader.WorkingDirectory.setValue(os.getcwd())
    reader.ProjectDataGroup.setValue('DataSelection/local_data')

    # Create a list of dataset infos . . .
    datasetInfos = []

    # From project
    info = DatasetInfo()
    info.location = DatasetInfo.Location.ProjectInternal
    info.filePath = "This string should be ignored..."
    info._datasetId = 'dataset1'  # (Cheating a bit here...)
    info.invertColors = False
    info.convertToGrayscale = False
    datasetInfos.append(info)

    reader.Dataset.setValues(datasetInfos)

    projectInternalData = reader.Image[0][...].wait()
    assert projectInternalData.shape == self.pngData.shape
    assert (projectInternalData == self.pngData).all()

def impl():
    projFilePath = self.PROJECT_FILE
    shell = self.shell
    workflow = self.workflow

    # New project
    shell.createAndLoadNewProject(projFilePath)

    # Add a file
    from ilastik.applets.dataSelection.opDataSelection import DatasetInfo
    info = DatasetInfo()
    info.filePath = self.SAMPLE_DATA
    opDataSelection = workflow.dataSelectionApplet.topLevelOperator
    opDataSelection.Dataset.resize(1)
    opDataSelection.Dataset[0].setValue(info)

    # Set some features
    featureGui = workflow.featureSelectionApplet.gui
    opFeatures = workflow.featureSelectionApplet.topLevelOperator
    # sigma: 0.3 0.7 1.0 1.6 3.5 5.0 10.0
    selections = numpy.array([[True, False, False, False, False, False, False],
                              [True, False, False, False, False, False, False],
                              [True, False, False, False, False, False, False],
                              [False, False, False, False, False, False, False],
                              [False, False, False, False, False, False, False],
                              [False, False, False, False, False, False, False]])
    opFeatures.SelectionMatrix.setValue(selections)

    # Save and close
    shell.projectManager.saveProject()
    shell.ensureNoCurrentProject(assertClean=True)

def testBasic2D(self):
    """Test if plain 2d files are loaded correctly"""
    for fileName in self.imgFileNames2D:
        graph = lazyflow.graph.Graph()
        reader = OperatorWrapper(OpDataSelection, graph=graph)
        reader.ProjectFile.setValue(self.projectFile)
        reader.WorkingDirectory.setValue(os.getcwd())
        reader.ProjectDataGroup.setValue('DataSelection/local_data')

        info = DatasetInfo()
        # Will be read from the filesystem since the data won't be found in the project file.
        info.location = DatasetInfo.Location.ProjectInternal
        info.filePath = fileName
        info.internalPath = ""
        info.invertColors = False
        info.convertToGrayscale = False

        reader.Dataset.setValues([info])

        # Read the test files using the data selection operator and verify the contents
        imgData2D = reader.Image[0][...].wait()

        # Check the file name output
        assert reader.ImageName[0].value == fileName
        # Check raw images
        assert imgData2D.shape == self.imgData2D.shape
        # skip this if image was saved compressed:
        if any(x in fileName.lower() for x in self.compressedExtensions):
            print("Skipping raw comparison for compressed data: {}".format(fileName))
            continue
        numpy.testing.assert_array_equal(imgData2D, self.imgData2D)

def testBasic2D(self):
    """Test if plain 2d files are loaded correctly"""
    for fileName in self.imgFileNames2D:
        graph = lazyflow.graph.Graph()
        reader = OperatorWrapper(OpDataSelection, graph=graph,
                                 operator_kwargs={'forceAxisOrder': False})
        reader.ProjectFile.setValue(self.projectFile)
        reader.WorkingDirectory.setValue(os.getcwd())
        reader.ProjectDataGroup.setValue('DataSelection/local_data')

        info = DatasetInfo()
        # Will be read from the filesystem since the data won't be found in the project file.
        info.location = DatasetInfo.Location.ProjectInternal
        info.filePath = fileName
        info.internalPath = ""
        info.invertColors = False
        info.convertToGrayscale = False

        reader.Dataset.setValues([info])

        # Read the test files using the data selection operator and verify the contents
        imgData2D = reader.Image[0][...].wait()

        # Check the file name output
        assert reader.ImageName[0].value == fileName
        # Check raw images
        assert imgData2D.shape == self.imgData2D.shape
        # skip this if image was saved compressed:
        if any(x in fileName.lower() for x in self.compressedExtensions):
            print("Skipping raw comparison for compressed data: {}".format(fileName))
            continue
        numpy.testing.assert_array_equal(imgData2D, self.imgData2D)

def testBasic3D(self):
    """Test if 3d files are loaded correctly"""
    for fileName in self.imgFileNames3D:
        graph = lazyflow.graph.Graph()
        reader = OperatorWrapper(OpDataSelection, graph=graph)
        reader.ProjectFile.setValue(self.projectFile)
        reader.WorkingDirectory.setValue(os.getcwd())
        reader.ProjectDataGroup.setValue('DataSelection/local_data')

        info = DatasetInfo()
        # Will be read from the filesystem since the data won't be found in the project file.
        info.location = DatasetInfo.Location.ProjectInternal
        info.filePath = fileName
        info.internalPath = ""
        info.invertColors = False
        info.convertToGrayscale = False

        reader.Dataset.setValues([info])

        # Read the test files using the data selection operator and verify the contents
        imgData3D = reader.Image[0][...].wait()

        # Check the file name output
        assert reader.ImageName[0].value == fileName
        # Check raw images
        assert imgData3D.shape == self.imgData3D.shape
        numpy.testing.assert_array_equal(imgData3D, self.imgData3D)

def create_new_tst_project(cls):
    # Instantiate 'shell'
    shell = HeadlessShell()

    # Create a blank project file and load it.
    newProjectFilePath = cls.PROJECT_FILE
    newProjectFile = ProjectManager.createBlankProjectFile(newProjectFilePath,
                                                           PixelClassificationWorkflow,
                                                           [])
    newProjectFile.close()
    shell.openProjectFile(newProjectFilePath)
    workflow = shell.workflow

    # Add a file
    from ilastik.applets.dataSelection.opDataSelection import DatasetInfo
    info = DatasetInfo()
    info.filePath = cls.SAMPLE_DATA
    opDataSelection = workflow.dataSelectionApplet.topLevelOperator
    opDataSelection.DatasetGroup.resize(1)
    opDataSelection.DatasetGroup[0][0].setValue(info)

    # Set some features
    ScalesList = [0.3, 0.7, 1, 1.6, 3.5, 5.0, 10.0]
    FeatureIds = ['GaussianSmoothing',
                  'LaplacianOfGaussian',
                  'StructureTensorEigenvalues',
                  'HessianOfGaussianEigenvalues',
                  'GaussianGradientMagnitude',
                  'DifferenceOfGaussians']

    opFeatures = workflow.featureSelectionApplet.topLevelOperator
    opFeatures.Scales.setValue(ScalesList)
    opFeatures.FeatureIds.setValue(FeatureIds)

    # sigma: 0.3 0.7 1.0 1.6 3.5 5.0 10.0
    selections = numpy.array([[True, False, False, False, False, False, False],
                              [True, False, False, False, False, False, False],
                              [True, False, False, False, False, False, False],
                              [False, False, False, False, False, False, False],
                              [False, False, False, False, False, False, False],
                              [False, False, False, False, False, False, False]])
    opFeatures.SelectionMatrix.setValue(selections)

    # Add some labels directly to the operator
    opPixelClass = workflow.pcApplet.topLevelOperator
    opPixelClass.LabelNames.setValue(['Label 1', 'Label 2'])

    slicing1 = sl[0:1, 0:10, 0:10, 0:1, 0:1]
    labels1 = 1 * numpy.ones(slicing2shape(slicing1), dtype=numpy.uint8)
    opPixelClass.LabelInputs[0][slicing1] = labels1

    slicing2 = sl[0:1, 0:10, 10:20, 0:1, 0:1]
    labels2 = 2 * numpy.ones(slicing2shape(slicing2), dtype=numpy.uint8)
    opPixelClass.LabelInputs[0][slicing2] = labels2

    # Save and close
    shell.projectManager.saveProject()
    del shell

def testCreateExportDirectory(self):
    """
    Test that the batch operator can create the export directory if it doesn't exist yet.
    """
    # Start by writing some test data to disk.
    self.testData = numpy.random.random((1, 10, 10, 10, 1))
    numpy.save(self.testDataFileName, self.testData)

    cwd = os.getcwd()
    info = DatasetInfo()
    info.filePath = os.path.join(cwd, 'NpyTestData.npy')

    graph = Graph()
    opBatchIo = OpBatchIo(graph=graph)
    opInput = OpInputDataReader(graph=graph)
    opInput.FilePath.setValue(info.filePath)

    # Our test "processing pipeline" is just a smoothing operator.
    opSmooth = OpGaussianSmoothing(graph=graph)
    opSmooth.Input.connect(opInput.Output)
    opSmooth.sigma.setValue(3.0)

    exportDir = os.path.join(cwd, 'exported_data')
    opBatchIo.ExportDirectory.setValue(exportDir)
    opBatchIo.Suffix.setValue('_smoothed')
    opBatchIo.Format.setValue(ExportFormat.H5)
    opBatchIo.DatasetPath.setValue(info.filePath)

    internalPath = 'path/to/data'
    opBatchIo.InternalPath.setValue(internalPath)
    opBatchIo.ImageToExport.connect(opSmooth.Output)

    dirty = opBatchIo.Dirty.value
    assert dirty == True

    outputPath = opBatchIo.OutputDataPath.value
    assert outputPath == os.path.join(exportDir, 'NpyTestData_smoothed.h5', internalPath)

    result = opBatchIo.ExportResult.value
    assert result

    dirty = opBatchIo.Dirty.value
    assert dirty == False

    # Check the file
    smoothedPath = PathComponents(outputPath).externalPath
    with h5py.File(smoothedPath, 'r') as f:
        assert internalPath in f
        assert f[internalPath].shape == self.testData.shape
    try:
        os.remove(smoothedPath)
        os.rmdir(exportDir)
    except:
        pass

def testCreateExportDirectory(self):
    """
    Test that the batch operator can create the export directory if it doesn't exist yet.
    """
    # Start by writing some test data to disk.
    self.testData = numpy.random.random((1, 10, 10, 10, 1))
    numpy.save(self.testDataFileName, self.testData)

    cwd = os.getcwd()
    info = DatasetInfo()
    info.filePath = os.path.join(cwd, 'NpyTestData.npy')

    graph = Graph()
    opBatchIo = OpBatchIo(graph=graph)
    opInput = OpInputDataReader(graph=graph)
    opInput.FilePath.setValue(info.filePath)

    # Our test "processing pipeline" is just a smoothing operator.
    opSmooth = OpGaussianSmoothing(graph=graph)
    opSmooth.Input.connect(opInput.Output)
    opSmooth.sigma.setValue(3.0)

    exportDir = os.path.join(cwd, 'exported_data')
    opBatchIo.ExportDirectory.setValue(exportDir)
    opBatchIo.Suffix.setValue('_smoothed')
    opBatchIo.Format.setValue(ExportFormat.H5)
    opBatchIo.DatasetPath.setValue(info.filePath)
    opBatchIo.WorkingDirectory.setValue(cwd)

    internalPath = 'path/to/data'
    opBatchIo.InternalPath.setValue(internalPath)
    opBatchIo.ImageToExport.connect(opSmooth.Output)

    dirty = opBatchIo.Dirty.value
    assert dirty == True

    outputPath = opBatchIo.OutputDataPath.value
    assert outputPath == os.path.join(exportDir, 'NpyTestData_smoothed.h5', internalPath)

    result = opBatchIo.ExportResult.value
    assert result

    dirty = opBatchIo.Dirty.value
    assert dirty == False

    # Check the file
    smoothedPath = PathComponents(outputPath).externalPath
    with h5py.File(smoothedPath, 'r') as f:
        assert internalPath in f
        assert f[internalPath].shape == self.testData.shape
    try:
        os.remove(smoothedPath)
        os.rmdir(exportDir)
    except:
        pass

def create_new_tst_project(cls):
    # Instantiate 'shell'
    shell = HeadlessShell()

    # Create a blank project file and load it.
    newProjectFilePath = cls.PROJECT_FILE
    newProjectFile = ProjectManager.createBlankProjectFile(newProjectFilePath,
                                                           PixelClassificationWorkflow,
                                                           [])
    newProjectFile.close()
    shell.openProjectFile(newProjectFilePath)
    workflow = shell.workflow

    # Add a file
    from ilastik.applets.dataSelection.opDataSelection import DatasetInfo
    info = DatasetInfo()
    info.filePath = cls.SAMPLE_DATA
    opDataSelection = workflow.dataSelectionApplet.topLevelOperator
    opDataSelection.DatasetGroup.resize(1)
    opDataSelection.DatasetGroup[0][0].setValue(info)

    # Set some features
    ScalesList = [0.3, 0.7, 1, 1.6, 3.5, 5.0, 10.0]
    FeatureIds = ['GaussianSmoothing',
                  'LaplacianOfGaussian',
                  'StructureTensorEigenvalues',
                  'HessianOfGaussianEigenvalues',
                  'GaussianGradientMagnitude',
                  'DifferenceOfGaussians']

    opFeatures = workflow.featureSelectionApplet.topLevelOperator
    opFeatures.Scales.setValue(ScalesList)
    opFeatures.FeatureIds.setValue(FeatureIds)

    # sigma: 0.3 0.7 1.0 1.6 3.5 5.0 10.0
    selections = numpy.array([[True, False, False, False, False, False, False],
                              [True, False, False, False, False, False, False],
                              [True, False, False, False, False, False, False],
                              [False, False, False, False, False, False, False],
                              [False, False, False, False, False, False, False],
                              [False, False, False, False, False, False, False]])
    opFeatures.SelectionMatrix.setValue(selections)

    # Add some labels directly to the operator
    opPixelClass = workflow.pcApplet.topLevelOperator

    slicing1 = sl[0:1, 0:10, 0:10, 0:1, 0:1]
    labels1 = 1 * numpy.ones(slicing2shape(slicing1), dtype=numpy.uint8)
    opPixelClass.LabelInputs[0][slicing1] = labels1

    slicing2 = sl[0:1, 0:10, 10:20, 0:1, 0:1]
    labels2 = 2 * numpy.ones(slicing2shape(slicing2), dtype=numpy.uint8)
    opPixelClass.LabelInputs[0][slicing2] = labels2

    # Save and close
    shell.projectManager.saveProject()
    del shell

def test(self): """ Make sure that the dataset roles work the way we expect them to. """ infoA = DatasetInfo() infoA.filePath = self.group1Data[0][0] infoC = DatasetInfo() infoC.filePath = self.group1Data[1][0] graph = Graph() op = OpDataSelectionGroup(graph=graph) op.WorkingDirectory.setValue(self.workingDir) op.DatasetRoles.setValue(['RoleA', 'RoleB', 'RoleC']) op.DatasetGroup.resize(3) op.DatasetGroup[0].setValue(infoA) # Leave RoleB blank -- datasets other than the first are optional op.DatasetGroup[2].setValue(infoC) assert op.ImageGroup[0].ready() assert op.ImageGroup[2].ready() expectedDataA = self.group1Data[0][1] dataFromOpA = op.ImageGroup[0][:].wait() assert dataFromOpA.dtype == expectedDataA.dtype assert dataFromOpA.shape == expectedDataA.shape assert (dataFromOpA == expectedDataA).all() expectedDataC = self.group1Data[0][1] dataFromOpC = op.ImageGroup[0][:].wait() assert dataFromOpC.dtype == expectedDataC.dtype assert dataFromOpC.shape == expectedDataC.shape assert (dataFromOpC == expectedDataC).all() assert op.Image.ready() assert (op.Image[:].wait() == expectedDataA).all() # Ensure that files opened by the inner operators are closed before we exit. op.DatasetGroup.resize(0)
def test(self): """ Make sure that the dataset roles work the way we expect them to. """ infoA = DatasetInfo() infoA.filePath = self.group1Data[0][0] infoC = DatasetInfo() infoC.filePath = self.group1Data[1][0] graph = Graph() op = OpDataSelectionGroup( graph=graph ) op.WorkingDirectory.setValue( self.workingDir ) op.DatasetRoles.setValue( ['RoleA', 'RoleB', 'RoleC'] ) op.DatasetGroup.resize( 3 ) op.DatasetGroup[0].setValue( infoA ) # Leave RoleB blank -- datasets other than the first are optional op.DatasetGroup[2].setValue( infoC ) assert op.ImageGroup[0].ready() assert op.ImageGroup[2].ready() expectedDataA = self.group1Data[0][1] dataFromOpA = op.ImageGroup[0][:].wait() assert dataFromOpA.dtype == expectedDataA.dtype assert dataFromOpA.shape == expectedDataA.shape assert (dataFromOpA == expectedDataA).all() expectedDataC = self.group1Data[0][1] dataFromOpC = op.ImageGroup[0][:].wait() assert dataFromOpC.dtype == expectedDataC.dtype assert dataFromOpC.shape == expectedDataC.shape assert (dataFromOpC == expectedDataC).all() assert op.Image.ready() assert (op.Image[:].wait() == expectedDataA).all() # Ensure that files opened by the inner operators are closed before we exit. op.DatasetGroup.resize(0)
def append_lane(workflow, input_filepath, axisorder=None):
    # Sanity checks
    assert isinstance(workflow, PixelClassificationWorkflow)
    opPixelClassification = workflow.pcApplet.topLevelOperator
    assert opPixelClassification.Classifier.ready()

    # If the filepath is a globstring, convert the stack to h5
    input_filepath = DataSelectionApplet.convertStacksToH5([input_filepath], TMP_DIR)[0]

    info = DatasetInfo()
    info.location = DatasetInfo.Location.FileSystem
    info.filePath = input_filepath

    comp = PathComponents(input_filepath)

    # Convert all (non-url) paths to absolute
    # (otherwise they are relative to the project file, which probably isn't what the user meant)
    if not isUrl(input_filepath):
        comp.externalPath = os.path.abspath(comp.externalPath)
        info.filePath = comp.totalPath()
    info.nickname = comp.filenameBase
    if axisorder:
        info.axistags = vigra.defaultAxistags(axisorder)

    logger.debug("adding lane: {}".format(info))

    opDataSelection = workflow.dataSelectionApplet.topLevelOperator

    # Add a lane
    num_lanes = len(opDataSelection.DatasetGroup) + 1
    logger.debug("num_lanes: {}".format(num_lanes))
    opDataSelection.DatasetGroup.resize(num_lanes)

    # Configure it.
    role_index = 0  # raw data
    opDataSelection.DatasetGroup[-1][role_index].setValue(info)

    # Sanity check
    assert len(opPixelClassification.InputImages) == num_lanes

    return opPixelClassification

def impl():
    projFilePath = self.project_file
    shell = self.shell

    # New project
    shell.createAndLoadNewProject(projFilePath, self.workflowClass())
    workflow = shell.projectManager.workflow

    # Add our input files:
    opDataSelection = workflow.dataSelectionApplet.topLevelOperator
    opDataSelection.DatasetGroup.resize(1)

    info_raw = DatasetInfo()
    info_raw.filePath = self.sample_data_raw
    info_raw.nickname = 'test_data'
    opDataSelection.DatasetGroup[0][0].setValue(info_raw)

    info_prob = DatasetInfo()
    info_prob.filePath = self.sample_data_prob
    opDataSelection.DatasetGroup[0][1].setValue(info_prob)

    # Save
    shell.projectManager.saveProject()

def append_lane(workflow, input_filepath, axisorder=None):
    """
    Add a lane to the project file for the given input file.

    If axisorder is given, override the default axisorder for the file and
    force the project to use the given one. Globstrings are supported, in
    which case the files are converted to HDF5 first.
    """
    # If the filepath is a globstring, convert the stack to h5
    input_filepath = DataSelectionApplet.convertStacksToH5([input_filepath], tempfile.mkdtemp())[0]

    info = DatasetInfo()
    info.location = DatasetInfo.Location.FileSystem
    info.filePath = input_filepath

    comp = PathComponents(input_filepath)

    # Convert all (non-url) paths to absolute
    # (otherwise they are relative to the project file, which probably isn't what the user meant)
    if not isUrl(input_filepath):
        comp.externalPath = os.path.abspath(comp.externalPath)
        info.filePath = comp.totalPath()
    info.nickname = comp.filenameBase
    if axisorder:
        info.axistags = vigra.defaultAxistags(axisorder)

    logger.debug("adding lane: {}".format(info))

    opDataSelection = workflow.dataSelectionApplet.topLevelOperator

    # Add a lane
    num_lanes = len(opDataSelection.DatasetGroup) + 1
    logger.debug("num_lanes: {}".format(num_lanes))
    opDataSelection.DatasetGroup.resize(num_lanes)

    # Configure it.
    role_index = 0  # raw data
    opDataSelection.DatasetGroup[-1][role_index].setValue(info)

def impl():
    projFilePath = self.PROJECT_FILE
    shell = self.shell

    # New project
    shell.createAndLoadNewProject(projFilePath, self.workflowClass())
    workflow = shell.projectManager.workflow

    # Add a file
    from ilastik.applets.dataSelection.opDataSelection import DatasetInfo
    info = DatasetInfo()
    info.filePath = self.SAMPLE_DATA
    opDataSelection = workflow.dataSelectionApplet.topLevelOperator
    opDataSelection.DatasetGroup.resize(1)
    opDataSelection.DatasetGroup[0][0].setValue(info)

    # Set some features
    opFeatures = workflow.featureSelectionApplet.topLevelOperator
    # sigma: 0.3 0.7 1.0 1.6 3.5 5.0 10.0
    selections = numpy.array([
        [True, False, False, False, False, False, False],
        [True, False, False, False, False, False, False],
        [True, False, False, False, False, False, False],
        [False, False, False, False, False, False, False],
        [False, False, False, False, False, False, False],
        [False, False, False, False, False, False, False],
    ])
    opFeatures.SelectionMatrix.setValue(selections)

    workflow = self.shell.projectManager.workflow
    countingClassApplet = workflow.countingApplet
    gui = countingClassApplet.getMultiLaneGui()

    opCount = countingClassApplet.topLevelOperator
    opCount.opTrain.Sigma.setValue(self.COUNTING_SIGMA)

    # Select the labeling drawer
    self.shell.setSelectedAppletDrawer(COUNTING_APPLET_INDEX)

    # Turn off the huds so we can capture the raw image
    viewMenu = gui.currentGui().menus()[0]
    viewMenu.actionToggleAllHuds.trigger()

    # Save and close
    shell.projectManager.saveProject()
    shell.ensureNoCurrentProject(assertClean=True)

def basicImpl(self):
    cwd = os.getcwd()
    info = DatasetInfo()
    info.filePath = os.path.join(cwd, self.testDataFileName)

    graph = Graph()
    opBatchIo = OpBatchIo(graph=graph)
    opInput = OpInputDataReader(graph=graph)
    opInput.FilePath.setValue(info.filePath)

    # Our test "processing pipeline" is just a smoothing operator.
    opSmooth = OpGaussianSmoothing(graph=graph)
    opSmooth.Input.connect(opInput.Output)
    opSmooth.sigma.setValue(3.0)

    opBatchIo.ExportDirectory.setValue('')
    opBatchIo.Suffix.setValue('_smoothed')
    opBatchIo.Format.setValue(ExportFormat.H5)
    opBatchIo.DatasetPath.setValue(info.filePath)
    opBatchIo.WorkingDirectory.setValue(cwd)

    internalPath = 'path/to/data'
    opBatchIo.InternalPath.setValue(internalPath)
    opBatchIo.ImageToExport.connect(opSmooth.Output)

    dirty = opBatchIo.Dirty.value
    assert dirty == True

    outputPath = opBatchIo.OutputDataPath.value
    assert outputPath == os.path.join(cwd, 'NpyTestData_smoothed.h5/' + internalPath)

    result = opBatchIo.ExportResult.value
    assert result

    dirty = opBatchIo.Dirty.value
    assert dirty == False

    # Check the file
    smoothedPath = os.path.join(cwd, 'NpyTestData_smoothed.h5')
    with h5py.File(smoothedPath, 'r') as f:
        assert internalPath in f
        assert f[internalPath].shape == self.expectedDataShape
        assert (f[internalPath][:] == opSmooth.Output[:].wait()).all()
    try:
        os.remove(smoothedPath)
    except:
        pass

    # Check the exported image
    assert (opBatchIo.ExportedImage[:].wait() == opSmooth.Output[:].wait()).all()

def loadProject(shell, workflow):
    if not os.path.exists(projectFilename):
        shell.createAndLoadNewProject(projectFilename)
    else:
        shell.openProjectFile(projectFilename)
    workflow.setCarvingGraphFile(carvingGraphFilename)

    # Add a file
    from ilastik.applets.dataSelection.opDataSelection import DatasetInfo
    info = DatasetInfo()
    info.filePath = carvingGraphFilename + "/graph/raw"
    opDataSelection = workflow.dataSelectionApplet.topLevelOperator
    opDataSelection.Dataset.resize(1)
    opDataSelection.Dataset[0].setValue(info)

    shell.setSelectedAppletDrawer(2)

def impl():
    projFilePath = self.PROJECT_FILE
    shell = self.shell

    # New project
    shell.createAndLoadNewProject(projFilePath, self.workflowClass())
    workflow = shell.projectManager.workflow

    # Add a file
    from ilastik.applets.dataSelection.opDataSelection import DatasetInfo
    info = DatasetInfo()
    info.filePath = self.SAMPLE_DATA
    opDataSelection = workflow.dataSelectionApplet.topLevelOperator
    opDataSelection.DatasetGroup.resize(1)
    opDataSelection.DatasetGroup[0][0].setValue(info)

    # Set some features
    opFeatures = workflow.featureSelectionApplet.topLevelOperator
    opFeatures.FeatureIds.setValue(OpPixelFeaturesPresmoothed.DefaultFeatureIds)
    opFeatures.Scales.setValue([0.3, 0.7, 1, 1.6, 3.5, 5.0, 10.0])
    # sigma: 0.3 0.7 1.0 1.6 3.5 5.0 10.0
    selections = numpy.array([[True, False, False, False, False, False, False],
                              [True, False, False, False, False, False, False],
                              [True, False, False, False, False, False, False],
                              [False, False, False, False, False, False, False],
                              [False, False, False, False, False, False, False],
                              [False, False, False, False, False, False, False]])
    opFeatures.SelectionMatrix.setValue(selections)

    #shell.setSelectedAppletDrawer(5)
    workflow = self.shell.projectManager.workflow
    countingClassApplet = workflow.countingApplet
    gui = countingClassApplet.getMultiLaneGui()
    opCount = countingClassApplet.topLevelOperator

    # Select the labeling drawer
    self.shell.setSelectedAppletDrawer(3)

    # Turn off the huds so we can capture the raw image
    viewMenu = gui.currentGui().menus()[0]
    viewMenu.actionToggleAllHuds.trigger()

    # Save and close
    shell.projectManager.saveProject()
    shell.ensureNoCurrentProject(assertClean=True)

def test(self):
    infoA = DatasetInfo()
    infoA.filePath = self.group1Data[0][0]

    infoC = DatasetInfo()
    infoC.filePath = self.group1Data[1][0]

    graph = Graph()
    op = OpDataSelectionGroup(graph=graph)
    op.WorkingDirectory.setValue(self.workingDir)
    op.DatasetRoles.setValue(['RoleA', 'RoleB', 'RoleC'])

    op.DatasetGroup.resize(3)
    op.DatasetGroup[0].setValue(infoA)
    # Leave RoleB blank -- datasets other than the first are optional
    op.DatasetGroup[2].setValue(infoC)

    assert op.ImageGroup[0].ready()
    assert op.ImageGroup[2].ready()

    expectedDataA = self.group1Data[0][1]
    dataFromOpA = op.ImageGroup[0][:].wait()
    assert dataFromOpA.dtype == expectedDataA.dtype
    assert dataFromOpA.shape == expectedDataA.shape
    assert (dataFromOpA == expectedDataA).all()

    # Compare RoleC's output against RoleC's expected data
    # (the original checked RoleA's data twice by mistake).
    expectedDataC = self.group1Data[1][1]
    dataFromOpC = op.ImageGroup[2][:].wait()
    assert dataFromOpC.dtype == expectedDataC.dtype
    assert dataFromOpC.shape == expectedDataC.shape
    assert (dataFromOpC == expectedDataC).all()

    assert op.Image.ready()
    assert (op.Image[:].wait() == expectedDataA).all()

def debug_with_new(shell):
    """
    (Function for debug and testing.)
    """
    projFilePath = "/magnetic/test_project.ilp"

    # New project
    shell.createAndLoadNewProject(projFilePath)
    workflow = shell.projectManager.workflow

    from ilastik.applets.dataSelection.opDataSelection import DatasetInfo

    rawInfo = DatasetInfo()
    rawInfo.filePath = '/magnetic/synapse_small.npy'
    opDataSelection = workflow.rawDataSelectionApplet.topLevelOperator
    opDataSelection.Dataset.resize(1)
    opDataSelection.Dataset[0].setValue(rawInfo)

    binaryInfo = DatasetInfo()
    binaryInfo.filePath = '/magnetic/synapse_small_binary.npy'
    opDataSelection = workflow.dataSelectionApplet.topLevelOperator
    opDataSelection.Dataset.resize(1)
    opDataSelection.Dataset[0].setValue(binaryInfo)

def basicImpl(self):
    cwd = os.getcwd()
    info = DatasetInfo()
    info.filePath = os.path.join(cwd, 'NpyTestData.npy')

    graph = Graph()
    opBatchIo = OpBatchIo(graph=graph)
    opInput = OpInputDataReader(graph=graph)
    opInput.FilePath.setValue(info.filePath)

    # Our test "processing pipeline" is just a smoothing operator.
    opSmooth = OpGaussianSmoothing(graph=graph)
    opSmooth.Input.connect(opInput.Output)
    opSmooth.sigma.setValue(3.0)

    opBatchIo.ExportDirectory.setValue('')
    opBatchIo.Suffix.setValue('_smoothed')
    opBatchIo.Format.setValue(ExportFormat.H5)
    opBatchIo.DatasetPath.setValue(info.filePath)

    internalPath = 'path/to/data'
    opBatchIo.InternalPath.setValue(internalPath)
    opBatchIo.ImageToExport.connect(opSmooth.Output)

    dirty = opBatchIo.Dirty.value
    assert dirty == True

    outputPath = opBatchIo.OutputDataPath.value
    assert outputPath == os.path.join(cwd, 'NpyTestData_smoothed.h5/' + internalPath)

    result = opBatchIo.ExportResult.value
    assert result

    dirty = opBatchIo.Dirty.value
    assert dirty == False

    # Check the file
    smoothedPath = os.path.join(cwd, 'NpyTestData_smoothed.h5')
    with h5py.File(smoothedPath, 'r') as f:
        assert internalPath in f
        assert f[internalPath].shape == self.expectedDataShape
    try:
        os.remove(smoothedPath)
    except:
        pass

def generateBatchPredictions(workflow, batchInputPaths, batchExportDir,
                             batchOutputSuffix, exportedDatasetName):
    """
    Compute the predictions for each of the specified batch input files,
    and export them to corresponding h5 files.
    """
    batchInputPaths = convertStacksToH5(batchInputPaths)

    batchInputInfos = []
    for p in batchInputPaths:
        info = DatasetInfo()
        info.location = DatasetInfo.Location.FileSystem

        # Convert all paths to absolute
        # (otherwise they are relative to the project file, which probably isn't what the user meant)
        comp = PathComponents(p)
        comp.externalPath = os.path.abspath(comp.externalPath)
        info.filePath = comp.totalPath()
        batchInputInfos.append(info)

    # Configure batch input operator
    opBatchInputs = workflow.batchInputApplet.topLevelOperator
    opBatchInputs.Dataset.setValues(batchInputInfos)

    # Configure batch export operator
    opBatchResults = workflow.batchResultsApplet.topLevelOperator
    opBatchResults.ExportDirectory.setValue(batchExportDir)
    opBatchResults.Format.setValue(ExportFormat.H5)
    opBatchResults.Suffix.setValue(batchOutputSuffix)
    opBatchResults.InternalPath.setValue(exportedDatasetName)
    opBatchResults.SelectedSlices.setValue([30])

    logger.info("Exporting data to " + opBatchResults.OutputDataPath[0].value)

    # Set up progress display handling (just logging for now)
    currentProgress = [None]

    def handleProgress(percentComplete):
        if currentProgress[0] != percentComplete:
            currentProgress[0] = percentComplete
            logger.info("Batch job: {}% complete.".format(percentComplete))

    progressSignal = opBatchResults.ProgressSignal[0].value
    progressSignal.subscribe(handleProgress)

    # Make it happen!
    result = opBatchResults.ExportResult[0].value
    return result

def _run_export_with_empty_batch_lane(self, role_input_paths, batch_lane_index,
                                      template_infos, progress_callback):
    """
    Configure the fresh batch lane with the given input files, and export the results.
    """
    assert role_input_paths[0], "At least one file must be provided for each dataset (the first role)."
    opDataSelectionBatchLaneView = self.dataSelectionApplet.topLevelOperator.getLane(batch_lane_index)

    # Apply new settings for each role
    for role_index, path_for_role in enumerate(role_input_paths):
        if not path_for_role:
            continue

        if template_infos[role_index]:
            info = copy.copy(template_infos[role_index])
        else:
            info = DatasetInfo()

        # Override the template settings with the current filepath.
        default_info = DataSelectionApplet.create_default_headless_dataset_info(path_for_role)
        info.filePath = default_info.filePath
        info.location = default_info.location
        info.nickname = default_info.nickname

        # Apply to the data selection operator
        opDataSelectionBatchLaneView.DatasetGroup[role_index].setValue(info)

    # Make sure nothing went wrong
    opDataExportBatchlaneView = self.dataExportApplet.topLevelOperator.getLane(batch_lane_index)
    assert opDataExportBatchlaneView.ImageToExport.ready()
    assert opDataExportBatchlaneView.ExportPath.ready()

    # New lanes were added.
    # Give the workflow a chance to restore anything that was unnecessarily invalidated (e.g. classifiers)
    self.workflow.handleNewLanesAdded()

    # Call customization hook
    self.dataExportApplet.prepare_lane_for_export(batch_lane_index)

    # Finally, run the export
    logger.info("Exporting to {}".format(opDataExportBatchlaneView.ExportPath.value))
    opDataExportBatchlaneView.progressSignal.subscribe(progress_callback)
    opDataExportBatchlaneView.run_export()

    # Call customization hook
    self.dataExportApplet.post_process_lane_export(batch_lane_index)

def debug_with_new(shell, workflow):
    """
    (Function for debug and testing.)
    """
    projFilePath = "/magnetic/test_project.ilp"

    # New project
    shell.createAndLoadNewProject(projFilePath)

    # Add a file
    from ilastik.applets.dataSelection.opDataSelection import DatasetInfo
    info = DatasetInfo()
    info.filePath = '/magnetic/gigacube.h5'
    #info.filePath = '/magnetic/synapse_small.npy'
    #info.filePath = '/magnetic/singleslice.h5'
    opDataSelection = workflow.dataSelectionApplet.topLevelOperator
    opDataSelection.Dataset.resize(1)
    opDataSelection.Dataset[0].setValue(info)

    # Set some features
    import numpy
    featureGui = workflow.featureSelectionApplet.gui
    opFeatures = workflow.featureSelectionApplet.topLevelOperator
    # sigma: 0.3 0.7 1.0 1.6 3.5 5.0 10.0
    #selections = numpy.array([[True, True, True, True, True, True, True],
    #                          [True, True, True, True, True, True, True],
    #                          [True, True, True, True, True, True, True],
    #                          [True, True, True, True, True, True, True],
    #                          [True, True, True, True, True, True, True],
    #                          [True, True, True, True, True, True, True]])
    selections = numpy.array([[True, False, False, False, False, False, False],
                              [False, False, False, False, False, False, False],
                              [False, False, False, False, False, False, False],
                              [False, False, False, False, False, False, False],
                              [False, False, False, False, False, False, False],
                              [False, False, False, False, False, False, False]])
    opFeatures.SelectionMatrix.setValue(selections)

    # Select the feature drawer
    shell.setSelectedAppletDrawer(2)

    # Save the project
    shell.onSaveProjectActionTriggered()

def testNoChannelAxis(self):
    """
    If we add a dataset that is missing a channel axis altogether,
    the operator should automatically append a channel axis.
    """
    noChannelFilename = os.path.join(self.workingDir, 'NoChannelAxis.npy')
    noChannelData = numpy.random.random((100, 100))
    numpy.save(noChannelFilename, noChannelData)

    info = DatasetInfo()
    info.filePath = noChannelFilename
    info.axistags = vigra.defaultAxistags('xy')

    graph = Graph()
    op = OpDataSelectionGroup(graph=graph)
    op.WorkingDirectory.setValue(self.workingDir)
    op.DatasetRoles.setValue(['RoleA'])

    op.DatasetGroup.resize(1)
    op.DatasetGroup[0].setValue(info)
    assert op.ImageGroup[0].ready()

    # Note that we expect a channel axis to be appended to the data.
    expected_data = noChannelData[:, :, numpy.newaxis]
    data_from_op = op.ImageGroup[0][:].wait()

    assert data_from_op.dtype == expected_data.dtype
    assert data_from_op.shape == expected_data.shape
    assert (data_from_op == expected_data).all()

    # op.Image is a synonym for op.ImageGroup[0]
    assert op.Image.ready()
    assert (op.Image[:].wait() == expected_data).all()

    # Ensure that files opened by the inner operators are closed before we exit.
    op.DatasetGroup.resize(0)
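# For intuition, the channel-append behavior the test above expects can be
# reproduced with plain numpy, independent of the operator: an 'xy' array
# gains a trailing singleton channel axis.
import numpy
data_xy = numpy.random.random((100, 100))   # axes: x, y
data_xyc = data_xy[:, :, numpy.newaxis]     # axes: x, y, c
assert data_xyc.shape == (100, 100, 1)
assert (data_xyc[:, :, 0] == data_xy).all()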
def testWeirdAxisInfos(self):
    """
    If we add a dataset that has the channel axis in the wrong place,
    the operator should automatically transpose it to be last.
    """
    weirdAxisFilename = os.path.join(self.workingDir, 'WeirdAxes.npy')
    weirdAxisData = numpy.random.random((3, 100, 100))
    numpy.save(weirdAxisFilename, weirdAxisData)

    info = DatasetInfo()
    info.filePath = weirdAxisFilename
    info.axistags = vigra.defaultAxistags('cxy')

    graph = Graph()
    op = OpDataSelectionGroup(graph=graph)
    op.WorkingDirectory.setValue(self.workingDir)
    op.DatasetRoles.setValue(['RoleA'])

    op.DatasetGroup.resize(1)
    op.DatasetGroup[0].setValue(info)
    assert op.ImageGroup[0].ready()

    # Note that we expect the channel axis to be transposed to be last.
    expected_data = weirdAxisData.transpose(1, 2, 0)
    data_from_op = op.ImageGroup[0][:].wait()

    assert data_from_op.dtype == expected_data.dtype
    assert data_from_op.shape == expected_data.shape
    assert (data_from_op == expected_data).all()

    # op.Image is a synonym for op.ImageGroup[0]
    assert op.Image.ready()
    assert (op.Image[:].wait() == expected_data).all()

    # Ensure that files opened by the inner operators are closed before we exit.
    op.DatasetGroup.resize(0)
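# Likewise, the axis reordering expected above can be checked with plain numpy:
# moving the leading channel axis of a 'cxy' array to the end is exactly
# transpose(1, 2, 0).
import numpy
data_cxy = numpy.random.random((3, 100, 100))   # axes: c, x, y
data_xyc = data_cxy.transpose(1, 2, 0)          # axes: x, y, c
assert data_xyc.shape == (100, 100, 3)
assert (data_xyc[..., 0] == data_cxy[0]).all()  # channel 0 preserved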
def _createDatasetInfo(self):
    """
    Create a DatasetInfo for the temporary test file, stored project-internally.
    """
    info = DatasetInfo()
    info.filePath = self.tmpFilePath
    info.location = DatasetInfo.Location.ProjectInternal
    return info
def test06(self):
    """
    Test the basic functionality of the v0.6 project format serializer.
    """
    # Create an empty project
    with h5py.File(self.testProjectName) as testProject:
        testProject.create_dataset("ilastikVersion", data=0.6)

        ##
        ## Serialization
        ##

        # Create an operator to work with and give it some input
        graph = Graph()
        operatorToSave = OpMultiLaneDataSelectionGroup(graph=graph)
        serializer = DataSelectionSerializer(operatorToSave, 'DataSelectionTest')
        assert serializer.base_initialized

        operatorToSave.ProjectFile.setValue(testProject)
        operatorToSave.WorkingDirectory.setValue(os.path.split(__file__)[0])
        operatorToSave.ProjectDataGroup.setValue(serializer.topGroupName + '/local_data')

        info = DatasetInfo()
        info.filePath = self.tmpFilePath
        info.location = DatasetInfo.Location.ProjectInternal

        operatorToSave.DatasetRoles.setValue(['Raw Data'])
        operatorToSave.DatasetGroup.resize(1)
        operatorToSave.DatasetGroup[0][0].setValue(info)

        # Now serialize!
        serializer.serializeToHdf5(testProject, self.testProjectName)

        # Check for dataset existence
        datasetInternalPath = serializer.topGroupName + '/local_data/' + info.datasetId
        dataset = testProject[datasetInternalPath][...]

        # Check axistags attribute
        assert 'axistags' in testProject[datasetInternalPath].attrs
        axistags_json = testProject[datasetInternalPath].attrs['axistags']
        axistags = vigra.AxisTags.fromJSON(axistags_json)

        # Debug info...
        #logging.basicConfig(level=logging.DEBUG)
        logger.debug('dataset.shape = ' + str(dataset.shape))
        logger.debug('should be ' + str(operatorToSave.Image[0].meta.shape))
        logger.debug('dataset axistags:')
        logger.debug(axistags)
        logger.debug('should be:')
        logger.debug(operatorToSave.Image[0].meta.axistags)

        originalShape = operatorToSave.Image[0].meta.shape
        originalAxisTags = operatorToSave.Image[0].meta.axistags

        # Now we can directly compare the shape and axis ordering
        assert dataset.shape == originalShape
        assert axistags == originalAxisTags

        ##
        ## Deserialization
        ##

        # Create an empty operator
        graph = Graph()
        operatorToLoad = OpMultiLaneDataSelectionGroup(graph=graph)
        operatorToLoad.DatasetRoles.setValue(['Raw Data'])

        # Copy the group name from the serializer we used.
        deserializer = DataSelectionSerializer(operatorToLoad, serializer.topGroupName)
        assert deserializer.base_initialized
        deserializer.deserializeFromHdf5(testProject, self.testProjectName)

        assert len(operatorToLoad.DatasetGroup) == len(operatorToSave.DatasetGroup)
        assert len(operatorToLoad.Image) == len(operatorToSave.Image)

        assert operatorToLoad.Image[0].meta.shape == operatorToSave.Image[0].meta.shape
        assert operatorToLoad.Image[0].meta.axistags == operatorToSave.Image[0].meta.axistags

    os.remove(self.testProjectName)
def generateBatchPredictions(workflow, batchInputPaths, batchExportDir, batchOutputSuffix, exportedDatasetName, stackVolumeCacheDir):
    """
    Compute the predictions for each of the specified batch input files,
    and export them to corresponding h5 files.
    """
    originalBatchInputPaths = list(batchInputPaths)
    batchInputPaths = convertStacksToH5(batchInputPaths, stackVolumeCacheDir)

    batchInputInfos = []
    for p in batchInputPaths:
        info = DatasetInfo()
        info.location = DatasetInfo.Location.FileSystem

        # Convert all paths to absolute
        # (otherwise they are relative to the project file, which probably isn't what the user meant)
        comp = PathComponents(p)
        comp.externalPath = os.path.abspath(comp.externalPath)
        info.filePath = comp.totalPath()
        batchInputInfos.append(info)

    # Also convert the export dir to absolute (for the same reason)
    if batchExportDir != '':
        batchExportDir = os.path.abspath(batchExportDir)

    # Configure batch input operator
    opBatchInputs = workflow.batchInputApplet.topLevelOperator
    opBatchInputs.DatasetGroup.resize(len(batchInputInfos))
    for info, multislot in zip(batchInputInfos, opBatchInputs.DatasetGroup):
        # FIXME: This assumes that the workflow has exactly one dataset role.
        multislot[0].setValue(info)

    # Configure batch export operator
    opBatchResults = workflow.batchResultsApplet.topLevelOperator

    # By default, the output files from the batch export operator
    # are named using the input file name.
    # If we converted any stacks to hdf5, then the user won't recognize the input file name.
    # Let's override the output file name using the *original* input file names.
    outputFileNameBases = []
    for origPath in originalBatchInputPaths:
        outputFileNameBases.append(origPath.replace('*', 'STACKED'))
    opBatchResults.OutputFileNameBase.setValues(outputFileNameBases)

    opBatchResults.ExportDirectory.setValue(batchExportDir)
    opBatchResults.Format.setValue(ExportFormat.H5)
    opBatchResults.Suffix.setValue(batchOutputSuffix)
    opBatchResults.InternalPath.setValue(exportedDatasetName)

    logger.info("Exporting data to " + opBatchResults.OutputDataPath[0].value)

    # Set up progress display handling (just logging for now)
    currentProgress = [None]
    def handleProgress(percentComplete):
        if currentProgress[0] != percentComplete:
            currentProgress[0] = percentComplete
            logger.info("Batch job: {}% complete.".format(percentComplete))
    progressSignal = opBatchResults.ProgressSignal[0].value
    progressSignal.subscribe(handleProgress)

    # Make it happen!
    result = opBatchResults.ExportResult[0].value
    return result
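# A hedged driver sketch for the stack-aware variant above. The glob pattern,
# cache directory, and export settings are placeholders. Note that a '*' in a
# stack pattern reappears as 'STACKED' in the exported file name, per the
# OutputFileNameBase override logic above.
def run_stack_batch_example(shell):
    workflow = shell.projectManager.workflow
    batchInputPaths = ['/magnetic/slice_stack/*.png']   # stack, converted to h5 internally
    result = generateBatchPredictions(workflow,
                                      batchInputPaths,
                                      batchExportDir='/magnetic/batch_results',
                                      batchOutputSuffix='_prediction',
                                      exportedDatasetName='volume/data',
                                      stackVolumeCacheDir='/magnetic/stack_cache')
    assert result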