def addFileNames(self, fileNames):
    """
    Add the given filenames to both the GUI table and the top-level operator inputs.
    """
    with Tracer(traceLogger):
        # Allocate additional subslots in the operator inputs.
        oldNumFiles = len(self.mainOperator.Dataset)
        self.mainOperator.Dataset.resize(oldNumFiles + len(fileNames))

        # Assign values to the new inputs we just allocated.
        # The GUI will be updated by callbacks that are listening to slot changes.
        for i, filePath in enumerate(fileNames):
            datasetInfo = DatasetInfo()
            cwd = self.mainOperator.WorkingDirectory.value
            absPath, relPath = getPathVariants(filePath, cwd)

            # Relative by default, unless the file is in a totally
            # different tree from the working directory.
            if len(os.path.commonprefix([cwd, absPath])) > 1:
                datasetInfo.filePath = relPath
            else:
                datasetInfo.filePath = absPath

            h5Exts = ['.ilp', '.h5', '.hdf5']
            if os.path.splitext(datasetInfo.filePath)[1] in h5Exts:
                datasetNames = self.getPossibleInternalPaths(absPath)
                if len(datasetNames) > 0:
                    datasetInfo.filePath += str(datasetNames[0])
                else:
                    raise RuntimeError("HDF5 file has no image datasets")

            # Allow labels by default if this gui isn't being used for batch data.
            datasetInfo.allowLabels = (self.guiMode == GuiMode.Normal)
            self.mainOperator.Dataset[i + oldNumFiles].setValue(datasetInfo)
def addFileNames(self, fileNames):
    """
    Add the given filenames to both the GUI table and the top-level operator inputs.
    """
    with Tracer(traceLogger):
        # Allocate additional subslots in the operator inputs.
        oldNumFiles = len(self.mainOperator.Dataset)
        self.mainOperator.Dataset.resize(oldNumFiles + len(fileNames))

        # Assign values to the new inputs we just allocated.
        # The GUI will be updated by callbacks that are listening to slot changes.
        for i, filePath in enumerate(fileNames):
            datasetInfo = DatasetInfo()
            cwd = self.mainOperator.WorkingDirectory.value
            absPath, relPath = getPathVariants(filePath, cwd)

            # Relative by default, unless the file is in a totally
            # different tree from the working directory.
            if len(os.path.commonprefix([cwd, absPath])) > 1:
                datasetInfo.filePath = relPath
            else:
                datasetInfo.filePath = absPath

            # Allow labels by default if this gui isn't being used for batch data.
            datasetInfo.allowLabels = (self.guiMode == GuiMode.Normal)
            self.mainOperator.Dataset[i + oldNumFiles].setValue(datasetInfo)
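# A minimal standalone sketch (not part of the classes above) of the
# commonprefix heuristic that both addFileNames variants use to pick between
# the relative and absolute path variants. os.path.commonprefix compares
# strings character by character, not path components, so the "> 1" test
# effectively asks: do the two paths share more than a leading separator?
import os

def choose_path_variant(cwd, absPath, relPath):
    # Prefer the relative path unless the file lives in a completely
    # different tree from the working directory.
    if len(os.path.commonprefix([cwd, absPath])) > 1:
        return relPath
    return absPath

# Shared tree: the relative variant wins.
assert choose_path_variant('/home/user/project', '/home/user/data/img.png', '../data/img.png') == '../data/img.png'
# Disjoint trees (the common prefix is just '/'): the absolute variant wins.
assert choose_path_variant('/home/user/project', '/mnt/volume/img.png', '/mnt/volume/img.png') == '/mnt/volume/img.png'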
def _deserializeFromHdf5(self, topGroup, groupVersion, hdf5File, projectFilePath):
    with Tracer(traceLogger):
        self._projectFilePath = projectFilePath
        self.initWithoutTopGroup(hdf5File, projectFilePath)

        infoDir = topGroup['infos']
        self.mainOperator.Dataset.resize(len(infoDir))
        for index, (infoGroupName, infoGroup) in enumerate(sorted(infoDir.items())):
            datasetInfo = DatasetInfo()

            # Make a reverse-lookup of the location storage strings.
            LocationLookup = {v: k for k, v in self.LocationStrings.items()}
            datasetInfo.location = LocationLookup[str(infoGroup['location'].value)]

            # Write to the 'private' members to avoid resetting the dataset id.
            datasetInfo._filePath = str(infoGroup['filePath'].value)
            datasetInfo._datasetId = str(infoGroup['datasetId'].value)

            # Deserialize the "allow labels" flag.
            try:
                datasetInfo.allowLabels = infoGroup['allowLabels'].value
            except KeyError:
                pass

            # Deserialize the axisorder (if present).
            try:
                datasetInfo.axisorder = infoGroup['axisorder'].value
            except KeyError:
                if ilastik.utility.globals.ImportOptions.default_axis_order is not None:
                    datasetInfo.axisorder = ilastik.utility.globals.ImportOptions.default_axis_order

            # If the data is supposed to be in the project, check for it now.
            if datasetInfo.location == DatasetInfo.Location.ProjectInternal:
                if datasetInfo.datasetId not in topGroup['local_data'].keys():
                    raise RuntimeError("Corrupt project file. Could not find data for " + infoGroupName)

            # If the data is supposed to exist outside the project, make sure it really does.
            if datasetInfo.location == DatasetInfo.Location.FileSystem:
                filePath = PathComponents(datasetInfo.filePath, os.path.split(projectFilePath)[0]).externalPath
                if not os.path.exists(filePath):
                    raise RuntimeError("Could not find external data: " + filePath)

            # Give the new info to the operator.
            self.mainOperator.Dataset[index].setValue(datasetInfo)

        self._dirty = False
def _deserializeFromHdf5(self, topGroup, groupVersion, hdf5File, projectFilePath, headless):
    self._projectFilePath = projectFilePath
    self.initWithoutTopGroup(hdf5File, projectFilePath)

    # Normally the serializer is not dirty after loading a project file.
    # However, when the file was corrupted, the user has the possibility
    # to save the fixed file after loading it.
    dirty = False

    infoDir = topGroup['infos']
    self.topLevelOperator.Dataset.resize(len(infoDir))
    for index, (infoGroupName, infoGroup) in enumerate(sorted(infoDir.items())):
        datasetInfo = DatasetInfo()

        # Make a reverse-lookup of the location storage strings.
        LocationLookup = {v: k for k, v in self.LocationStrings.items()}
        datasetInfo.location = LocationLookup[str(infoGroup['location'].value)]

        # Write to the 'private' members to avoid resetting the dataset id.
        datasetInfo._filePath = str(infoGroup['filePath'].value)
        datasetInfo._datasetId = str(infoGroup['datasetId'].value)

        # Deserialize the "allow labels" flag.
        try:
            datasetInfo.allowLabels = infoGroup['allowLabels'].value
        except KeyError:
            pass

        # Deserialize the axisorder (if present).
        try:
            datasetInfo.axisorder = infoGroup['axisorder'].value
        except KeyError:
            pass

        # If the data is supposed to be in the project, check for it now.
        if datasetInfo.location == DatasetInfo.Location.ProjectInternal:
            if datasetInfo.datasetId not in topGroup['local_data'].keys():
                raise RuntimeError("Corrupt project file. Could not find data for " + infoGroupName)

        # If the data is supposed to exist outside the project, make sure it really does.
        if datasetInfo.location == DatasetInfo.Location.FileSystem:
            pathData = PathComponents(datasetInfo.filePath, os.path.split(projectFilePath)[0])
            filePath = pathData.externalPath
            if not os.path.exists(filePath):
                if headless:
                    raise RuntimeError("Could not find data at " + filePath)
                filt = "Image files (" + ' '.join('*.' + x for x in OpDataSelection.SupportedExtensions) + ')'
                newpath = self.repairFile(filePath, filt)
                # internalPath may be None for non-HDF5 files.
                if pathData.internalPath is not None:
                    newpath += pathData.internalPath
                datasetInfo._filePath = getPathVariants(newpath, os.path.split(projectFilePath)[0])[0]
                dirty = True

        # Give the new info to the operator.
        self.topLevelOperator.Dataset[index].setValue(datasetInfo)

    self._dirty = dirty
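# A small self-contained illustration of the LocationLookup reverse mapping
# built by the deserializers above. The Location class and the string values
# here are hypothetical stand-ins; the real mapping lives in
# self.LocationStrings and DatasetInfo.Location.
class Location:
    FileSystem, ProjectInternal = 0, 1

LocationStrings = {Location.FileSystem: 'FileSystem',
                   Location.ProjectInternal: 'ProjectInternal'}

# Serialization writes the human-readable string; loading inverts the
# mapping to recover the enum value from what was read out of HDF5.
LocationLookup = {v: k for k, v in LocationStrings.items()}
assert LocationLookup['ProjectInternal'] == Location.ProjectInternal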
def replaceWithStack(self, roleIndex, laneIndex):
    """
    The user clicked the "Import Stack Files" button.
    """
    stackDlg = StackFileSelectionWidget(self)
    stackDlg.exec_()
    if stackDlg.result() != QDialog.Accepted:
        return
    files = stackDlg.selectedFiles
    if len(files) == 0:
        return

    info = DatasetInfo()
    info.filePath = "//".join(files)
    prefix = os.path.commonprefix(files)
    info.nickname = PathComponents(prefix).filenameBase

    # Add an underscore for each wildcard digit.
    num_wildcards = len(files[-1]) - len(prefix) - len(os.path.splitext(files[-1])[1])
    info.nickname += "_" * num_wildcards

    # Allow labels by default if this gui isn't being used for batch data.
    info.allowLabels = (self.guiMode == GuiMode.Normal)
    info.fromstack = True

    originalNumLanes = len(self.topLevelOperator.DatasetGroup)

    if laneIndex is None:
        laneIndex = self._findFirstEmptyLane(roleIndex)
    if len(self.topLevelOperator.DatasetGroup) < laneIndex + 1:
        self.topLevelOperator.DatasetGroup.resize(laneIndex + 1)

    def importStack():
        self.guiControlSignal.emit(ControlCommand.DisableAll)
        # Serializer will update the operator for us, which will propagate to the GUI.
        try:
            self.serializer.importStackAsLocalDataset(info)
            try:
                self.topLevelOperator.DatasetGroup[laneIndex][roleIndex].setValue(info)
            except DatasetConstraintError as ex:
                # Give the user a chance to repair the problem.
                filename = files[0] + "\n...\n" + files[-1]
                return_val = [False]
                self.handleDatasetConstraintError(info, filename, ex, roleIndex, laneIndex, return_val)
                if not return_val[0]:
                    # Not successfully repaired. Roll back the changes and give up.
                    self.topLevelOperator.DatasetGroup.resize(originalNumLanes)
        finally:
            self.guiControlSignal.emit(ControlCommand.Pop)

    req = Request(importStack)
    req.notify_failed(partial(self.handleFailedStackLoad, files, originalNumLanes))
    req.submit()
def addStack(self, roleIndex, laneIndex):
    """
    The user clicked the "Import Stack Files" button.
    """
    stackDlg = StackFileSelectionWidget(self)
    stackDlg.exec_()
    if stackDlg.result() != QDialog.Accepted:
        return
    files = stackDlg.selectedFiles
    sequence_axis = stackDlg.sequence_axis
    if len(files) == 0:
        return

    info = DatasetInfo()
    info.filePath = os.path.pathsep.join(files)
    prefix = os.path.commonprefix(files)
    info.nickname = PathComponents(prefix).filenameBase

    # Add an underscore for each wildcard digit.
    num_wildcards = len(files[-1]) - len(prefix) - len(os.path.splitext(files[-1])[1])
    info.nickname += "_" * num_wildcards

    # Allow labels by default if this gui isn't being used for batch data.
    info.allowLabels = (self.guiMode == GuiMode.Normal)
    info.fromstack = True

    originalNumLanes = len(self.topLevelOperator.DatasetGroup)

    if laneIndex is None or laneIndex == -1:
        laneIndex = len(self.topLevelOperator.DatasetGroup)
    if len(self.topLevelOperator.DatasetGroup) < laneIndex + 1:
        self.topLevelOperator.DatasetGroup.resize(laneIndex + 1)

    def importStack():
        self.parentApplet.busy = True
        self.parentApplet.appletStateUpdateRequested.emit()
        # Serializer will update the operator for us, which will propagate to the GUI.
        try:
            self.serializer.importStackAsLocalDataset(info, sequence_axis)
            try:
                self.topLevelOperator.DatasetGroup[laneIndex][roleIndex].setValue(info)
            except DatasetConstraintError as ex:
                # Give the user a chance to repair the problem.
                filename = files[0] + "\n...\n" + files[-1]
                return_val = [False]
                self.handleDatasetConstraintError(info, filename, ex, roleIndex, laneIndex, return_val)
                if not return_val[0]:
                    # Not successfully repaired. Roll back the changes and give up.
                    self.topLevelOperator.DatasetGroup.resize(originalNumLanes)
        finally:
            self.parentApplet.busy = False
            self.parentApplet.appletStateUpdateRequested.emit()

    req = Request(importStack)
    req.notify_finished(lambda result: self.showDataset(laneIndex, roleIndex))
    req.notify_failed(partial(self.handleFailedStackLoad, files, originalNumLanes))
    req.submit()
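# A standalone walk-through of the stack-nickname computation shared by
# replaceWithStack and addStack: the common prefix of all filenames becomes
# the base, and each trailing character that varies between files (the
# sequence digits) is rendered as an underscore. os.path.basename stands in
# for the project's PathComponents(prefix).filenameBase helper here.
import os

files = ['/data/stack/slice_010.png',
         '/data/stack/slice_011.png',
         '/data/stack/slice_012.png']
prefix = os.path.commonprefix(files)                     # '/data/stack/slice_01'
num_wildcards = len(files[-1]) - len(prefix) - len(os.path.splitext(files[-1])[1])
nickname = os.path.basename(prefix) + '_' * num_wildcards
assert nickname == 'slice_01_'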
def _createDatasetInfo(self, roleIndex, filePath, roi):
    """
    Create a DatasetInfo object for the given filePath and roi.
    roi may be None, in which case it is ignored.
    """
    datasetInfo = DatasetInfo()
    if roi is not None:
        datasetInfo.subvolume_roi = roi

    cwd = self.topLevelOperator.WorkingDirectory.value
    absPath, relPath = getPathVariants(filePath, cwd)

    # Relative by default, unless the file is in a totally
    # different tree from the working directory.
    if relPath is not None and len(os.path.commonprefix([cwd, absPath])) > 1:
        datasetInfo.filePath = relPath
    else:
        datasetInfo.filePath = absPath

    datasetInfo.nickname = PathComponents(absPath).filenameBase

    h5Exts = ['.ilp', '.h5', '.hdf5']
    if os.path.splitext(datasetInfo.filePath)[1] in h5Exts:
        datasetNames = self.getPossibleInternalPaths(absPath)
        if len(datasetNames) == 0:
            raise RuntimeError("HDF5 file %s has no image datasets" % datasetInfo.filePath)
        elif len(datasetNames) == 1:
            datasetInfo.filePath += str(datasetNames[0])
        else:
            # If exactly one of the file's datasets matches a user's previous choice, use it.
            if roleIndex not in self._default_h5_volumes:
                self._default_h5_volumes[roleIndex] = set()
            previous_selections = self._default_h5_volumes[roleIndex]
            possible_auto_selections = previous_selections.intersection(datasetNames)
            if len(possible_auto_selections) == 1:
                datasetInfo.filePath += str(list(possible_auto_selections)[0])
            else:
                # Ask the user which dataset to choose.
                dlg = H5VolumeSelectionDlg(datasetNames, self)
                if dlg.exec_() == QDialog.Accepted:
                    selected_index = dlg.combo.currentIndex()
                    selected_dataset = str(datasetNames[selected_index])
                    datasetInfo.filePath += selected_dataset
                    self._default_h5_volumes[roleIndex].add(selected_dataset)
                else:
                    raise DataSelectionGui.UserCancelledError()

    # Allow labels by default if this gui isn't being used for batch data.
    datasetInfo.allowLabels = (self.guiMode == GuiMode.Normal)
    return datasetInfo
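# The auto-selection step of _createDatasetInfo, distilled: if exactly one
# internal dataset name in the new HDF5 file matches a name the user picked
# before for this role, reuse it without prompting. The sample names below
# are made up for illustration.
previous_selections = {'/volume/data', '/volume/prediction'}
datasetNames = ['/volume/data', '/exported']
possible_auto_selections = previous_selections.intersection(datasetNames)
if len(possible_auto_selections) == 1:
    chosen = list(possible_auto_selections)[0]
else:
    chosen = None  # would fall back to the H5VolumeSelectionDlg dialog
assert chosen == '/volume/data'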
def addFileNames(self, fileNames):
    """
    Add the given filenames to both the GUI table and the top-level operator inputs.
    """
    with Tracer(traceLogger):
        infos = []

        oldNumFiles = len(self.topLevelOperator.Dataset)
        # HACK: Don't count trailing unready slots; treat the first unready
        # slot as the end of the list. This works around the scenario where
        # two independent data selection applets are coupled, causing mutual
        # resizes. It will be fixed when a multi-file data selection applet
        # gui replaces this gui.
        for i in reversed(range(oldNumFiles)):
            if not self.topLevelOperator.Dataset[i].ready():
                oldNumFiles -= 1
            else:
                break

        # Assign values to the new inputs we just allocated.
        # The GUI will be updated by callbacks that are listening to slot changes.
        for i, filePath in enumerate(fileNames):
            datasetInfo = DatasetInfo()
            cwd = self.topLevelOperator.WorkingDirectory.value

            if not areOnSameDrive(filePath, cwd):
                QMessageBox.critical(self, "Drive Error", "Data must be on same drive as working directory.")
                return

            absPath, relPath = getPathVariants(filePath, cwd)

            # Relative by default, unless the file is in a totally
            # different tree from the working directory.
            if len(os.path.commonprefix([cwd, absPath])) > 1:
                datasetInfo.filePath = relPath
            else:
                datasetInfo.filePath = absPath

            h5Exts = ['.ilp', '.h5', '.hdf5']
            if os.path.splitext(datasetInfo.filePath)[1] in h5Exts:
                datasetNames = self.getPossibleInternalPaths(absPath)
                if len(datasetNames) > 0:
                    datasetInfo.filePath += str(datasetNames[0])
                else:
                    raise RuntimeError("HDF5 file %s has no image datasets" % datasetInfo.filePath)

            # Allow labels by default if this gui isn't being used for batch data.
            datasetInfo.allowLabels = (self.guiMode == GuiMode.Normal)
            infos.append(datasetInfo)

        # If no exception was thrown, set up the operator now.
        self.topLevelOperator.Dataset.resize(oldNumFiles + len(fileNames))
        for i in range(len(infos)):
            self.topLevelOperator.Dataset[i + oldNumFiles].setValue(infos[i])
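# The trailing-unready-slot trim from the HACK above, isolated with a plain
# list of hypothetical readiness flags: walk backwards from the end and stop
# counting at the first ready slot.
ready_flags = [True, True, False, False]
oldNumFiles = len(ready_flags)
for i in reversed(range(oldNumFiles)):
    if not ready_flags[i]:
        oldNumFiles -= 1
    else:
        break
assert oldNumFiles == 2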
def replaceWithStack(self, roleIndex, laneIndex):
    """
    The user clicked the "Import Stack Files" button.
    """
    stackDlg = StackFileSelectionWidget(self)
    stackDlg.exec_()
    if stackDlg.result() != QDialog.Accepted:
        return
    files = stackDlg.selectedFiles
    if len(files) == 0:
        return

    info = DatasetInfo()
    info.filePath = "//".join(files)
    prefix = os.path.commonprefix(files)
    info.nickname = PathComponents(prefix).filenameBase + "..."

    # Allow labels by default if this gui isn't being used for batch data.
    info.allowLabels = (self.guiMode == GuiMode.Normal)
    info.fromstack = True

    originalNumLanes = len(self.topLevelOperator.DatasetGroup)

    if laneIndex is None:
        laneIndex = self._findFirstEmptyLane(roleIndex)
    if len(self.topLevelOperator.DatasetGroup) < laneIndex + 1:
        self.topLevelOperator.DatasetGroup.resize(laneIndex + 1)

    def importStack():
        self.guiControlSignal.emit(ControlCommand.DisableAll)
        # Serializer will update the operator for us, which will propagate to the GUI.
        try:
            self.serializer.importStackAsLocalDataset(info)
            try:
                self.topLevelOperator.DatasetGroup[laneIndex][roleIndex].setValue(info)
            except DatasetConstraintError as ex:
                # Give the user a chance to repair the problem.
                filename = files[0] + "\n...\n" + files[-1]
                if not self.handleDatasetConstraintError(info, filename, ex, roleIndex, laneIndex):
                    self.topLevelOperator.DatasetGroup.resize(originalNumLanes)
        finally:
            self.guiControlSignal.emit(ControlCommand.Pop)

    req = Request(importStack)
    req.notify_failed(partial(self.handleFailedStackLoad, files, originalNumLanes))
    req.submit()
def importStackFromGlobString(self, globString):
    """
    The word 'glob' is used loosely here. See the OpStackLoader operator for details.
    """
    info = DatasetInfo()
    info.filePath = globString

    # Allow labels by default if this gui isn't being used for batch data.
    info.allowLabels = (self.guiMode == GuiMode.Normal)

    def importStack():
        self.guiControlSignal.emit(ControlCommand.DisableAll)
        # Serializer will update the operator for us, which will propagate to the GUI.
        self.serializer.importStackAsLocalDataset(info)
        self.guiControlSignal.emit(ControlCommand.Pop)

    importThread = threading.Thread(target=importStack)
    importThread.start()
def importStackFromGlobString(self, globString):
    """
    The word 'glob' is used loosely here. See the OpStackLoader operator for details.
    """
    globString = globString.replace("\\", "/")
    info = DatasetInfo()
    info.filePath = globString

    # Allow labels by default if this gui isn't being used for batch data.
    info.allowLabels = (self.guiMode == GuiMode.Normal)

    def importStack():
        self.guiControlSignal.emit(ControlCommand.DisableAll)
        # Serializer will update the operator for us, which will propagate to the GUI.
        try:
            self.serializer.importStackAsLocalDataset(info)
        finally:
            self.guiControlSignal.emit(ControlCommand.Pop)

    req = Request(importStack)
    req.notify_failed(partial(self.handleFailedStackLoad, globString))
    req.submit()
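# Why the later importStackFromGlobString variant wraps the import in
# try/finally: the Pop signal must fire even when the import raises, or the
# GUI would stay disabled. A minimal stand-in using plain callables instead
# of Qt signals and the Request framework:
def guarded_import(do_import, disable_all, pop):
    disable_all()
    try:
        do_import()   # may raise; the failure handler deals with the error
    finally:
        pop()         # always re-enable the GUI

calls = []
try:
    guarded_import(lambda: 1 / 0,
                   lambda: calls.append('disable'),
                   lambda: calls.append('pop'))
except ZeroDivisionError:
    pass
assert calls == ['disable', 'pop']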
def addFileNames(self, fileNames, roleIndex, startingLane=None):
    """
    Add the given filenames to both the GUI table and the top-level operator inputs.
    If startingLane is None, the filenames will be *appended* to the role's list of files.
    """
    infos = []

    if startingLane is None:
        startingLane = self._findFirstEmptyLane(roleIndex)
        endingLane = startingLane + len(fileNames) - 1
    else:
        assert startingLane < len(self.topLevelOperator.DatasetGroup)
        endingLane = startingLane + len(fileNames) - 1

    if self._max_lanes and endingLane >= self._max_lanes:
        msg = "You may not add more than {} file(s) to this workflow. Please try again.".format(self._max_lanes)
        QMessageBox.critical(self, "Too many files", msg)
        return

    # Assign values to the new inputs we just allocated.
    # The GUI will be updated by callbacks that are listening to slot changes.
    for i, filePath in enumerate(fileNames):
        datasetInfo = DatasetInfo()
        cwd = self.topLevelOperator.WorkingDirectory.value

        if not areOnSameDrive(filePath, cwd):
            QMessageBox.critical(self, "Drive Error", "Data must be on same drive as working directory.")
            return

        absPath, relPath = getPathVariants(filePath, cwd)

        # Relative by default, unless the file is in a totally
        # different tree from the working directory.
        if len(os.path.commonprefix([cwd, absPath])) > 1:
            datasetInfo.filePath = relPath
        else:
            datasetInfo.filePath = absPath

        datasetInfo.nickname = PathComponents(absPath).filenameBase

        h5Exts = ['.ilp', '.h5', '.hdf5']
        if os.path.splitext(datasetInfo.filePath)[1] in h5Exts:
            datasetNames = self.getPossibleInternalPaths(absPath)
            if len(datasetNames) > 0:
                datasetInfo.filePath += str(datasetNames[0])
            else:
                raise RuntimeError("HDF5 file %s has no image datasets" % datasetInfo.filePath)

        # Allow labels by default if this gui isn't being used for batch data.
        datasetInfo.allowLabels = (self.guiMode == GuiMode.Normal)
        infos.append(datasetInfo)

    # If no exception was thrown, set up the operator now.
    opTop = self.topLevelOperator
    originalSize = len(opTop.DatasetGroup)
    if len(opTop.DatasetGroup) < endingLane + 1:
        opTop.DatasetGroup.resize(endingLane + 1)

    for laneIndex, info in zip(range(startingLane, endingLane + 1), infos):
        try:
            self.topLevelOperator.DatasetGroup[laneIndex][roleIndex].setValue(info)
        except DatasetConstraintError as ex:
            return_val = [False]
            # Give the user a chance to fix the problem.
            self.handleDatasetConstraintError(info, info.filePath, ex, roleIndex, laneIndex, return_val)
            if not return_val[0]:
                # Not successfully repaired. Roll back the changes and give up.
                opTop.DatasetGroup.resize(originalSize)
                break
        except OpDataSelection.InvalidDimensionalityError as ex:
            opTop.DatasetGroup.resize(originalSize)
            QMessageBox.critical(self, "Dataset has different dimensionality", ex.message)
            break
        except:
            QMessageBox.critical(self, "Dataset Load Error",
                                 "Wasn't able to load your dataset into the workflow. See console for details.")
            opTop.DatasetGroup.resize(originalSize)
            raise

    self.updateInternalPathVisiblity()
def addFileNames(self, fileNames, roleIndex, startingLane=None):
    """
    Add the given filenames to both the GUI table and the top-level operator inputs.
    If startingLane is None, the filenames will be *appended* to the role's list of files.
    """
    infos = []

    if startingLane is None:
        startingLane = self._findFirstEmptyLane(roleIndex)
        endingLane = startingLane + len(fileNames) - 1
    else:
        assert startingLane < len(self.topLevelOperator.DatasetGroup)
        endingLane = startingLane + len(fileNames) - 1

    # Assign values to the new inputs we just allocated.
    # The GUI will be updated by callbacks that are listening to slot changes.
    for i, filePath in enumerate(fileNames):
        datasetInfo = DatasetInfo()
        cwd = self.topLevelOperator.WorkingDirectory.value

        if not areOnSameDrive(filePath, cwd):
            QMessageBox.critical(self, "Drive Error", "Data must be on same drive as working directory.")
            return

        absPath, relPath = getPathVariants(filePath, cwd)

        # Relative by default, unless the file is in a totally
        # different tree from the working directory.
        if len(os.path.commonprefix([cwd, absPath])) > 1:
            datasetInfo.filePath = relPath
        else:
            datasetInfo.filePath = absPath

        datasetInfo.nickname = PathComponents(absPath).filenameBase

        h5Exts = ['.ilp', '.h5', '.hdf5']
        if os.path.splitext(datasetInfo.filePath)[1] in h5Exts:
            datasetNames = self.getPossibleInternalPaths(absPath)
            if len(datasetNames) > 0:
                datasetInfo.filePath += str(datasetNames[0])
            else:
                raise RuntimeError("HDF5 file %s has no image datasets" % datasetInfo.filePath)

        # Allow labels by default if this gui isn't being used for batch data.
        datasetInfo.allowLabels = (self.guiMode == GuiMode.Normal)
        infos.append(datasetInfo)

    # If no exception was thrown, set up the operator now.
    opTop = self.topLevelOperator
    originalSize = len(opTop.DatasetGroup)
    if len(opTop.DatasetGroup) < endingLane + 1:
        opTop.DatasetGroup.resize(endingLane + 1)

    for laneIndex, info in zip(range(startingLane, endingLane + 1), infos):
        try:
            self.topLevelOperator.DatasetGroup[laneIndex][roleIndex].setValue(info)
        except DatasetConstraintError as ex:
            # Give the user a chance to fix the problem.
            if not self.handleDatasetConstraintError(info, info.filePath, ex, roleIndex, laneIndex):
                opTop.DatasetGroup.resize(originalSize)
                break
        except:
            QMessageBox.critical(self, "Dataset Load Error",
                                 "Wasn't able to load your dataset into the workflow. See console for details.")
            opTop.DatasetGroup.resize(originalSize)
            raise

    self.updateInternalPathVisiblity()
def addFileNames(self, fileNames, roleIndex, startingLane=None):
    """
    Add the given filenames to both the GUI table and the top-level operator inputs.
    If startingLane is None, the filenames will be *appended* to the role's list of files.
    """
    infos = []

    if startingLane is None or startingLane == -1:
        startingLane = len(self.topLevelOperator.DatasetGroup)
        endingLane = startingLane + len(fileNames) - 1
    else:
        assert startingLane < len(self.topLevelOperator.DatasetGroup)
        max_files = len(self.topLevelOperator.DatasetGroup) - startingLane
        if len(fileNames) > max_files:
            msg = ("You selected {num_selected} files for {num_slots} "
                   "slots. To add new files use the 'Add new...' option "
                   "in the context menu or the button in the last row."
                   .format(num_selected=len(fileNames), num_slots=max_files))
            QMessageBox.critical(self, "Too many files", msg)
            return
        endingLane = min(startingLane + len(fileNames) - 1,
                         len(self.topLevelOperator.DatasetGroup))

    if self._max_lanes and endingLane >= self._max_lanes:
        msg = "You may not add more than {} file(s) to this workflow. Please try again.".format(self._max_lanes)
        QMessageBox.critical(self, "Too many files", msg)
        return

    # Assign values to the new inputs we just allocated.
    # The GUI will be updated by callbacks that are listening to slot changes.
    for i, filePath in enumerate(fileNames):
        datasetInfo = DatasetInfo()
        cwd = self.topLevelOperator.WorkingDirectory.value
        absPath, relPath = getPathVariants(filePath, cwd)

        # Relative by default, unless the file is in a totally
        # different tree from the working directory.
        if relPath is not None and len(os.path.commonprefix([cwd, absPath])) > 1:
            datasetInfo.filePath = relPath
        else:
            datasetInfo.filePath = absPath

        datasetInfo.nickname = PathComponents(absPath).filenameBase

        h5Exts = ['.ilp', '.h5', '.hdf5']
        if os.path.splitext(datasetInfo.filePath)[1] in h5Exts:
            datasetNames = self.getPossibleInternalPaths(absPath)
            if len(datasetNames) > 0:
                datasetInfo.filePath += str(datasetNames[0])
            else:
                raise RuntimeError("HDF5 file %s has no image datasets" % datasetInfo.filePath)

        # Allow labels by default if this gui isn't being used for batch data.
        datasetInfo.allowLabels = (self.guiMode == GuiMode.Normal)
        infos.append(datasetInfo)

    # If no exception was thrown, set up the operator now.
    opTop = self.topLevelOperator
    originalSize = len(opTop.DatasetGroup)
    if len(opTop.DatasetGroup) < endingLane + 1:
        opTop.DatasetGroup.resize(endingLane + 1)

    for laneIndex, info in zip(range(startingLane, endingLane + 1), infos):
        try:
            self.topLevelOperator.DatasetGroup[laneIndex][roleIndex].setValue(info)
        except DatasetConstraintError as ex:
            return_val = [False]
            # Give the user a chance to fix the problem.
            self.handleDatasetConstraintError(info, info.filePath, ex, roleIndex, laneIndex, return_val)
            if not return_val[0]:
                # Not successfully repaired. Roll back the changes and give up.
                opTop.DatasetGroup.resize(originalSize)
                break
        except OpDataSelection.InvalidDimensionalityError as ex:
            opTop.DatasetGroup.resize(originalSize)
            QMessageBox.critical(self, "Dataset has different dimensionality", ex.message)
            break
        except:
            QMessageBox.critical(self, "Dataset Load Error",
                                 "Wasn't able to load your dataset into the workflow. See console for details.")
            opTop.DatasetGroup.resize(originalSize)
            raise

    # If we succeeded in adding all images, show the first one.
    if laneIndex == endingLane:
        self.showDataset(startingLane, roleIndex)

    # Notify the workflow that something that could affect applet readiness has occurred.
    self.parentApplet.appletStateUpdateRequested.emit()

    self.updateInternalPathVisiblity()
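# The lane-rollback pattern shared by the addFileNames variants above, in
# miniature: grow the slot list optimistically, then shrink it back to its
# original size if any assignment is rejected. FakeSlotList is a
# hypothetical stand-in for the real DatasetGroup multi-slot.
class FakeSlotList(list):
    def resize(self, n):
        del self[n:]
        self.extend(None for _ in range(n - len(self)))

lanes = FakeSlotList(['existing-lane'])
originalSize = len(lanes)
lanes.resize(3)                                # optimistic resize
try:
    raise ValueError("constraint violated")    # simulated DatasetConstraintError
except ValueError:
    lanes.resize(originalSize)                 # roll back
assert list(lanes) == ['existing-lane']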
def _readDatasetInfo(self, infoGroup, localDataGroup, projectFilePath, headless):
    # Unready datasets are represented with an empty group.
    if len(infoGroup) == 0:
        return None, False

    datasetInfo = DatasetInfo()

    # Make a reverse-lookup of the location storage strings.
    LocationLookup = {v: k for k, v in self.LocationStrings.items()}
    datasetInfo.location = LocationLookup[str(infoGroup['location'].value)]

    # Write to the 'private' members to avoid resetting the dataset id.
    datasetInfo._filePath = infoGroup['filePath'].value
    datasetInfo._datasetId = infoGroup['datasetId'].value

    try:
        datasetInfo.allowLabels = infoGroup['allowLabels'].value
    except KeyError:
        pass

    try:
        datasetInfo.drange = tuple(infoGroup['drange'].value)
    except KeyError:
        pass

    try:
        datasetInfo.nickname = infoGroup['nickname'].value
    except KeyError:
        datasetInfo.nickname = PathComponents(datasetInfo.filePath).filenameBase

    try:
        tags = vigra.AxisTags.fromJSON(infoGroup['axistags'].value)
        datasetInfo.axistags = tags
    except KeyError:
        # Old projects just have an 'axisorder' field instead of full axistags.
        try:
            axisorder = infoGroup['axisorder'].value
            datasetInfo.axistags = vigra.defaultAxistags(axisorder)
        except KeyError:
            pass

    # If the data is supposed to be in the project, check for it now.
    if datasetInfo.location == DatasetInfo.Location.ProjectInternal:
        if datasetInfo.datasetId not in localDataGroup.keys():
            raise RuntimeError("Corrupt project file. Could not find data for " + infoGroup.name)

    dirty = False
    # If the data is supposed to exist outside the project, make sure it really does.
    if datasetInfo.location == DatasetInfo.Location.FileSystem and not isUrl(datasetInfo.filePath):
        pathData = PathComponents(datasetInfo.filePath, os.path.split(projectFilePath)[0])
        filePath = pathData.externalPath
        if not os.path.exists(filePath):
            if headless:
                raise RuntimeError("Could not find data at " + filePath)
            filt = "Image files (" + ' '.join('*.' + x for x in OpDataSelection.SupportedExtensions) + ')'
            newpath = self.repairFile(filePath, filt)
            if pathData.internalPath is not None:
                newpath += pathData.internalPath
            datasetInfo._filePath = getPathVariants(newpath, os.path.split(projectFilePath)[0])[0]
            dirty = True

    return datasetInfo, dirty
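# The layered axis-metadata fallback used in _readDatasetInfo, shown with a
# plain dict standing in for the HDF5 info group: prefer the modern
# 'axistags' field, fall back to the legacy 'axisorder' field, and leave the
# default in place when neither exists.
def read_axes(infoGroup, default=None):
    try:
        return ('axistags', infoGroup['axistags'])
    except KeyError:
        try:
            return ('axisorder', infoGroup['axisorder'])
        except KeyError:
            return ('default', default)

assert read_axes({'axistags': '{"axes": []}'})[0] == 'axistags'
assert read_axes({'axisorder': 'xyzc'})[0] == 'axisorder'
assert read_axes({})[0] == 'default'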
def _readDatasetInfo(self, infoGroup, localDataGroup, projectFilePath, headless):
    # Unready datasets are represented with an empty group.
    if len(infoGroup) == 0:
        return None, False

    datasetInfo = DatasetInfo()

    # Make a reverse-lookup of the location storage strings.
    LocationLookup = {v: k for k, v in self.LocationStrings.items()}
    datasetInfo.location = LocationLookup[str(infoGroup['location'].value)]

    # Write to the 'private' members to avoid resetting the dataset id.
    datasetInfo._filePath = infoGroup['filePath'].value
    datasetInfo._datasetId = infoGroup['datasetId'].value

    try:
        datasetInfo.allowLabels = infoGroup['allowLabels'].value
    except KeyError:
        pass

    try:
        datasetInfo.drange = tuple(infoGroup['drange'].value)
    except KeyError:
        pass

    try:
        datasetInfo.nickname = infoGroup['nickname'].value
    except KeyError:
        datasetInfo.nickname = PathComponents(datasetInfo.filePath).filenameBase

    try:
        datasetInfo.fromstack = infoGroup['fromstack'].value
    except KeyError:
        # Guess based on the storage setting and original filepath.
        datasetInfo.fromstack = (datasetInfo.location == DatasetInfo.Location.ProjectInternal
                                 and (('?' in datasetInfo._filePath)
                                      or (os.path.pathsep in datasetInfo._filePath)))

    try:
        tags = vigra.AxisTags.fromJSON(infoGroup['axistags'].value)
        datasetInfo.axistags = tags
    except KeyError:
        # Old projects just have an 'axisorder' field instead of full axistags.
        try:
            axisorder = infoGroup['axisorder'].value
            datasetInfo.axistags = vigra.defaultAxistags(axisorder)
        except KeyError:
            pass

    try:
        start, stop = map(tuple, infoGroup['subvolume_roi'].value)
        datasetInfo.subvolume_roi = (start, stop)
    except KeyError:
        pass

    # If the data is supposed to be in the project, check for it now.
    if datasetInfo.location == DatasetInfo.Location.ProjectInternal:
        if datasetInfo.datasetId not in localDataGroup.keys():
            raise RuntimeError("Corrupt project file. Could not find data for " + infoGroup.name)

    dirty = False
    # If the data is supposed to exist outside the project, make sure it really does.
    if datasetInfo.location == DatasetInfo.Location.FileSystem and not isUrl(datasetInfo.filePath):
        pathData = PathComponents(datasetInfo.filePath, os.path.split(projectFilePath)[0])
        filePath = pathData.externalPath
        if not os.path.exists(filePath):
            if headless:
                raise RuntimeError("Could not find data at " + filePath)
            filt = "Image files (" + ' '.join('*.' + x for x in OpDataSelection.SupportedExtensions) + ')'
            newpath = self.repairFile(filePath, filt)
            if pathData.internalPath is not None:
                newpath += pathData.internalPath
            datasetInfo._filePath = getPathVariants(newpath, os.path.split(projectFilePath)[0])[0]
            dirty = True

    return datasetInfo, dirty
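# The fromstack guess from the newer _readDatasetInfo, isolated: old project
# files did not store the flag, so it is inferred from the storage location
# plus telltale stack markers ('?' glob placeholders, or the os.path.pathsep
# separator that addStack uses to join stack filenames).
import os

def guess_fromstack(location_is_project_internal, filePath):
    return location_is_project_internal and (
        '?' in filePath or os.path.pathsep in filePath)

assert guess_fromstack(True, 'slice_0?.png')
assert guess_fromstack(True, os.path.pathsep.join(['a.png', 'b.png']))
assert not guess_fromstack(True, 'volume.h5/data')
assert not guess_fromstack(False, 'slice_0?.png')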