def _applyInternalPathToTempOps(self, index):
    """
    Apply the internal (in-file) dataset path currently selected in
    ``self.internalDatasetNameComboBox`` to every temp operator's DatasetInfo.

    If any operator rejects the new settings, ALL operators are reverted to
    the settings they had on entry and the user is shown a warning dialog.

    :param index: Combo-box index that triggered the update.
                  -1 means "no selection": nothing is applied.
    :return: True if the new path was applied to every lane, False otherwise.
    """
    if index == -1:
        # Nothing selected, nothing to do.
        # Bugfix: previously this was a bare ``return`` (None), inconsistent
        # with the bool returned on every other path.  False preserves the
        # falsy value callers may already rely on.
        return False
    newInternalPath = str(self.internalDatasetNameComboBox.currentText())

    # Save a copy of our settings so we can roll back on failure.
    oldInfos = {}
    for laneIndex, op in self.tempOps.items():
        oldInfos[laneIndex] = copy.copy(op.Dataset.value)

    # Attempt to apply to all temp operators
    try:
        for op in self.tempOps.values():
            info = copy.copy(op.Dataset.value)
            pathComponents = PathComponents(info.filePath)
            if pathComponents.internalPath != newInternalPath:
                pathComponents.internalPath = newInternalPath
                info.filePath = pathComponents.totalPath()
                op.Dataset.setValue(info)
        self._error_fields.discard('Internal Dataset Name')
        return True
    except Exception as e:
        # Revert everything back to the previous state
        for laneIndex, op in self.tempOps.items():
            op.Dataset.setValue(oldInfos[laneIndex])
        traceback.print_exc()
        msg = "Could not set new internal path settings due to an exception:\n"
        msg += "{}".format(e)
        QMessageBox.warning(self, "Error", msg)
        self._error_fields.add('Internal Dataset Name')
        return False
def _applyInternalPathToTempOps(self, index):
    """
    Push the internal dataset path selected in the combo box into the
    DatasetInfo of every temp operator.

    On any failure, every operator is restored to its saved settings and a
    warning dialog is shown.  Returns True when the settings were applied to
    all lanes, False when they had to be rolled back.

    :param index: Combo-box index that triggered the update; -1 (no
                  selection) is a no-op.
    """
    if index == -1:
        return
    newInternalPath = str(self.internalDatasetNameComboBox.currentText())

    # Snapshot the current settings of every lane so a failure can be undone.
    oldInfos = {lane: copy.copy(op.Dataset.value)
                for lane, op in self.tempOps.items()}

    try:
        # Rewrite each lane's file path with the new internal component.
        for op in self.tempOps.values():
            info = copy.copy(op.Dataset.value)
            components = PathComponents(info.filePath)
            if components.internalPath != newInternalPath:
                components.internalPath = newInternalPath
                info.filePath = components.totalPath()
                op.Dataset.setValue(info)
        self._error_fields.discard('Internal Dataset Name')
        return True
    except Exception as e:
        # Roll back: restore every operator's previous settings.
        for lane, op in self.tempOps.items():
            op.Dataset.setValue(oldInfos[lane])
        traceback.print_exc()
        msg = ("Could not set new internal path settings due to an exception:\n"
               + "{}".format(e))
        QMessageBox.warning(self, "Error", msg)
        self._error_fields.add('Internal Dataset Name')
        return False
def generateBatchPredictions(workflow, batchInputPaths, batchExportDir, batchOutputSuffix, exportedDatasetName):
    """
    Compute the predictions for each of the specified batch input files,
    and export them to corresponding h5 files.
    """
    batchInputPaths = convertStacksToH5(batchInputPaths)

    # Build a DatasetInfo for every input, using absolute external paths
    # (relative paths would be resolved against the project file, which
    #  probably isn't what the user meant).
    batchInputInfos = []
    for inputPath in batchInputPaths:
        components = PathComponents(inputPath)
        components.externalPath = os.path.abspath(components.externalPath)
        info = DatasetInfo()
        info.location = DatasetInfo.Location.FileSystem
        info.filePath = components.totalPath()
        batchInputInfos.append(info)

    # Configure batch input operator
    opBatchInputs = workflow.batchInputApplet.topLevelOperator
    opBatchInputs.Dataset.setValues(batchInputInfos)

    # Configure batch export operator
    opBatchResults = workflow.batchResultsApplet.topLevelOperator
    opBatchResults.ExportDirectory.setValue(batchExportDir)
    opBatchResults.Format.setValue(ExportFormat.H5)
    opBatchResults.Suffix.setValue(batchOutputSuffix)
    opBatchResults.InternalPath.setValue(exportedDatasetName)
    opBatchResults.SelectedSlices.setValue([30])

    logger.info("Exporting data to " + opBatchResults.OutputDataPath[0].value)

    # Set up progress display handling (just logging for now)
    currentProgress = [None]
    def handleProgress(percentComplete):
        if currentProgress[0] != percentComplete:
            currentProgress[0] = percentComplete
            logger.info("Batch job: {}% complete.".format(percentComplete))
    progressSignal = opBatchResults.ProgressSignal[0].value
    progressSignal.subscribe(handleProgress)

    # Make it happen!  Requesting the result value drives the export.
    result = opBatchResults.ExportResult[0].value
    return result
def generateBatchPredictions(workflow, batchInputPaths, batchExportDir, batchOutputSuffix, exportedDatasetName, stackVolumeCacheDir):
    """
    Compute the predictions for each of the specified batch input files,
    and export them to corresponding h5 files.
    """
    originalBatchInputPaths = list(batchInputPaths)
    batchInputPaths = convertStacksToH5(batchInputPaths, stackVolumeCacheDir)

    # Build a DatasetInfo for every input, using absolute external paths
    # (relative paths would be resolved against the project file, which
    #  probably isn't what the user meant).
    batchInputInfos = []
    for inputPath in batchInputPaths:
        components = PathComponents(inputPath)
        components.externalPath = os.path.abspath(components.externalPath)
        info = DatasetInfo()
        info.location = DatasetInfo.Location.FileSystem
        info.filePath = components.totalPath()
        batchInputInfos.append(info)

    # Also convert the export dir to absolute (for the same reason)
    if batchExportDir != '':
        batchExportDir = os.path.abspath(batchExportDir)

    # Configure batch input operator
    opBatchInputs = workflow.batchInputApplet.topLevelOperator
    opBatchInputs.Dataset.setValues(batchInputInfos)

    # Configure batch export operator
    opBatchResults = workflow.batchResultsApplet.topLevelOperator

    # By default, the batch export operator names its output files after the
    # input files.  If any stacks were converted to hdf5 above, the user
    # won't recognize those converted names -- so override the output file
    # names with the *original* input file names.
    outputFileNameBases = [origPath.replace('*', 'STACKED')
                           for origPath in originalBatchInputPaths]
    opBatchResults.OutputFileNameBase.setValues(outputFileNameBases)

    opBatchResults.ExportDirectory.setValue(batchExportDir)
    opBatchResults.Format.setValue(ExportFormat.H5)
    opBatchResults.Suffix.setValue(batchOutputSuffix)
    opBatchResults.InternalPath.setValue(exportedDatasetName)

    logger.info("Exporting data to " + opBatchResults.OutputDataPath[0].value)

    # Set up progress display handling (just logging for now)
    currentProgress = [None]
    def handleProgress(percentComplete):
        if currentProgress[0] != percentComplete:
            currentProgress[0] = percentComplete
            logger.info("Batch job: {}% complete.".format(percentComplete))
    progressSignal = opBatchResults.ProgressSignal[0].value
    progressSignal.subscribe(handleProgress)

    # Make it happen!  Requesting the result value drives the export.
    return opBatchResults.ExportResult[0].value
def generateBatchPredictions(workflow, batchInputPaths, batchExportDir, batchOutputSuffix, exportedDatasetName, stackVolumeCacheDir):
    """
    Compute the predictions for each of the specified batch input files, and export them to corresponding h5 files.

    :param workflow: The workflow object; its batchInputApplet and batchResultsApplet top-level operators are configured here.
    :param batchInputPaths: Paths of the input files/stacks to process.
    :param batchExportDir: Directory to export results into ('' leaves the operator's default behavior).
    :param batchOutputSuffix: Suffix appended to each exported file name.
    :param exportedDatasetName: Internal (in-file) dataset name for the exported h5 data.
    :param stackVolumeCacheDir: Directory used by convertStacksToH5 when converting stacks.
    :return: The value of the export operator's ExportResult slot (semantics defined by the operator, not visible here).
    """
    # Keep the pre-conversion paths: they are used below to name the output files.
    originalBatchInputPaths = list(batchInputPaths)
    batchInputPaths = convertStacksToH5(batchInputPaths, stackVolumeCacheDir)
    batchInputInfos = []
    for p in batchInputPaths:
        info = DatasetInfo()
        info.location = DatasetInfo.Location.FileSystem
        # Convert all paths to absolute
        # (otherwise they are relative to the project file, which probably isn't what the user meant)
        comp = PathComponents(p)
        comp.externalPath = os.path.abspath(comp.externalPath)
        info.filePath = comp.totalPath()
        batchInputInfos.append(info)
    # Also convert the export dir to absolute (for the same reason)
    if batchExportDir != '':
        batchExportDir = os.path.abspath(batchExportDir)
    # Configure batch input operator
    opBatchInputs = workflow.batchInputApplet.topLevelOperator
    # Resize the multislot first, then assign one DatasetInfo per lane.
    opBatchInputs.DatasetGroup.resize(len(batchInputInfos))
    for info, multislot in zip(batchInputInfos, opBatchInputs.DatasetGroup):
        # FIXME: This assumes that the workflow has exactly one dataset role.
        multislot[0].setValue(info)
    # Configure batch export operator
    opBatchResults = workflow.batchResultsApplet.topLevelOperator
    # By default, the output files from the batch export operator
    # are named using the input file name.
    # If we converted any stacks to hdf5, then the user won't recognize the input file name.
    # Let's override the output file name using the *original* input file names.
    outputFileNameBases = []
    for origPath in originalBatchInputPaths:
        outputFileNameBases.append(origPath.replace('*', 'STACKED'))
    opBatchResults.OutputFileNameBase.setValues(outputFileNameBases)
    opBatchResults.ExportDirectory.setValue(batchExportDir)
    opBatchResults.Format.setValue(ExportFormat.H5)
    opBatchResults.Suffix.setValue(batchOutputSuffix)
    opBatchResults.InternalPath.setValue(exportedDatasetName)
    logger.info("Exporting data to " + opBatchResults.OutputDataPath[0].value)
    # Set up progress display handling (just logging for now)
    currentProgress = [None]
    def handleProgress(percentComplete):
        # Only log when the percentage actually changes.
        if currentProgress[0] != percentComplete:
            currentProgress[0] = percentComplete
            logger.info("Batch job: {}% complete.".format(percentComplete))
    progressSignal = opBatchResults.ProgressSignal[0].value
    progressSignal.subscribe(handleProgress)
    # Make it happen!
    # Requesting the slot value is what drives the export computation.
    result = opBatchResults.ExportResult[0].value
    return result