def run_export(self):
    role_names = self.parentApplet.dataSelectionApplet.topLevelOperator.DatasetRoles.value

    # Prepare file lists in an OrderedDict
    role_path_dict = OrderedDict()
    role_path_dict[0] = BatchProcessingGui.get_all_item_strings(self.list_widgets[0])
    num_datasets = len(role_path_dict[0])

    for role_index, list_widget in enumerate(self.list_widgets[1:], start=1):
        role_path_dict[role_index] = BatchProcessingGui.get_all_item_strings(self.list_widgets[role_index])
        assert len(role_path_dict[role_index]) <= num_datasets, \
            "Too many files given for role: '{}'".format(role_names[role_index])
        if len(role_path_dict[role_index]) < num_datasets:
            role_path_dict[role_index] += [None] * (num_datasets - len(role_path_dict[role_index]))

    # Run the export in a separate thread
    export_req = Request(partial(self.parentApplet.run_export, role_path_dict))
    export_req.notify_failed(self.handle_batch_processing_failure)
    export_req.notify_finished(self.handle_batch_processing_finished)
    export_req.notify_cancelled(self.handle_batch_processing_cancelled)
    self.export_req = export_req

    self.parentApplet.busy = True
    self.parentApplet.appletStateUpdateRequested()
    self.cancel_button.setVisible(True)
    self.run_button.setEnabled(False)

    # Start the export
    export_req.submit()

def run_export(self):
    role_names = self.parentApplet.dataSelectionApplet.role_names

    # Prepare the file lists for each role
    role_inputs = {
        role_name: self._data_role_widgets[role_name].filepaths
        for role_name in role_names
    }
    if all(len(role_inp) == 0 for role_inp in role_inputs.values()):
        return

    # Run the export in a separate thread
    lane_configs = self.parentApplet.dataSelectionApplet.create_lane_configs(role_inputs=role_inputs)

    export_req = Request(partial(self.parentApplet.run_export, lane_configs=lane_configs))
    export_req.notify_failed(self.handle_batch_processing_failure)
    export_req.notify_finished(self.handle_batch_processing_finished)
    export_req.notify_cancelled(self.handle_batch_processing_cancelled)
    self.export_req = export_req

    self.parentApplet.busy = True
    self.parentApplet.appletStateUpdateRequested()
    self.cancel_button.setVisible(True)
    self.run_button.setEnabled(False)

    # Start the export
    export_req.submit()

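# Editor's note: the run_export / export_object_data / exportFinal* variants in
# this file all share one lazyflow Request pattern: wrap the work in a Request,
# register failed/finished/cancelled callbacks, then submit so the work runs on
# a worker thread. The function below is a minimal, self-contained sketch of
# that pattern (not part of ilastik); it only uses Request calls already shown
# above, and `do_work` is a hypothetical stand-in for the real export callable.
def _request_pattern_sketch():
    from functools import partial
    from lazyflow.request import Request

    results = []

    def do_work(payload):
        # Stand-in for e.g. parentApplet.run_export(...)
        return len(payload)

    def on_finished(result):
        results.append(result)

    def on_failed(exc, exc_info):
        results.append(exc)

    def on_cancelled():
        results.append("cancelled")

    req = Request(partial(do_work, [1, 2, 3]))
    req.notify_finished(on_finished)
    req.notify_failed(on_failed)
    req.notify_cancelled(on_cancelled)
    req.submit()  # start execution on the lazyflow worker pool
    return req    # keep a handle so a caller could still req.cancel()
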
def export_object_data(self, settings, selected_features, gui=None):
    """
    Initialize progress displays and start the actual export in a new thread
    using the lazyflow.request framework

    :param settings: the settings from the GUI export dialog
    :type settings: dict
    :param selected_features: the features to export from the GUI dialog
    :type selected_features: list
    :param gui: the Progress bar and callbacks for finish/fail/cancel
                see ExportingGui.show_export_dialog
    :type gui: dict
    """
    self.save_export_progress_dialog(None)
    if gui is None or "dialog" not in gui:
        progress_display = ProgressPrinter("Export Progress", xrange(100, -1, -5), 2)
    else:
        progress_display = gui["dialog"]
    self.save_export_progress_dialog(progress_display)

    export = partial(self.do_export, settings, selected_features, progress_display)
    request = Request(export)
    request.notify_failed(gui["fail"] if gui is not None and "fail" in gui else self.export_failed)
    request.notify_failed(self.export_failed)
    request.notify_finished(gui["ok"] if gui is not None and "ok" in gui else self.export_finished)
    request.notify_cancelled(gui["cancel"] if gui is not None and "cancel" in gui else self.export_cancelled)
    request.submit()

    if gui is not None and "dialog" in gui:
        progress_display.cancel.connect(request.cancel)

def exportFinalSegmentation(self, outputPath, axisorder, progressCallback=None):
    assert self.FinalSegmentation.ready(), "Can't export yet: The final segmentation isn't ready!"
    logger.info("Starting Final Segmentation Export...")

    opTranspose = OpReorderAxes(parent=self)
    opTranspose.AxisOrder.setValue(axisorder)
    opTranspose.Input.connect(self.FinalSegmentation)

    f = h5py.File(outputPath, 'w')
    opExporter = OpH5WriterBigDataset(parent=self)
    opExporter.hdf5File.setValue(f)
    opExporter.hdf5Path.setValue('split_result')
    opExporter.Image.connect(opTranspose.Output)
    if progressCallback is not None:
        opExporter.progressSignal.subscribe(progressCallback)

    req = Request(partial(self._runExporter, opExporter))

    def cleanOps():
        opExporter.cleanUp()
        opTranspose.cleanUp()

    def handleFailed(exc, exc_info):
        cleanOps()
        f.close()
        import traceback
        traceback.print_tb(exc_info[2])
        msg = "Final Segmentation export FAILED due to the following error:\n{}".format(exc)
        logger.error(msg)

    def handleFinished(result):
        try:
            cleanOps()
            logger.info("FINISHED Final Segmentation Export")
        finally:
            f.close()

    def handleCancelled():
        cleanOps()
        f.close()
        logger.info("Final Segmentation export was cancelled!")

    req.notify_failed(handleFailed)
    req.notify_finished(handleFinished)
    req.notify_cancelled(handleCancelled)
    req.submit()
    return req  # Returned in case the user wants to cancel it.

def get(self, roi):
    """
    This method is used to retrieve the actual content of a Slot.

    :param roi: the region of interest, e.g. a subregion in the case of
        an ArrayLike stype

    Returns:
        a request.Request object.
    """
    if self._value is not None:
        # this handles the case of an inputslot having a ._value
        # --> construct cheaper request object for this case
        result = self.stype.writeIntoDestination(None, self._value, roi)
        return ValueRequest(result)
    elif self.partner is not None:
        # this handles the case of an inputslot
        # --> just relay the request
        return self.partner.get(roi)
    else:
        if not self.ready():
            # Something is wrong. Are we cancelled?
            Request.raise_if_cancelled()
            msg = ("Can't get data from slot {}.{} yet. It isn't ready. "
                   "First upstream problem slot is: {}")
            msg = msg.format(self.getRealOperator().__class__, self.name, Slot._findUpstreamProblemSlot(self))
            raise Slot.SlotNotReadyError(msg)

        # If someone is asking for data from an inputslot that has
        # no value and no partner, then something is wrong.
        if self._type == "input":
            # Something is wrong. Are we cancelled?
            Request.raise_if_cancelled()
            assert self._type != "input", "This inputSlot has no value and no partner. You can't ask for its data yet!"

        # normal (outputslot) case
        # --> construct heavy request object..
        execWrapper = Slot.RequestExecutionWrapper(self, roi)
        request = Request(execWrapper)

        # We must decrement the execution count even if the request is cancelled
        request.notify_cancelled(execWrapper.handleCancel)
        return request

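# Editor's note: Slot.get() above returns a request object (a full Request for
# the normal output-slot case, or a lightweight ValueRequest when the slot
# already holds a value). A hedged sketch of the usual consumer side, assuming
# the standard lazyflow idiom of blocking on the request with wait();
# `some_slot` and `roi` are placeholders supplied by an existing operator graph.
def _consume_slot_request_sketch(some_slot, roi):
    req = some_slot.get(roi)  # a Request (or ValueRequest) wrapping the data
    result = req.wait()       # blocks until the requested data is available
    return result
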
def run(self, opExport):
    """
    Start the export and return immediately (after showing the progress dialog).

    :param opExport: The export object to execute.
                     It must have a 'run_export()' method and a 'progressSignal' member.
    """
    progressDlg = MultiStepProgressDialog(parent=self.parent())
    progressDlg.setNumberOfSteps(1)

    def _forwardProgressToGui(progress):
        self._forwardingSignal.emit(partial(progressDlg.setStepProgress, progress))
    opExport.progressSignal.subscribe(_forwardProgressToGui)

    def _onFinishExport(*args):  # Also called on cancel
        self._forwardingSignal.emit(progressDlg.finishStep)

    def _onFail(exc, exc_info):
        import traceback
        traceback.print_tb(exc_info[2])
        msg = "Failed to export layer due to the following error:\n{}".format(exc)
        self._forwardingSignal.emit(partial(QMessageBox.critical, self.parent(), "Export Failed", msg))
        self._forwardingSignal.emit(progressDlg.setFailed)

    # Use a request to execute in the background
    req = Request(opExport.run_export)
    req.notify_cancelled(_onFinishExport)
    req.notify_finished(_onFinishExport)
    req.notify_failed(_onFail)

    # Allow cancel.
    progressDlg.rejected.connect(req.cancel)

    # Start the export
    req.submit()

    # Execute the progress dialog
    # (We can block the thread here because the QDialog spins up its own event loop.)
    progressDlg.exec_()

def test_cancellation_behavior():
    """
    If a request is cancelled while it was waiting on a lock,
    it should raise the CancellationException.
    """
    lock = RequestLock()
    lock.acquire()

    def f():
        try:
            with lock:
                assert False
        except Request.CancellationException:
            pass
        else:
            assert False

    finished = [False]
    cancelled = [False]
    failed = [False]

    def handle_finished(result):
        finished[0] = True

    def handle_cancelled():
        cancelled[0] = True

    def handle_failed(*args):
        failed[0] = True

    req = Request(f)
    req.notify_finished(handle_finished)
    req.notify_failed(handle_failed)
    req.notify_cancelled(handle_cancelled)

    req.submit()
    req.cancel()
    time.sleep(0.1)

    lock.release()
    time.sleep(0.1)

    assert not finished[0] and not failed[0] and cancelled[0]

def run_export(self):
    role_names = self.parentApplet.dataSelectionApplet.topLevelOperator.DatasetRoles.value

    # Prepare file lists in an OrderedDict
    role_path_dict = OrderedDict(
        (role_name, self._data_role_widgets[role_name].filepaths) for role_name in role_names
    )
    dominant_role_name = role_names[0]
    num_paths = len(role_path_dict[dominant_role_name])
    if num_paths == 0:
        return

    for role_name in role_names[1:]:
        paths = role_path_dict[role_name]
        if len(paths) == 0:
            role_path_dict[role_name] = [None] * num_paths
        if len(role_path_dict[role_name]) != num_paths:
            raise BatchProcessingDataConstraintException(
                f"Number of files for role {role_name!r} does not match! "
                f"Expected {num_paths} files."
            )

    # Run the export in a separate thread
    export_req = Request(partial(self.parentApplet.run_export, role_path_dict))
    export_req.notify_failed(self.handle_batch_processing_failure)
    export_req.notify_finished(self.handle_batch_processing_finished)
    export_req.notify_cancelled(self.handle_batch_processing_cancelled)
    self.export_req = export_req

    self.parentApplet.busy = True
    self.parentApplet.appletStateUpdateRequested()
    self.cancel_button.setVisible(True)
    self.run_button.setEnabled(False)

    # Start the export
    export_req.submit()

def export_object_data(self, lane_index, show_gui=False, filename_suffix=""):
    """
    Initialize progress displays and start the actual export in a new thread
    using the lazyflow.request framework

    :param lane_index: Index of the lane to be exported
    :type lane_index: int
    :param show_gui: whether or not to show the progress GUI
    :type show_gui: bool
    :param filename_suffix: if provided, appended to the filename (before the extension)
    :type filename_suffix: str
    """
    settings, selected_features = self.get_table_export_settings()

    self.save_export_progress_dialog(None)
    if not show_gui:
        progress_display = ProgressPrinter("Export Progress", xrange(100, -1, -5), 2)
        gui = None
    else:
        from ilastik.widgets.progressDialog import ProgressDialog
        progress = ProgressDialog(["Feature Data", "Labeling Rois", "Raw Image", "Exporting"])
        progress.set_busy(True)
        progress.show()
        gui = {
            "dialog": progress,
            "ok": partial(progress.safe_popup, "information", "Information", "Export successful!"),
            "cancel": partial(progress.safe_popup, "information", "Information", "Export cancelled!"),
            "fail": partial(progress.safe_popup, "critical", "Critical", "Export failed!"),
            "unlock": self.unlock_gui,
            "lock": self.lock_gui,
        }
        progress_display = gui["dialog"]
    self.save_export_progress_dialog(progress_display)

    export = partial(self.do_export, settings, selected_features, progress_display, lane_index, filename_suffix)
    request = Request(export)
    if gui is not None:
        if "fail" in gui:
            request.notify_failed(gui["fail"])
        if "ok" in gui:
            request.notify_finished(gui["ok"])
        if "cancel" in gui:
            request.notify_cancelled(gui["cancel"])
        if "unlock" in gui:
            request.notify_cancelled(gui["unlock"])
            request.notify_failed(gui["unlock"])
            request.notify_finished(gui["unlock"])
        if "lock" in gui:
            lock = gui["lock"]
            lock()
    request.notify_failed(self.export_failed)
    request.notify_finished(self.export_finished)
    request.notify_cancelled(self.export_cancelled)
    request.submit()

    if gui is not None and "dialog" in gui:
        progress_display.cancel.connect(request.cancel)

    return request

def export_object_data(self, settings, selected_features, gui=None):
    """
    Initialize progress displays and start the actual export in a new thread
    using the lazyflow.request framework

    :param settings: the settings from the GUI export dialog
    :type settings: dict
    :param selected_features: the features to export from the GUI dialog
    :type selected_features: list
    :param gui: the Progress bar and callbacks for finish/fail/cancel
                see ExportingGui.show_export_dialog
    :type gui: dict
    """
    self.save_export_progress_dialog(None)
    if gui is None or "dialog" not in gui:
        progress_display = ProgressPrinter("Export Progress", xrange(100, -1, -5), 2)
    else:
        progress_display = gui["dialog"]
    self.save_export_progress_dialog(progress_display)

    export = partial(self.do_export, settings, selected_features, progress_display)
    request = Request(export)
    if gui is not None:
        if "fail" in gui:
            request.notify_failed(gui["fail"])
        if "ok" in gui:
            request.notify_finished(gui["ok"])
        if "cancel" in gui:
            request.notify_cancelled(gui["cancel"])
        if "unlock" in gui:
            request.notify_cancelled(gui["unlock"])
            request.notify_failed(gui["unlock"])
            request.notify_finished(gui["unlock"])
        if "lock" in gui:
            lock = gui["lock"]
            lock()
    request.notify_failed(self.export_failed)
    request.notify_finished(self.export_finished)
    request.notify_cancelled(self.export_cancelled)
    request.submit()

    if gui is not None and "dialog" in gui:
        progress_display.cancel.connect(request.cancel)

def export_object_data(self, lane_index, show_gui=False, filename_suffix=""):
    """
    Initialize progress displays and start the actual export in a new thread
    using the lazyflow.request framework

    Args:
        lane_index (int): Index of the lane to be exported
        show_gui (bool, optional): boolean to determine whether or not to show gui
        filename_suffix (str, optional): If provided, appended to the filename (before the extension)

    Returns:
        lazyflow.request.Request: Request object from which the result can be obtained.
    """
    settings, selected_features = self.get_table_export_settings()

    self.save_export_progress_dialog(None)
    if not show_gui:
        progress_display = ProgressPrinter("Export Progress", range(100, -1, -5), 2)
        gui = None
    else:
        from ilastik.widgets.progressDialog import ProgressDialog
        progress = ProgressDialog(["Feature Data", "Labeling Rois", "Raw Image", "Exporting"])
        progress.set_busy(True)
        progress.show()
        gui = {
            "dialog": progress,
            "ok": partial(progress.safe_popup, "information", "Information", "Export successful!"),
            "cancel": partial(progress.safe_popup, "information", "Information", "Export cancelled!"),
            "fail": partial(progress.safe_popup, "critical", "Critical", "Export failed!"),
            "unlock": self.unlock_gui,
            "lock": self.lock_gui
        }
        progress_display = gui["dialog"]
    self.save_export_progress_dialog(progress_display)

    export = partial(self.do_export, settings, selected_features, progress_display, lane_index, filename_suffix)
    request = Request(export)
    if gui is not None:
        if "fail" in gui:
            request.notify_failed(gui["fail"])
        if "ok" in gui:
            request.notify_finished(gui["ok"])
        if "cancel" in gui:
            request.notify_cancelled(gui["cancel"])
        if "unlock" in gui:
            request.notify_cancelled(gui["unlock"])
            request.notify_failed(gui["unlock"])
            request.notify_finished(gui["unlock"])
        if "lock" in gui:
            lock = gui["lock"]
            lock()
    request.notify_failed(self.export_failed)
    request.notify_finished(self.export_finished)
    request.notify_cancelled(self.export_cancelled)
    request.submit()

    if gui is not None and "dialog" in gui:
        progress_display.cancel.connect(request.cancel)

    return request

def export_object_data(self, lane_index, show_gui=False, filename_suffix=""):
    """
    Initialize progress displays and start the actual export in a new thread
    using the lazyflow.request framework

    Args:
        lane_index (int): Index of the lane to be exported
        show_gui (bool, optional): boolean to determine whether or not to show gui
        filename_suffix (str, optional): If provided, appended to the filename (before the extension)

    Returns:
        lazyflow.request.Request: Request object from which the result can be obtained.
    """
    settings, selected_features = self.get_table_export_settings()
    if not settings:
        return Request.with_value(None)

    self.save_export_progress_dialog(None)
    if not show_gui:
        progress_display = ProgressPrinter("Export Progress", range(100, -1, -5), 2)
        gui = None
    else:
        from ilastik.widgets.progressDialog import ProgressDialog
        progress = ProgressDialog(["Feature Data", "Labeling Rois", "Raw Image", "Exporting"])
        progress.set_busy(True)
        progress.show()
        gui = {
            "dialog": progress,
            "ok": partial(progress.safe_popup, "information", "Information", "Export successful!"),
            "cancel": partial(progress.safe_popup, "information", "Information", "Export cancelled!"),
            "fail": partial(progress.safe_popup, "critical", "Critical", "Export failed!"),
            "unlock": self.unlock_gui,
            "lock": self.lock_gui,
        }
        progress_display = gui["dialog"]
    self.save_export_progress_dialog(progress_display)

    export = partial(self.do_export, settings, selected_features, progress_display, lane_index, filename_suffix)
    request = Request(export)
    if gui is not None:
        if "fail" in gui:
            request.notify_failed(gui["fail"])
        if "ok" in gui:
            request.notify_finished(gui["ok"])
        if "cancel" in gui:
            request.notify_cancelled(gui["cancel"])
        if "unlock" in gui:
            request.notify_cancelled(gui["unlock"])
            request.notify_failed(gui["unlock"])
            request.notify_finished(gui["unlock"])
        if "lock" in gui:
            lock = gui["lock"]
            lock()
    request.notify_failed(self.export_failed)
    request.notify_finished(self.export_finished)
    request.notify_cancelled(self.export_cancelled)
    request.submit()

    if gui is not None and "dialog" in gui:
        progress_display.cancel.connect(request.cancel)

    return request

def exportFinalSupervoxels(self, outputPath, axisorder, progressCallback=None):
    """
    Executes the export process within a request.
    The (already-running) request is returned, in case you want to wait for it or monitor its progress.
    """
    assert self.FinalSupervoxels.ready(), "Can't export yet: The final segmentation isn't ready!"
    logger.info("Starting Final Segmentation Export...")

    opTranspose = OpReorderAxes(parent=self)
    opTranspose.AxisOrder.setValue(axisorder)
    opTranspose.Input.connect(self.FinalSupervoxels)

    f = h5py.File(outputPath, 'w')
    opExporter = OpH5WriterBigDataset(parent=self)
    opExporter.hdf5File.setValue(f)
    opExporter.hdf5Path.setValue('stack')
    opExporter.Image.connect(opTranspose.Output)
    if progressCallback is not None:
        opExporter.progressSignal.subscribe(progressCallback)

    req = Request(partial(self._runExporter, opExporter))

    def cleanOps():
        opExporter.cleanUp()
        opTranspose.cleanUp()

    def handleFailed(exc, exc_info):
        cleanOps()
        f.close()
        msg = "Final Supervoxel export FAILED due to the following error:\n{}".format(exc)
        log_exception(logger, msg, exc_info)

    def handleFinished(result):
        # Generate the mapping transforms dataset
        mapping = self._opAccumulateFinalImage.Mapping.value
        num_labels = mapping.keys()[-1][1]
        transform = numpy.zeros(shape=(num_labels, 2), dtype=numpy.uint32)
        for (start, stop), body_id in mapping.items():
            for supervoxel_label in range(start, stop):
                transform[supervoxel_label][0] = supervoxel_label
                if body_id == -1:
                    # Special case: -1 means "identity transform" for this supervoxel
                    # (Which is really an untouched raveler body)
                    transform[supervoxel_label][1] = supervoxel_label
                else:
                    transform[supervoxel_label][1] = body_id

        # Save the transform before closing the file
        f.create_dataset('transforms', data=transform)

        # Copy all other datasets from the original segmentation file.
        ravelerSegmentationInfo = self.DatasetInfos[2].value
        pathComponents = PathComponents(ravelerSegmentationInfo.filePath, self.WorkingDirectory.value)
        with h5py.File(pathComponents.externalPath, 'r') as originalFile:
            for k, dset in originalFile.items():
                if k not in ['transforms', 'stack']:
                    f.copy(dset, k)

        try:
            cleanOps()
            logger.info("FINISHED Final Supervoxel Export")
        finally:
            f.close()

    def handleCancelled():
        cleanOps()
        f.close()
        logger.info("Final Supervoxel export was cancelled!")

    req.notify_failed(handleFailed)
    req.notify_finished(handleFinished)
    req.notify_cancelled(handleCancelled)
    req.submit()
    return req  # Returned in case the user wants to cancel it.

def exportFinalSupervoxels(self, outputPath, axisorder, progressCallback=None):
    """
    Executes the export process within a request.
    The (already-running) request is returned, in case you want to wait for it or monitor its progress.
    """
    assert self.FinalSupervoxels.ready(), "Can't export yet: The final segmentation isn't ready!"
    logger.info("Starting Final Segmentation Export...")

    opTranspose = OpReorderAxes(parent=self)
    opTranspose.AxisOrder.setValue(axisorder)
    opTranspose.Input.connect(self.FinalSupervoxels)

    f = h5py.File(outputPath, 'w')
    opExporter = OpH5WriterBigDataset(parent=self)
    opExporter.hdf5File.setValue(f)
    opExporter.hdf5Path.setValue('stack')
    opExporter.Image.connect(opTranspose.Output)
    if progressCallback is not None:
        opExporter.progressSignal.subscribe(progressCallback)

    req = Request(partial(self._runExporter, opExporter))

    def cleanOps():
        opExporter.cleanUp()
        opTranspose.cleanUp()

    def handleFailed(exc, exc_info):
        cleanOps()
        f.close()
        import traceback
        traceback.print_tb(exc_info[2])
        msg = "Final Supervoxel export FAILED due to the following error:\n{}".format(exc)
        logger.error(msg)

    def handleFinished(result):
        # Generate the mapping transforms dataset
        mapping = self._opAccumulateFinalImage.Mapping.value
        num_labels = mapping.keys()[-1][1]
        transform = numpy.zeros(shape=(num_labels, 2), dtype=numpy.uint32)
        for (start, stop), body_id in mapping.items():
            for supervoxel_label in range(start, stop):
                transform[supervoxel_label][0] = supervoxel_label
                if body_id == -1:
                    # Special case: -1 means "identity transform" for this supervoxel
                    # (Which is really an untouched raveler body)
                    transform[supervoxel_label][1] = supervoxel_label
                else:
                    transform[supervoxel_label][1] = body_id

        # Save the transform before closing the file
        f.create_dataset('transforms', data=transform)

        # Copy all other datasets from the original segmentation file.
        ravelerSegmentationInfo = self.DatasetInfos[2].value
        pathComponents = PathComponents(ravelerSegmentationInfo.filePath, self.WorkingDirectory.value)
        with h5py.File(pathComponents.externalPath, 'r') as originalFile:
            for k, dset in originalFile.items():
                if k not in ['transforms', 'stack']:
                    f.copy(dset, k)

        try:
            cleanOps()
            logger.info("FINISHED Final Supervoxel Export")
        finally:
            f.close()

    def handleCancelled():
        cleanOps()
        f.close()
        logger.info("Final Supervoxel export was cancelled!")

    req.notify_failed(handleFailed)
    req.notify_finished(handleFinished)
    req.notify_cancelled(handleCancelled)
    req.submit()
    return req  # Returned in case the user wants to cancel it.