def _triggerTableUpdate(self):
    # Check that object area is included in selected features
    featureNames = self.topLevelOperatorView.SelectedFeatures.value
    if 'Standard Object Features' not in featureNames or 'Count' not in featureNames['Standard Object Features']:
        box = QMessageBox(
            QMessageBox.Warning,
            'Warning',
            'Object area is not a selected feature. Please select this feature on: \"Standard Object Features > Shape > Size in pixels\"',
            QMessageBox.NoButton,
            self)
        box.show()
        return

    # Clear table
    self.table.clearContents()
    self.table.setRowCount(0)
    self.table.setSortingEnabled(False)
    self.progressBar.show()
    self.computeButton.setEnabled(False)

    # Compute object features and number of labels per frame
    def compute_features():
        features = self.topLevelOperatorView.ObjectFeatures([]).wait()
        labels = self.topLevelOperatorView.LabelInputs([]).wait()
        return features, labels

    req = Request(compute_features)
    req.notify_finished(self._populateTable)
    req.submit()

def exportObjectCounts(self):
    opCounting = self.parentApplet.opCounting

    export_filepath = QFileDialog.getSaveFileName(parent=self,
                                                  caption="Exported Object Counts",
                                                  filter="*.csv")
    if not export_filepath:
        return

    self.parentApplet.busy = True
    self.parentApplet.appletStateUpdateRequested.emit()

    def _exportObjectCounts():
        num_files = len(self.topLevelOperator.RawDatasetInfo)
        with open(export_filepath, 'w') as export_file:
            for lane_index, (info_slot, sum_slot) in enumerate(zip(self.topLevelOperator.RawDatasetInfo, opCounting.OutputSum)):
                self.parentApplet.progressSignal.emit(100.0 * lane_index / num_files)
                nickname = info_slot.value.nickname
                object_count = sum_slot[:].wait()[0]
                export_file.write(nickname + "," + str(object_count) + "\n")

        self.parentApplet.busy = False
        self.parentApplet.progressSignal.emit(100)
        self.parentApplet.appletStateUpdateRequested.emit()

    req = Request(_exportObjectCounts)
    req.notify_failed(self.handleFailedObjectCountExport)
    req.submit()

def export_object_data(self, settings, selected_features, gui=None):
    """
    Initialize progress displays and start the actual export in a new thread using the lazyflow.request framework

    :param settings: the settings from the GUI export dialog
    :type settings: dict
    :param selected_features: the features to export from the GUI dialog
    :type selected_features: list
    :param gui: the Progress bar and callbacks for finish/fail/cancel see ExportingGui.show_export_dialog
    :type gui: dict
    """
    self.save_export_progress_dialog(None)
    if gui is None or "dialog" not in gui:
        progress_display = ProgressPrinter("Export Progress", xrange(100, -1, -5), 2)
    else:
        progress_display = gui["dialog"]
    self.save_export_progress_dialog(progress_display)

    export = partial(self.do_export, settings, selected_features, progress_display)  # ();return
    request = Request(export)
    request.notify_failed(gui["fail"] if gui is not None and "fail" in gui else self.export_failed)
    request.notify_failed(self.export_failed)
    request.notify_finished(gui["ok"] if gui is not None and "ok" in gui else self.export_finished)
    request.notify_cancelled(gui["cancel"] if gui is not None and "cancel" in gui else self.export_cancelled)
    request.submit()

    if gui is not None and "dialog" in gui:
        progress_display.cancel.connect(request.cancel)

def _triggerTableUpdate(self):
    # Check that object area is included in selected features
    featureNames = self.topLevelOperatorView.SelectedFeatures.value
    if 'Standard Object Features' not in featureNames or 'Count' not in featureNames['Standard Object Features']:
        box = QMessageBox(
            QMessageBox.Warning,
            'Warning',
            'Object area is not a selected feature. Please select this feature on: \"Standard Object Features > Shape > Size in pixels\"',
            QMessageBox.NoButton,
            self)
        box.show()
        return

    # Clear table
    self.table.clearContents()
    self.table.setRowCount(0)
    self.table.setSortingEnabled(False)
    self.progressBar.show()
    self.computeButton.setEnabled(False)

    def compute_features_for_frame(tIndex, t, features):
        # Compute features and labels (called in parallel from request pool)
        roi = [slice(None) for i in range(len(self.topLevelOperatorView.LabelImages.meta.shape))]
        roi[tIndex] = slice(t, t + 1)
        roi = tuple(roi)

        frame = self.topLevelOperatorView.SegmentationImages(roi).wait()
        frame = frame.squeeze().astype(numpy.uint32, copy=False)

        # Dirty trick: We don't care what we're passing here for the 'image' parameter,
        # but vigra insists that we pass *something*, so we'll cast the label image as float32.
        features[t] = vigra.analysis.extractRegionFeatures(frame.view(numpy.float32), frame, ['Count'], ignoreLabel=0)

    tIndex = self.topLevelOperatorView.SegmentationImages.meta.axistags.index('t')
    tMax = self.topLevelOperatorView.SegmentationImages.meta.shape[tIndex]

    features = {}
    labels = {}

    def compute_all_features():
        # Compute features in parallel
        pool = RequestPool()
        for t in range(tMax):
            pool.add(Request(partial(compute_features_for_frame, tIndex, t, features)))
        pool.wait()

        # Compute labels
        labels = self.topLevelOperatorView.LabelInputs([]).wait()

    req = Request(compute_all_features)
    req.notify_finished(partial(self._populateTable, features, labels))
    req.submit()

def run_export(self):
    role_names = self.parentApplet.dataSelectionApplet.topLevelOperator.DatasetRoles.value

    # Prepare file lists in an OrderedDict
    role_path_dict = OrderedDict()
    role_path_dict[0] = BatchProcessingGui.get_all_item_strings(self.list_widgets[0])
    num_datasets = len(role_path_dict[0])

    for role_index, list_widget in enumerate(self.list_widgets[1:], start=1):
        role_path_dict[role_index] = BatchProcessingGui.get_all_item_strings(self.list_widgets[role_index])
        assert len(role_path_dict[role_index]) <= num_datasets, \
            "Too many files given for role: '{}'".format(role_names[role_index])
        if len(role_path_dict[role_index]) < num_datasets:
            role_path_dict[role_index] += [None] * (num_datasets - len(role_path_dict[role_index]))

    # Run the export in a separate thread
    export_req = Request(partial(self.parentApplet.run_export, role_path_dict))
    export_req.notify_failed(self.handle_batch_processing_failure)
    export_req.notify_finished(self.handle_batch_processing_finished)
    export_req.notify_cancelled(self.handle_batch_processing_cancelled)
    self.export_req = export_req

    self.parentApplet.busy = True
    self.parentApplet.appletStateUpdateRequested.emit()
    self.cancel_button.setVisible(True)
    self.run_button.setEnabled(False)

    # Start the export
    export_req.submit()

def run_export(self):
    role_names = self.parentApplet.dataSelectionApplet.topLevelOperator.DatasetRoles.value

    # Prepare file lists in an OrderedDict
    role_path_dict = OrderedDict()
    role_path_dict[0] = BatchProcessingGui.get_all_item_strings(self.list_widgets[0])
    num_datasets = len(role_path_dict[0])

    for role_index, list_widget in enumerate(self.list_widgets[1:], start=1):
        role_path_dict[role_index] = BatchProcessingGui.get_all_item_strings(self.list_widgets[role_index])
        assert len(role_path_dict[role_index]) <= num_datasets, \
            "Too many files given for role: '{}'".format(role_names[role_index])
        if len(role_path_dict[role_index]) < num_datasets:
            role_path_dict[role_index] += [None] * (num_datasets - len(role_path_dict[role_index]))

    # Run the export in a separate thread
    export_req = Request(partial(self.parentApplet.run_export, role_path_dict))
    export_req.notify_failed(self.handle_batch_processing_failure)
    export_req.notify_finished(self.handle_batch_processing_finished)
    export_req.notify_cancelled(self.handle_batch_processing_cancelled)
    self.export_req = export_req

    self.parentApplet.busy = True
    self.parentApplet.appletStateUpdateRequested()
    self.cancel_button.setVisible(True)
    self.run_button.setEnabled(False)

    # Start the export
    export_req.submit()

def run_export(self):
    role_names = self.parentApplet.dataSelectionApplet.role_names

    # Prepare file lists in an OrderedDict
    role_inputs = {role_name: self._data_role_widgets[role_name].filepaths for role_name in role_names}
    if all(len(role_inp) == 0 for role_inp in role_inputs.values()):
        return

    # Run the export in a separate thread
    lane_configs = self.parentApplet.dataSelectionApplet.create_lane_configs(role_inputs=role_inputs)

    export_req = Request(partial(self.parentApplet.run_export, lane_configs=lane_configs))
    export_req.notify_failed(self.handle_batch_processing_failure)
    export_req.notify_finished(self.handle_batch_processing_finished)
    export_req.notify_cancelled(self.handle_batch_processing_cancelled)
    self.export_req = export_req

    self.parentApplet.busy = True
    self.parentApplet.appletStateUpdateRequested()
    self.cancel_button.setVisible(True)
    self.run_button.setEnabled(False)

    # Start the export
    export_req.submit()

def lots_of_work():
    requests = []
    for i in range(mcount):
        req = Request(functools.partial(empty_func, b=11))
        req.submit()
        requests.append(req)  # keep a handle so the loop below actually waits on every request
    for r in requests:
        r.wait()

def submit_to_threadpool(fn, priority):
    if USE_LAZYFLOW_THREADPOOL:
        # Tiling requests are less prioritized than most requests.
        root_priority = [1] + list(priority)
        req = Request(fn, root_priority)
        req.submit()
    else:
        get_render_pool().submit(fn, priority)

def replaceWithStack(self, roleIndex, laneIndex):
    """
    The user clicked the "Import Stack Files" button.
    """
    stackDlg = StackFileSelectionWidget(self)
    stackDlg.exec_()
    if stackDlg.result() != QDialog.Accepted:
        return
    files = stackDlg.selectedFiles
    if len(files) == 0:
        return

    info = DatasetInfo()
    info.filePath = "//".join(files)
    prefix = os.path.commonprefix(files)
    info.nickname = PathComponents(prefix).filenameBase

    # Add an underscore for each wildcard digit
    num_wildcards = len(files[-1]) - len(prefix) - len(os.path.splitext(files[-1])[1])
    info.nickname += "_" * num_wildcards

    # Allow labels by default if this gui isn't being used for batch data.
    info.allowLabels = (self.guiMode == GuiMode.Normal)
    info.fromstack = True

    originalNumLanes = len(self.topLevelOperator.DatasetGroup)

    if laneIndex is None:
        laneIndex = self._findFirstEmptyLane(roleIndex)
    if len(self.topLevelOperator.DatasetGroup) < laneIndex + 1:
        self.topLevelOperator.DatasetGroup.resize(laneIndex + 1)

    def importStack():
        self.guiControlSignal.emit(ControlCommand.DisableAll)
        # Serializer will update the operator for us, which will propagate to the GUI.
        try:
            self.serializer.importStackAsLocalDataset(info)
            try:
                self.topLevelOperator.DatasetGroup[laneIndex][roleIndex].setValue(info)
            except DatasetConstraintError as ex:
                # Give the user a chance to repair the problem.
                filename = files[0] + "\n...\n" + files[-1]
                return_val = [False]
                self.handleDatasetConstraintError(info, filename, ex, roleIndex, laneIndex, return_val)
                if not return_val[0]:
                    # Not successfully repaired. Roll back the changes and give up.
                    self.topLevelOperator.DatasetGroup.resize(originalNumLanes)
        finally:
            self.guiControlSignal.emit(ControlCommand.Pop)

    req = Request(importStack)
    req.notify_failed(partial(self.handleFailedStackLoad, files, originalNumLanes))
    req.submit()

def export_object_data(self, lane_index, show_gui=False, filename_suffix=""):
    """
    Initialize progress displays and start the actual export in a new thread using the lazyflow.request framework

    :param lane_index: index of the lane to be exported
    :type lane_index: int
    :param show_gui: whether to show a progress dialog and result pop-ups
    :type show_gui: bool
    :param filename_suffix: if provided, appended to the filename (before the extension)
    :type filename_suffix: str
    """
    settings, selected_features = self.get_table_export_settings()

    self.save_export_progress_dialog(None)
    if not show_gui:
        progress_display = ProgressPrinter("Export Progress", xrange(100, -1, -5), 2)
        gui = None
    else:
        from ilastik.widgets.progressDialog import ProgressDialog
        progress = ProgressDialog(["Feature Data", "Labeling Rois", "Raw Image", "Exporting"])
        progress.set_busy(True)
        progress.show()
        gui = {
            "dialog": progress,
            "ok": partial(progress.safe_popup, "information", "Information", "Export successful!"),
            "cancel": partial(progress.safe_popup, "information", "Information", "Export cancelled!"),
            "fail": partial(progress.safe_popup, "critical", "Critical", "Export failed!"),
            "unlock": self.unlock_gui,
            "lock": self.lock_gui,
        }
        progress_display = gui["dialog"]
    self.save_export_progress_dialog(progress_display)

    export = partial(self.do_export, settings, selected_features, progress_display, lane_index, filename_suffix)
    request = Request(export)
    if gui is not None:
        if "fail" in gui:
            request.notify_failed(gui["fail"])
        if "ok" in gui:
            request.notify_finished(gui["ok"])
        if "cancel" in gui:
            request.notify_cancelled(gui["cancel"])
        if "unlock" in gui:
            request.notify_cancelled(gui["unlock"])
            request.notify_failed(gui["unlock"])
            request.notify_finished(gui["unlock"])
        if "lock" in gui:
            lock = gui["lock"]
            lock()
    request.notify_failed(self.export_failed)
    request.notify_finished(self.export_finished)
    request.notify_cancelled(self.export_cancelled)
    request.submit()

    if gui is not None and "dialog" in gui:
        progress_display.cancel.connect(request.cancel)

    return request

def addStack(self, roleIndex, laneIndex):
    """
    The user clicked the "Import Stack Files" button.
    """
    stackDlg = StackFileSelectionWidget(self)
    stackDlg.exec_()
    if stackDlg.result() != QDialog.Accepted:
        return
    files = stackDlg.selectedFiles
    sequence_axis = stackDlg.sequence_axis
    if len(files) == 0:
        return

    info = DatasetInfo()
    info.filePath = os.path.pathsep.join(files)
    prefix = os.path.commonprefix(files)
    info.nickname = PathComponents(prefix).filenameBase

    # Add an underscore for each wildcard digit
    num_wildcards = len(files[-1]) - len(prefix) - len(os.path.splitext(files[-1])[1])
    info.nickname += "_" * num_wildcards

    # Allow labels by default if this gui isn't being used for batch data.
    info.allowLabels = (self.guiMode == GuiMode.Normal)
    info.fromstack = True

    originalNumLanes = len(self.topLevelOperator.DatasetGroup)

    if laneIndex is None or laneIndex == -1:
        laneIndex = len(self.topLevelOperator.DatasetGroup)
    if len(self.topLevelOperator.DatasetGroup) < laneIndex + 1:
        self.topLevelOperator.DatasetGroup.resize(laneIndex + 1)

    def importStack():
        self.parentApplet.busy = True
        self.parentApplet.appletStateUpdateRequested.emit()
        # Serializer will update the operator for us, which will propagate to the GUI.
        try:
            self.serializer.importStackAsLocalDataset(info, sequence_axis)
            try:
                self.topLevelOperator.DatasetGroup[laneIndex][roleIndex].setValue(info)
            except DatasetConstraintError as ex:
                # Give the user a chance to repair the problem.
                filename = files[0] + "\n...\n" + files[-1]
                return_val = [False]
                self.handleDatasetConstraintError(info, filename, ex, roleIndex, laneIndex, return_val)
                if not return_val[0]:
                    # Not successfully repaired. Roll back the changes and give up.
                    self.topLevelOperator.DatasetGroup.resize(originalNumLanes)
        finally:
            self.parentApplet.busy = False
            self.parentApplet.appletStateUpdateRequested.emit()

    req = Request(importStack)
    req.notify_finished(lambda result: self.showDataset(laneIndex, roleIndex))
    req.notify_failed(partial(self.handleFailedStackLoad, files, originalNumLanes))
    req.submit()

def _exportMeshes(self, object_names: List[str], obj_filepaths: List[str]) -> Request:
    """Save objects in the mst to .obj files

    Args:
        object_names: Names of the objects in the mst
        obj_filepaths: One path for each object in object_names

    Returns:
        Returns the request object, used in testing
    """
    def get_label_volume_from_mst(mst, object_name):
        object_supervoxels = mst.object_lut[object_name]
        object_lut = numpy.zeros(mst.nodeNum + 1, dtype=numpy.int32)
        object_lut[object_supervoxels] = 1
        supervoxel_volume = mst.supervoxelUint32
        object_volume = object_lut[supervoxel_volume]
        return object_volume

    mst = self.topLevelOperatorView.MST.value

    def exportMeshes(object_names, obj_filepaths):
        n_objects = len(object_names)
        progress_update = 100 / n_objects
        try:
            for obj, obj_path, obj_n in zip(object_names, obj_filepaths, range(n_objects)):
                object_volume = get_label_volume_from_mst(mst, obj)
                unique_ids = len(numpy.unique(object_volume))
                if unique_ids <= 1:
                    logger.info(f"No voxels found for {obj}, skipping")
                    continue
                elif unique_ids > 2:
                    logger.info(f"Supervoxel segmentation not unique for {obj}, skipping, got {unique_ids}")
                    continue

                logger.info(f"Generating mesh for {obj}")
                _, mesh_data = list(labeling_to_mesh(object_volume, [1]))[0]
                self.parentApplet.progressSignal((obj_n + .5) * progress_update)
                logger.info(f"Mesh generation for {obj} complete.")

                logger.info(f"Saving mesh for {obj} to {obj_path}")
                mesh_to_obj(mesh_data, obj_path, obj)
                self.parentApplet.progressSignal((obj_n + 1) * progress_update)
        finally:
            self.parentApplet.busy = False
            self.parentApplet.progressSignal(100)
            self.parentApplet.appletStateUpdateRequested()

    self.parentApplet.busy = True
    self.parentApplet.progressSignal(-1)
    self.parentApplet.appletStateUpdateRequested()
    req = Request(partial(exportMeshes, object_names, obj_filepaths))
    req.submit()
    return req

def addStack(self, roleIndex, laneIndex):
    """
    The user clicked the "Import Stack Files" button.
    """
    stackDlg = StackFileSelectionWidget(self)
    stackDlg.exec_()
    if stackDlg.result() != QDialog.Accepted:
        return
    files = stackDlg.selectedFiles
    sequence_axis = stackDlg.sequence_axis
    if len(files) == 0:
        return

    cwd = self.topLevelOperator.WorkingDirectory.value
    info = DatasetInfo(os.path.pathsep.join(files), cwd=cwd)

    originalNumLanes = len(self.topLevelOperator.DatasetGroup)

    if laneIndex is None or laneIndex == -1:
        laneIndex = len(self.topLevelOperator.DatasetGroup)
    if len(self.topLevelOperator.DatasetGroup) < laneIndex + 1:
        self.topLevelOperator.DatasetGroup.resize(laneIndex + 1)

    def importStack():
        self.parentApplet.busy = True
        self.parentApplet.appletStateUpdateRequested()
        # Serializer will update the operator for us, which will propagate to the GUI.
        try:
            self.serializer.importStackAsLocalDataset(info, sequence_axis)
            try:
                self.topLevelOperator.DatasetGroup[laneIndex][roleIndex].setValue(info)
            except DatasetConstraintError as ex:
                # Give the user a chance to repair the problem.
                filename = files[0] + "\n...\n" + files[-1]
                return_val = [False]
                self.parentApplet.busy = False  # required for possible fixing dialogs from DatasetConstraintError
                self.parentApplet.appletStateUpdateRequested()
                self.handleDatasetConstraintError(info, filename, ex, roleIndex, laneIndex, return_val)
                if not return_val[0]:
                    # Not successfully repaired. Roll back the changes and give up.
                    self.topLevelOperator.DatasetGroup.resize(originalNumLanes)
        finally:
            self.parentApplet.busy = False
            self.parentApplet.appletStateUpdateRequested()

    req = Request(importStack)
    req.notify_finished(lambda result: self.showDataset(laneIndex, roleIndex))
    req.notify_failed(partial(self.handleFailedStackLoad, files, originalNumLanes))
    req.submit()

def testBasic(self):
    """
    Test the SimpleRequestCondition, which is like threading.Condition, but with a subset of the functionality.
    (See the docs for details.)
    """
    # num_workers = Request.global_thread_pool.num_workers
    # Request.reset_thread_pool(num_workers=1)
    N_ELEMENTS = 100

    # It's tempting to simply use threading.Condition here,
    # but that doesn't quite work if the thread calling wait() is also a worker thread.
    # (threading.Condition uses threading.Lock() as it's 'waiter' lock, which blocks the entire worker.)
    # cond = threading.Condition( RequestLock() )
    cond = SimpleRequestCondition()

    produced = []
    consumed = []

    def wait_for_all():
        def f(i):
            time.sleep(0.2 * random.random())
            with cond:
                produced.append(i)
                cond.notify()

        reqs = []
        for i in range(N_ELEMENTS):
            req = Request(partial(f, i))
            reqs.append(req)

        for req in reqs:
            req.submit()

        _consumed = consumed
        with cond:
            while len(_consumed) < N_ELEMENTS:
                while len(_consumed) == len(produced):
                    cond.wait()
                logger.debug("copying {} elements".format(len(produced) - len(consumed)))
                _consumed += produced[len(_consumed):]

    # Force the request to run in a worker thread.
    # This should catch failures that can occur if the Condition's "waiter" lock isn't a request lock.
    req = Request(wait_for_all)
    req.submit()

    # Now block for completion
    req.wait()

    logger.debug("produced: {}".format(produced))
    logger.debug("consumed: {}".format(consumed))
    assert set(consumed) == set(range(N_ELEMENTS)), "Expected set(range(N_ELEMENTS)), got {}".format(consumed)

def exportFinalSegmentation(self, outputPath, axisorder, progressCallback=None):
    assert self.FinalSegmentation.ready(), "Can't export yet: The final segmentation isn't ready!"

    logger.info("Starting Final Segmentation Export...")

    opTranspose = OpReorderAxes(parent=self)
    opTranspose.AxisOrder.setValue(axisorder)
    opTranspose.Input.connect(self.FinalSegmentation)

    f = h5py.File(outputPath, 'w')
    opExporter = OpH5WriterBigDataset(parent=self)
    opExporter.hdf5File.setValue(f)
    opExporter.hdf5Path.setValue('split_result')
    opExporter.Image.connect(opTranspose.Output)
    if progressCallback is not None:
        opExporter.progressSignal.subscribe(progressCallback)

    req = Request(partial(self._runExporter, opExporter))

    def cleanOps():
        opExporter.cleanUp()
        opTranspose.cleanUp()

    def handleFailed(exc, exc_info):
        cleanOps()
        f.close()
        import traceback
        traceback.print_tb(exc_info[2])
        msg = "Final Segmentation export FAILED due to the following error:\n{}".format(exc)
        logger.error(msg)

    def handleFinished(result):
        try:
            cleanOps()
            logger.info("FINISHED Final Segmentation Export")
        finally:
            f.close()

    def handleCancelled():
        cleanOps()
        f.close()
        logger.info("Final Segmentation export was cancelled!")

    req.notify_failed(handleFailed)
    req.notify_finished(handleFinished)
    req.notify_cancelled(handleCancelled)
    req.submit()
    return req  # Returned in case the user wants to cancel it.

def _update_rendering(self):
    """
    Override from the base class.
    """
    # This update has to be performed in a different thread to avoid a deadlock
    # (Because this function is running in the context of a dirty notification!)
    req = Request(self.__update_rendering)

    def handle_rendering_failure(exc, exc_info):
        msg = "Exception raised during volume rendering update. See traceback above.\n"
        log_exception(logger, msg, exc_info)

    req.notify_failed(handle_rendering_failure)
    req.submit()

def replaceWithStack(self, roleIndex, laneIndex):
    """
    The user clicked the "Import Stack Files" button.
    """
    stackDlg = StackFileSelectionWidget(self)
    stackDlg.exec_()
    if stackDlg.result() != QDialog.Accepted:
        return
    files = stackDlg.selectedFiles
    if len(files) == 0:
        return

    info = DatasetInfo()
    info.filePath = "//".join(files)
    prefix = os.path.commonprefix(files)
    info.nickname = PathComponents(prefix).filenameBase + "..."

    # Allow labels by default if this gui isn't being used for batch data.
    info.allowLabels = (self.guiMode == GuiMode.Normal)
    info.fromstack = True

    originalNumLanes = len(self.topLevelOperator.DatasetGroup)

    if laneIndex is None:
        laneIndex = self._findFirstEmptyLane(roleIndex)
    if len(self.topLevelOperator.DatasetGroup) < laneIndex + 1:
        self.topLevelOperator.DatasetGroup.resize(laneIndex + 1)

    def importStack():
        self.guiControlSignal.emit(ControlCommand.DisableAll)
        # Serializer will update the operator for us, which will propagate to the GUI.
        try:
            self.serializer.importStackAsLocalDataset(info)
            try:
                self.topLevelOperator.DatasetGroup[laneIndex][roleIndex].setValue(info)
            except DatasetConstraintError as ex:
                # Give the user a chance to repair the problem.
                filename = files[0] + "\n...\n" + files[-1]
                if not self.handleDatasetConstraintError(info, filename, ex, roleIndex, laneIndex):
                    self.topLevelOperator.DatasetGroup.resize(originalNumLanes)
        finally:
            self.guiControlSignal.emit(ControlCommand.Pop)

    req = Request(importStack)
    req.notify_failed(partial(self.handleFailedStackLoad, files, originalNumLanes))
    req.submit()

def execute(self, slot, subindex, roi, result):
    """
    Simulate a cascade of requests, to make sure that the entire cascade is properly freed.
    """
    roiShape = roi.stop - roi.start

    def getResults1():
        return numpy.indices(roiShape, self.Output.meta.dtype).sum()

    def getResults2():
        req = Request(getResults1)
        req.submit()
        result[:] = req.wait()
        return result

    req = Request(getResults2)
    req.submit()
    result[:] = req.wait()
    return result

def run(self, opExport):
    """
    Start the export and return immediately (after showing the progress dialog).

    :param opExport: The export object to execute.
                     It must have a 'run_export()' method and a 'progressSignal' member.
    """
    progressDlg = MultiStepProgressDialog(parent=self.parent())
    progressDlg.setNumberOfSteps(1)

    def _forwardProgressToGui(progress):
        self._forwardingSignal.emit(partial(progressDlg.setStepProgress, progress))
    opExport.progressSignal.subscribe(_forwardProgressToGui)

    def _onFinishExport(*args):  # Also called on cancel
        self._forwardingSignal.emit(progressDlg.finishStep)

    def _onFail(exc, exc_info):
        import traceback
        traceback.print_tb(exc_info[2])
        msg = "Failed to export layer due to the following error:\n{}".format(exc)
        self._forwardingSignal.emit(partial(QMessageBox.critical, self.parent(), "Export Failed", msg))
        self._forwardingSignal.emit(progressDlg.setFailed)

    # Use a request to execute in the background
    req = Request(opExport.run_export)
    req.notify_cancelled(_onFinishExport)
    req.notify_finished(_onFinishExport)
    req.notify_failed(_onFail)

    # Allow cancel.
    progressDlg.rejected.connect(req.cancel)

    # Start the export
    req.submit()

    # Execute the progress dialog
    # (We can block the thread here because the QDialog spins up its own event loop.)
    progressDlg.exec_()

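# The docstring above pins down the only interface run() relies on: an export
# object with a run_export() method and a progressSignal whose subscribe()
# accepts a progress callback. As a hedged sketch (both classes below are
# hypothetical stand-ins for illustration, not lazyflow or ilastik API), such
# an object could look like this:
class FakeProgressSignal:
    """Hypothetical subscribe/call-style progress signal."""

    def __init__(self):
        self._callbacks = []

    def subscribe(self, fn):
        self._callbacks.append(fn)

    def __call__(self, progress):
        for fn in self._callbacks:
            fn(progress)


class FakeExport:
    """Hypothetical export object satisfying the interface run() expects."""

    def __init__(self):
        self.progressSignal = FakeProgressSignal()

    def run_export(self):
        # Pretend to do the export work, reporting progress along the way.
        for pct in (0, 50, 100):
            self.progressSignal(pct)
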
def test_cancellation_behavior():
    """
    If a request is cancelled while it was waiting on a lock, it should raise the CancellationException.
    """
    lock = RequestLock()
    lock.acquire()

    def f():
        try:
            with lock:
                assert False
        except Request.CancellationException:
            pass
        else:
            assert False

    finished = [False]
    cancelled = [False]
    failed = [False]

    def handle_finished(result):
        finished[0] = True

    def handle_cancelled():
        cancelled[0] = True

    def handle_failed(*args):
        failed[0] = True

    req = Request(f)
    req.notify_finished(handle_finished)
    req.notify_failed(handle_failed)
    req.notify_cancelled(handle_cancelled)

    req.submit()
    req.cancel()
    time.sleep(0.1)
    lock.release()
    time.sleep(0.1)

    assert not finished[0] and not failed[0] and cancelled[0]

def export_object_data(self, settings, selected_features, gui=None):
    """
    Initialize progress displays and start the actual export in a new thread using the lazyflow.request framework

    :param settings: the settings from the GUI export dialog
    :type settings: dict
    :param selected_features: the features to export from the GUI dialog
    :type selected_features: list
    :param gui: the Progress bar and callbacks for finish/fail/cancel see ExportingGui.show_export_dialog
    :type gui: dict
    """
    self.save_export_progress_dialog(None)
    if gui is None or "dialog" not in gui:
        progress_display = ProgressPrinter("Export Progress", xrange(100, -1, -5), 2)
    else:
        progress_display = gui["dialog"]
    self.save_export_progress_dialog(progress_display)

    export = partial(self.do_export, settings, selected_features, progress_display)
    request = Request(export)
    if gui is not None:
        if "fail" in gui:
            request.notify_failed(gui["fail"])
        if "ok" in gui:
            request.notify_finished(gui["ok"])
        if "cancel" in gui:
            request.notify_cancelled(gui["cancel"])
        if "unlock" in gui:
            request.notify_cancelled(gui["unlock"])
            request.notify_failed(gui["unlock"])
            request.notify_finished(gui["unlock"])
        if "lock" in gui:
            lock = gui["lock"]
            lock()
    request.notify_failed(self.export_failed)
    request.notify_finished(self.export_finished)
    request.notify_cancelled(self.export_cancelled)
    request.submit()

    if gui is not None and "dialog" in gui:
        progress_display.cancel.connect(request.cancel)

def run_export(self):
    role_names = self.parentApplet.dataSelectionApplet.topLevelOperator.DatasetRoles.value

    # Prepare file lists in an OrderedDict
    role_path_dict = OrderedDict(
        (role_name, self._data_role_widgets[role_name].filepaths) for role_name in role_names
    )
    dominant_role_name = role_names[0]
    num_paths = len(role_path_dict[dominant_role_name])

    if num_paths == 0:
        return

    for role_name in role_names[1:]:
        paths = role_path_dict[role_name]
        if len(paths) == 0:
            role_path_dict[role_name] = [None] * num_paths

        if len(role_path_dict[role_name]) != num_paths:
            raise BatchProcessingDataConstraintException(
                f"Number of files for '{role_name!r}' does not match! "
                f"Expected {num_paths} files."
            )

    # Run the export in a separate thread
    export_req = Request(partial(self.parentApplet.run_export, role_path_dict))
    export_req.notify_failed(self.handle_batch_processing_failure)
    export_req.notify_finished(self.handle_batch_processing_finished)
    export_req.notify_cancelled(self.handle_batch_processing_cancelled)
    self.export_req = export_req

    self.parentApplet.busy = True
    self.parentApplet.appletStateUpdateRequested()
    self.cancel_button.setVisible(True)
    self.run_button.setEnabled(False)

    # Start the export
    export_req.submit()

def wait_for_all():
    def f(i):
        time.sleep(0.2 * random.random())
        with cond:
            produced.append(i)
            cond.notify()

    reqs = []
    for i in range(N_ELEMENTS):
        req = Request(partial(f, i))
        reqs.append(req)

    for req in reqs:
        req.submit()

    _consumed = consumed
    with cond:
        while len(_consumed) < N_ELEMENTS:
            while len(_consumed) == len(produced):
                cond.wait()
            logger.debug("copying {} elements".format(len(produced) - len(consumed)))
            _consumed += produced[len(_consumed):]

def importStackFromGlobString(self, globString):
    """
    The word 'glob' is used loosely here.  See the OpStackLoader operator for details.
    """
    globString = globString.replace("\\", "/")
    info = DatasetInfo()
    info.filePath = globString

    # Allow labels by default if this gui isn't being used for batch data.
    info.allowLabels = (self.guiMode == GuiMode.Normal)

    def importStack():
        self.guiControlSignal.emit(ControlCommand.DisableAll)
        # Serializer will update the operator for us, which will propagate to the GUI.
        try:
            self.serializer.importStackAsLocalDataset(info)
        finally:
            self.guiControlSignal.emit(ControlCommand.Pop)

    req = Request(importStack)
    req.notify_failed(partial(self.handleFailedStackLoad, globString))
    req.submit()

def getResults2():
    req = Request(getResults1)
    req.submit()
    result[:] = req.wait()
    return result

def lf_req(fn):
    r = Request(fn)
    r.submit()
    return r

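# A minimal hedged usage sketch for a helper like lf_req above: submit a
# callable to lazyflow's worker pool and block on its result later. slow_add is
# a made-up example function; Request is assumed to be importable from
# lazyflow.request, as in the other snippets in this listing.
from functools import partial

from lazyflow.request import Request


def slow_add(a, b):
    return a + b


req = lf_req(partial(slow_add, 2, 3))  # returns immediately; work runs on a worker thread
result = req.wait()                    # blocks until the request finishes; result == 5
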
t1 = time.time()

def lots_of_work():
    requests = []
    for i in range(mcount):
        req = Request(functools.partial(empty_func, b=11))
        req.submit()
        requests.append(req)  # keep a handle so the loop below actually waits on every request
    for r in requests:
        r.wait()

# Make sure this test occurs entirely within greenlets.
req = Request(functools.partial(lots_of_work))
req.submit()
req.wait()

t2 = time.time()
print "\n\n"
print "LAZYFLOW REQUEST WAIT: %f seconds for %d iterations" % (t2 - t1, mcount)
print " %0.3fms latency" % ((t2 - t1) * 1e3 / mcount,)

t1 = time.time()

pool = Pool()

for i in range(50000):
    pool.request(functools.partial(empty_func, b=11))

def _onTrackButtonPressed(self):
    if not self.mainOperator.ObjectFeatures.ready():
        self._criticalMessage("You have to compute object features first.")
        return

    def _track():
        self.applet.busy = True
        self.applet.appletStateUpdateRequested.emit()
        app = self._drawer.appSpinBox.value()
        dis = self._drawer.disSpinBox.value()
        opp = self._drawer.oppSpinBox.value()
        noiserate = self._drawer.noiseRateSpinBox.value()
        noiseweight = self._drawer.noiseWeightSpinBox.value()
        epGap = self._drawer.epGapSpinBox.value()
        n_neighbors = self._drawer.nNeighborsSpinBox.value()
        with_div = self._drawer.withDivisionsBox.isChecked()

        cplex_timeout = None
        if len(str(self._drawer.timeoutBox.text())):
            cplex_timeout = int(self._drawer.timeoutBox.text())

        from_t = self._drawer.from_time.value()
        to_t = self._drawer.to_time.value()
        from_x = self._drawer.from_x.value()
        to_x = self._drawer.to_x.value()
        from_y = self._drawer.from_y.value()
        to_y = self._drawer.to_y.value()
        from_z = self._drawer.from_z.value()
        to_z = self._drawer.to_z.value()
        from_size = self._drawer.from_size.value()
        to_size = self._drawer.to_size.value()

        try:
            self.mainOperator.track(
                time_range=range(from_t, to_t + 1),
                x_range=(from_x, to_x + 1),
                y_range=(from_y, to_y + 1),
                z_range=(from_z, to_z + 1),
                size_range=(from_size, to_size + 1),
                x_scale=self._drawer.x_scale.value(),
                y_scale=self._drawer.y_scale.value(),
                z_scale=self._drawer.z_scale.value(),
                app=app,
                dis=dis,
                noiserate=noiserate,
                noiseweight=noiseweight,
                opp=opp,
                ep_gap=epGap,
                n_neighbors=n_neighbors,
                with_div=with_div,
                cplex_timeout=cplex_timeout)
        except Exception:
            ex_type, ex, tb = sys.exc_info()
            traceback.print_tb(tb)
            self._criticalMessage("Exception(" + str(ex_type) + "): " + str(ex))
            return

    def _handle_finished(*args):
        self.applet.progressSignal.emit(100)
        self._drawer.TrackButton.setEnabled(True)
        self._drawer.exportButton.setEnabled(True)
        self._drawer.exportTifButton.setEnabled(True)
        self._setLayerVisible("Objects", False)
        self.applet.busy = False
        self.applet.appletStateUpdateRequested.emit()

    def _handle_failure(exc, exc_info):
        self.applet.progressSignal.emit(100)
        traceback.print_exception(*exc_info)
        sys.stderr.write("Exception raised during tracking. See traceback above.\n")
        self._drawer.TrackButton.setEnabled(True)
        self.applet.busy = False
        self.applet.appletStateUpdateRequested.emit()

    self._drawer.TrackButton.setEnabled(False)
    self.applet.progressSignal.emit(0)
    self.applet.progressSignal.emit(-1)
    req = Request(_track)
    req.notify_failed(_handle_failure)
    req.notify_finished(_handle_finished)
    req.submit()

def export_object_data(self, lane_index, show_gui=False, filename_suffix=""):
    """
    Initialize progress displays and start the actual export in a new thread using the lazyflow.request framework

    Args:
        lane_index (int): Index of the lane to be exported
        show_gui (bool, optional): boolean to determine whether or not to show gui
        filename_suffix (str, optional): If provided, appended to the filename (before the extension)

    Returns:
        lazyflow.request.Request: Request object from which the result can be obtained.
    """
    settings, selected_features = self.get_table_export_settings()
    if not settings:
        return Request.with_value(None)

    self.save_export_progress_dialog(None)
    if not show_gui:
        progress_display = ProgressPrinter("Export Progress", range(100, -1, -5), 2)
        gui = None
    else:
        from ilastik.widgets.progressDialog import ProgressDialog
        progress = ProgressDialog(["Feature Data", "Labeling Rois", "Raw Image", "Exporting"])
        progress.set_busy(True)
        progress.show()
        gui = {
            "dialog": progress,
            "ok": partial(progress.safe_popup, "information", "Information", "Export successful!"),
            "cancel": partial(progress.safe_popup, "information", "Information", "Export cancelled!"),
            "fail": partial(progress.safe_popup, "critical", "Critical", "Export failed!"),
            "unlock": self.unlock_gui,
            "lock": self.lock_gui,
        }
        progress_display = gui["dialog"]
    self.save_export_progress_dialog(progress_display)

    export = partial(self.do_export, settings, selected_features, progress_display, lane_index, filename_suffix)
    request = Request(export)
    if gui is not None:
        if "fail" in gui:
            request.notify_failed(gui["fail"])
        if "ok" in gui:
            request.notify_finished(gui["ok"])
        if "cancel" in gui:
            request.notify_cancelled(gui["cancel"])
        if "unlock" in gui:
            request.notify_cancelled(gui["unlock"])
            request.notify_failed(gui["unlock"])
            request.notify_finished(gui["unlock"])
        if "lock" in gui:
            lock = gui["lock"]
            lock()
    request.notify_failed(self.export_failed)
    request.notify_finished(self.export_finished)
    request.notify_cancelled(self.export_cancelled)
    request.submit()

    if gui is not None and "dialog" in gui:
        progress_display.cancel.connect(request.cancel)

    return request

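# Given the Returns note in the docstring above, a caller can block on the
# returned request. A hedged sketch: object_info_gui is a hypothetical instance
# of the exporting class; any object exposing export_object_data works the same.
request = object_info_gui.export_object_data(lane_index=0, show_gui=False, filename_suffix="_run1")
request.wait()  # blocks until do_export() has completed (re-raises if the export failed)
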
def export_object_data(self, lane_index, show_gui=False, filename_suffix=""):
    """
    Initialize progress displays and start the actual export in a new thread using the lazyflow.request framework

    Args:
        lane_index (int): Index of the lane to be exported
        show_gui (bool, optional): boolean to determine whether or not to show gui
        filename_suffix (str, optional): If provided, appended to the filename (before the extension)

    Returns:
        lazyflow.request.Request: Request object from which the result can be obtained.
    """
    settings, selected_features = self.get_table_export_settings()

    self.save_export_progress_dialog(None)
    if not show_gui:
        progress_display = ProgressPrinter("Export Progress", range(100, -1, -5), 2)
        gui = None
    else:
        from ilastik.widgets.progressDialog import ProgressDialog
        progress = ProgressDialog(["Feature Data", "Labeling Rois", "Raw Image", "Exporting"])
        progress.set_busy(True)
        progress.show()
        gui = {
            "dialog": progress,
            "ok": partial(progress.safe_popup, "information", "Information", "Export successful!"),
            "cancel": partial(progress.safe_popup, "information", "Information", "Export cancelled!"),
            "fail": partial(progress.safe_popup, "critical", "Critical", "Export failed!"),
            "unlock": self.unlock_gui,
            "lock": self.lock_gui
        }
        progress_display = gui["dialog"]
    self.save_export_progress_dialog(progress_display)

    export = partial(self.do_export, settings, selected_features, progress_display, lane_index, filename_suffix)
    request = Request(export)
    if gui is not None:
        if "fail" in gui:
            request.notify_failed(gui["fail"])
        if "ok" in gui:
            request.notify_finished(gui["ok"])
        if "cancel" in gui:
            request.notify_cancelled(gui["cancel"])
        if "unlock" in gui:
            request.notify_cancelled(gui["unlock"])
            request.notify_failed(gui["unlock"])
            request.notify_finished(gui["unlock"])
        if "lock" in gui:
            lock = gui["lock"]
            lock()
    request.notify_failed(self.export_failed)
    request.notify_finished(self.export_finished)
    request.notify_cancelled(self.export_cancelled)
    request.submit()

    if gui is not None and "dialog" in gui:
        progress_display.cancel.connect(request.cancel)

    return request

def exportFinalSupervoxels(self, outputPath, axisorder, progressCallback=None):
    """
    Executes the export process within a request.
    The (already-running) request is returned, in case you want to wait for it or monitor its progress.
    """
    assert self.FinalSupervoxels.ready(), "Can't export yet: The final segmentation isn't ready!"

    logger.info("Starting Final Segmentation Export...")

    opTranspose = OpReorderAxes(parent=self)
    opTranspose.AxisOrder.setValue(axisorder)
    opTranspose.Input.connect(self.FinalSupervoxels)

    f = h5py.File(outputPath, 'w')
    opExporter = OpH5WriterBigDataset(parent=self)
    opExporter.hdf5File.setValue(f)
    opExporter.hdf5Path.setValue('stack')
    opExporter.Image.connect(opTranspose.Output)
    if progressCallback is not None:
        opExporter.progressSignal.subscribe(progressCallback)

    req = Request(partial(self._runExporter, opExporter))

    def cleanOps():
        opExporter.cleanUp()
        opTranspose.cleanUp()

    def handleFailed(exc, exc_info):
        cleanOps()
        f.close()
        import traceback
        traceback.print_tb(exc_info[2])
        msg = "Final Supervoxel export FAILED due to the following error:\n{}".format(exc)
        logger.error(msg)

    def handleFinished(result):
        # Generate the mapping transforms dataset
        mapping = self._opAccumulateFinalImage.Mapping.value
        num_labels = mapping.keys()[-1][1]
        transform = numpy.zeros(shape=(num_labels, 2), dtype=numpy.uint32)
        for (start, stop), body_id in mapping.items():
            for supervoxel_label in range(start, stop):
                transform[supervoxel_label][0] = supervoxel_label
                if body_id == -1:
                    # Special case: -1 means "identity transform" for this supervoxel
                    # (Which is really an untouched raveler body)
                    transform[supervoxel_label][1] = supervoxel_label
                else:
                    transform[supervoxel_label][1] = body_id

        # Save the transform before closing the file
        f.create_dataset('transforms', data=transform)

        # Copy all other datasets from the original segmentation file.
        ravelerSegmentationInfo = self.DatasetInfos[2].value
        pathComponents = PathComponents(ravelerSegmentationInfo.filePath, self.WorkingDirectory.value)
        with h5py.File(pathComponents.externalPath, 'r') as originalFile:
            for k, dset in originalFile.items():
                if k not in ['transforms', 'stack']:
                    f.copy(dset, k)

        try:
            cleanOps()
            logger.info("FINISHED Final Supervoxel Export")
        finally:
            f.close()

    def handleCancelled():
        cleanOps()
        f.close()
        logger.info("Final Supervoxel export was cancelled!")

    req.notify_failed(handleFailed)
    req.notify_finished(handleFinished)
    req.notify_cancelled(handleCancelled)
    req.submit()
    return req  # Returned in case the user wants to cancel it.

usePool = False

t = time()
pool = RequestPool()
for i in range(5):
    req = Request(f)
    pool.add(req)
pool.wait()
print(time() - t)

t = time()
agg = []
for i in range(5):
    req = Request(f)
    req.submit()
    agg.append(req)

for i in range(5):
    agg[i].wait()
print(time() - t)

t = time()
agg = []
for i in range(5):
    req = Request(f)
    agg.append(req)

for i in range(5):
    agg[i].wait()
print(time() - t)

def _onTrackButtonPressed(self):
    if not self.mainOperator.ObjectFeatures.ready():
        self._criticalMessage("You have to compute object features first.")
        return

    def _track():
        self.applet.busy = True
        self.applet.appletStateUpdateRequested.emit()
        app = self._drawer.appSpinBox.value()
        dis = self._drawer.disSpinBox.value()
        opp = self._drawer.oppSpinBox.value()
        noiserate = self._drawer.noiseRateSpinBox.value()
        noiseweight = self._drawer.noiseWeightSpinBox.value()
        epGap = self._drawer.epGapSpinBox.value()
        n_neighbors = self._drawer.nNeighborsSpinBox.value()
        with_div = self._drawer.withDivisionsBox.isChecked()

        cplex_timeout = None
        if len(str(self._drawer.timeoutBox.text())):
            cplex_timeout = int(self._drawer.timeoutBox.text())

        from_t = self._drawer.from_time.value()
        to_t = self._drawer.to_time.value()
        from_x = self._drawer.from_x.value()
        to_x = self._drawer.to_x.value()
        from_y = self._drawer.from_y.value()
        to_y = self._drawer.to_y.value()
        from_z = self._drawer.from_z.value()
        to_z = self._drawer.to_z.value()
        from_size = self._drawer.from_size.value()
        to_size = self._drawer.to_size.value()

        try:
            self.mainOperator.track(time_range=range(from_t, to_t + 1),
                                    x_range=(from_x, to_x + 1),
                                    y_range=(from_y, to_y + 1),
                                    z_range=(from_z, to_z + 1),
                                    size_range=(from_size, to_size + 1),
                                    x_scale=self._drawer.x_scale.value(),
                                    y_scale=self._drawer.y_scale.value(),
                                    z_scale=self._drawer.z_scale.value(),
                                    app=app,
                                    dis=dis,
                                    noiserate=noiserate,
                                    noiseweight=noiseweight,
                                    opp=opp,
                                    ep_gap=epGap,
                                    n_neighbors=n_neighbors,
                                    with_div=with_div,
                                    cplex_timeout=cplex_timeout)
        except Exception:
            ex_type, ex, tb = sys.exc_info()
            log_exception(logger)
            self._criticalMessage("Exception(" + str(ex_type) + "): " + str(ex))
            return

    def _handle_finished(*args):
        self.applet.progressSignal.emit(100)
        self._drawer.TrackButton.setEnabled(True)
        self._drawer.exportButton.setEnabled(True)
        self._drawer.exportTifButton.setEnabled(True)
        self._setLayerVisible("Objects", False)
        self.applet.busy = False
        self.applet.appletStateUpdateRequested.emit()

    def _handle_failure(exc, exc_info):
        self.applet.progressSignal.emit(100)
        msg = "Exception raised during tracking.  See traceback above.\n"
        log_exception(logger, msg, exc_info)
        self._drawer.TrackButton.setEnabled(True)
        self.applet.busy = False
        self.applet.appletStateUpdateRequested.emit()

    self._drawer.TrackButton.setEnabled(False)
    self.applet.progressSignal.emit(0)
    self.applet.progressSignal.emit(-1)
    req = Request(_track)
    req.notify_failed(_handle_failure)
    req.notify_finished(_handle_finished)
    req.submit()

def exportFinalSupervoxels(self, outputPath, axisorder, progressCallback=None):
    """
    Executes the export process within a request.
    The (already-running) request is returned, in case you want to wait for it or monitor its progress.
    """
    assert self.FinalSupervoxels.ready(), "Can't export yet: The final segmentation isn't ready!"

    logger.info("Starting Final Segmentation Export...")

    opTranspose = OpReorderAxes(parent=self)
    opTranspose.AxisOrder.setValue(axisorder)
    opTranspose.Input.connect(self.FinalSupervoxels)

    f = h5py.File(outputPath, 'w')
    opExporter = OpH5WriterBigDataset(parent=self)
    opExporter.hdf5File.setValue(f)
    opExporter.hdf5Path.setValue('stack')
    opExporter.Image.connect(opTranspose.Output)
    if progressCallback is not None:
        opExporter.progressSignal.subscribe(progressCallback)

    req = Request(partial(self._runExporter, opExporter))

    def cleanOps():
        opExporter.cleanUp()
        opTranspose.cleanUp()

    def handleFailed(exc, exc_info):
        cleanOps()
        f.close()
        msg = "Final Supervoxel export FAILED due to the following error:\n{}".format(exc)
        log_exception(logger, msg, exc_info)

    def handleFinished(result):
        # Generate the mapping transforms dataset
        mapping = self._opAccumulateFinalImage.Mapping.value
        num_labels = mapping.keys()[-1][1]
        transform = numpy.zeros(shape=(num_labels, 2), dtype=numpy.uint32)
        for (start, stop), body_id in mapping.items():
            for supervoxel_label in range(start, stop):
                transform[supervoxel_label][0] = supervoxel_label
                if body_id == -1:
                    # Special case: -1 means "identity transform" for this supervoxel
                    # (Which is really an untouched raveler body)
                    transform[supervoxel_label][1] = supervoxel_label
                else:
                    transform[supervoxel_label][1] = body_id

        # Save the transform before closing the file
        f.create_dataset('transforms', data=transform)

        # Copy all other datasets from the original segmentation file.
        ravelerSegmentationInfo = self.DatasetInfos[2].value
        pathComponents = PathComponents(ravelerSegmentationInfo.filePath, self.WorkingDirectory.value)
        with h5py.File(pathComponents.externalPath, 'r') as originalFile:
            for k, dset in originalFile.items():
                if k not in ['transforms', 'stack']:
                    f.copy(dset, k)

        try:
            cleanOps()
            logger.info("FINISHED Final Supervoxel Export")
        finally:
            f.close()

    def handleCancelled():
        cleanOps()
        f.close()
        logger.info("Final Supervoxel export was cancelled!")

    req.notify_failed(handleFailed)
    req.notify_finished(handleFinished)
    req.notify_cancelled(handleCancelled)
    req.submit()
    return req  # Returned in case the user wants to cancel it.