def raw2seg(config):
    input_paths = load_paths(config)
    gui_logger.info(f"Running the pipeline on: {input_paths}")
    gui_logger.info("Executing pipeline, see terminal for verbose logs.")

    all_pipeline_steps = [('preprocessing', configure_preprocessing_step),
                          ('cnn_prediction', configure_cnn_step),
                          ('cnn_postprocessing', configure_cnn_postprocessing_step),
                          ('segmentation', configure_segmentation_step),
                          ('segmentation_postprocessing', configure_segmentation_postprocessing_step)]

    for pipeline_step_name, pipeline_step_setup in all_pipeline_steps:
        gui_logger.info(
            f"Executing pipeline step: '{pipeline_step_name}'. "
            f"Parameters: '{config[pipeline_step_name]}'. Files {input_paths}."
        )

        pipeline_step = pipeline_step_setup(input_paths, config[pipeline_step_name])
        output_paths = pipeline_step()

        # replace input_paths for all pipeline steps except DataPostProcessing3D
        if not isinstance(pipeline_step, DataPostProcessing3D):
            input_paths = output_paths

    gui_logger.info("Pipeline execution finished!")
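As a usage sketch, raw2seg expects a config dictionary containing one sub-dictionary per pipeline step, matching the five step names iterated above. The 'path' key and the per-step parameter dictionaries below are assumptions for illustration; only the five step names are taken directly from the loop.

# Hypothetical minimal configuration for raw2seg.
# The 'path' entry consumed by load_paths and the per-step contents are assumptions;
# the five section names mirror all_pipeline_steps above.
config = {
    'path': 'raw/stack.h5',                     # assumed input location read by load_paths
    'preprocessing': {'state': True},           # placeholder per-step parameters
    'cnn_prediction': {'state': True},
    'cnn_postprocessing': {'state': True},
    'segmentation': {'state': True},
    'segmentation_postprocessing': {'state': True},
}

raw2seg(config)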
def __init__(self,
             predictions_paths,
             nuclei_predictions_path,
             save_directory="LiftedMulticut",
             beta=0.6,
             run_ws=True,
             ws_2D=True,
             ws_threshold=0.4,
             ws_minsize=50,
             ws_sigma=2.0,
             ws_w_sigma=0,
             post_minsize=50,
             n_threads=6,
             state=True,
             **kwargs):
    super().__init__(input_paths=predictions_paths,
                     save_directory=save_directory,
                     file_suffix='_lmc',
                     state=state)

    self.nuclei_predictions_paths = load_paths(nuclei_predictions_path)
    self.beta = beta

    # Watershed parameters
    self.run_ws = run_ws
    self.ws_2D = ws_2D
    self.ws_threshold = ws_threshold
    self.ws_minsize = ws_minsize
    self.ws_sigma = ws_sigma
    self.ws_w_sigma = ws_w_sigma

    # Post processing size threshold
    self.post_minsize = post_minsize

    # Multithread
    self.n_threads = n_threads
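A minimal instantiation sketch follows. The class name LiftedMulticut is inferred from the default save_directory and the '_lmc' file suffix, and the file paths are placeholders; that the resulting step is callable (returning output paths) is assumed from how pipeline steps are invoked in raw2seg above.

# Hypothetical usage; class name and paths are assumptions for illustration.
step = LiftedMulticut(
    predictions_paths=['predictions/boundaries.h5'],  # placeholder boundary prediction file(s)
    nuclei_predictions_path='predictions/nuclei.h5',  # placeholder; expanded internally via load_paths
    beta=0.6,        # multicut boundary bias, as in the constructor default
    run_ws=True,     # compute watershed superpixels before the lifted multicut
    n_threads=6,
)
output_paths = step()  # assumed callable, mirroring pipeline_step() in raw2seg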