def process_proc_tasks(
        path: Optional[str] = None,
        series_list: Optional[Sequence[int]] = None
) -> Optional[Dict[config.ProcessTypes, Any]]:
    """Apply all enabled processing tasks to an image.

    Args:
        path: Base path to main image file; defaults to None, in which case
            :attr:`config.filename` will be used.
        series_list: Sequence of image series numbers; defaults to None, in
            which case :attr:`config.series_list` will be used.

    Returns:
        Dictionary of enabled process types mapped to their set values, or
        None if no image filename is available.

    """
    img_path = config.filename if path is None else path
    if not img_path:
        print("No image filename set for processing files, skipping")
        return None
    if series_list is None:
        series_list = config.series_list

    # keep only the tasks whose value is set
    tasks = {task: val for task, val in config.proc_type.items() if val}

    for series in series_list:
        # process files for each series, typically a tile within a
        # microscopy image set or a single whole image; pull any sub-image
        # parameters embedded in the filename
        filename, offset, size, reg_suffixes = importer.deconstruct_img_name(
            img_path)
        has_subimg, _ = importer.parse_deconstructed_name(
            filename, offset, size, reg_suffixes)
        if not has_subimg:
            # sub-image parameters in the filename take precedence; fall
            # back to user-supplied arguments when absent
            offset = None
            size = None
            if config.subimg_offsets:
                offset = config.subimg_offsets[0]
            if config.subimg_sizes:
                size = config.subimg_sizes[0]

        if not tasks:
            # no task specified; only set up the image, eg for display
            np_io.setup_images(filename, series, offset, size)
            continue

        for task, val in tasks.items():
            # set up the image for this specific task, then run it
            np_io.setup_images(
                filename, series, offset, size, task,
                fallback_main_img=False)
            process_file(
                filename, task, val, series, offset, size,
                config.roi_offsets[0] if config.roi_offsets else None,
                config.roi_sizes[0] if config.roi_sizes else None)
    return tasks
def process_tasks(): """Process command-line tasks. Perform tasks set by the ``--proc`` parameter or any other entry point, such as ``--register`` tasks. Only the first identified task will be performed. """ # if command-line driven task specified, start task and shut down if config.register_type: register.main() elif config.notify_url: notify.main() elif config.plot_2d_type: plot_2d.main() elif config.df_task: df_io.main() elif config.grid_search_profile: _grid_search(config.series_list) elif config.ec2_list or config.ec2_start or config.ec2_terminate: # defer importing AWS module to avoid making its dependencies # required for MagellanMapper from magmap.cloud import aws aws.main() else: if config.filename: for series in config.series_list: # process files for each series, typically a tile within a # microscopy image set or a single whole image filename, offset, size, reg_suffixes = \ importer.deconstruct_img_name(config.filename) set_subimg, _ = importer.parse_deconstructed_name( filename, offset, size, reg_suffixes) if not set_subimg: # sub-image parameters set in filename takes precedence for # the loaded image, but fall back to user-supplied args offset = (config.subimg_offsets[0] if config.subimg_offsets else None) size = (config.subimg_sizes[0] if config.subimg_sizes else None) np_io.setup_images(filename, series, offset, size, config.proc_type) process_file( filename, config.proc_type, series, offset, size, config.roi_offsets[0] if config.roi_offsets else None, config.roi_sizes[0] if config.roi_sizes else None) else: print("No image filename set for processing files, skipping") proc_type = libmag.get_enum(config.proc_type, config.ProcessTypes) if proc_type is None or proc_type is config.ProcessTypes.LOAD: # do not shut down since not a command-line task or if loading files return shutdown()
def _process_files(series_list):
    """Set up and process the configured image for each series.

    Wrapper that, for every series in ``series_list`` (typically a tile
    within a microscopy image set or a single whole image), sets up the
    image and then runs the configured processing task on it. Skips
    processing when no image filename is set.

    Args:
        series_list: Iterable of image series numbers to process.
    """
    if not config.filename:
        print("No image filename set for processing files, skipping")
        return
    for series in series_list:
        # resolve user-supplied sub-image and ROI parameters, defaulting
        # to None when unset
        sub_offset = None if not config.subimg_offsets \
            else config.subimg_offsets[0]
        sub_size = None if not config.subimg_sizes \
            else config.subimg_sizes[0]
        roi_offset = None if not config.roi_offsets \
            else config.roi_offsets[0]
        roi_size = None if not config.roi_sizes else config.roi_sizes[0]
        # load the image for this series, then run the configured task
        np_io.setup_images(
            config.filename, series, sub_offset, sub_size, config.proc_type)
        process_file(
            config.filename, config.proc_type, series, sub_offset, sub_size,
            roi_offset, roi_size)
def _detect_subimgs(
        path: str, series: int,
        subimg_offsets: List[List[int]],
        subimg_sizes: List[List[int]]
) -> Tuple[Union[np.ndarray, Any], List[str]]:
    """Detect blobs in an image across sub-image offsets.

    Args:
        path: Path to image from which MagellanMapper-style paths will
            be generated.
        series: Image series number.
        subimg_offsets: Nested list of sub-image offset sets given as
            ``[[offset_z1, offset_y1, offset_x1], ...]``.
        subimg_sizes: Nested list of sub-image size sets given as
            ``[[offset_z1, offset_y1, offset_x1], ...]`` and corresponding
            to ``subimg_offsets``.

    Returns:
        Tuple of the summed stats array and a list of per-offset
        feedback summaries.

    """
    # treat unset sub-image parameters as a single whole-image pass
    offsets = [None] if subimg_offsets is None else subimg_offsets
    sizes = [None] if subimg_sizes is None else subimg_sizes
    single_size = len(sizes) <= 1

    stat = np.zeros(3)
    summaries = []
    for i, offset in enumerate(offsets):
        # reuse the sole size entry when only one size was given
        size = sizes[0] if single_size else sizes[i]
        np_io.setup_images(path, series, offset, size)
        # run stack-wide blob detection on this sub-image
        stat_roi, fdbk, _ = stack_detect.detect_blobs_stack(
            importer.filename_to_base(path, series), offset, size)
        if stat_roi is not None:
            stat = np.add(stat, stat_roi)
        summaries.append("Offset {}:\n{}".format(offset, fdbk))
    return stat, summaries
def _iterate_file_processing(path, series, subimg_offsets, subimg_sizes):
    """Process a file iteratively across sub-image offsets.

    Args:
        path (str): Path to image from which MagellanMapper-style paths will
            be generated.
        series (int): Image series number.
        subimg_offsets (List[List[int]]): Nested list of sub-image offset
            sets given as ``[[offset_z1, offset_y1, offset_x1], ...]``.
        subimg_sizes (List[List[int]]): Nested list of sub-image size sets
            given as ``[[offset_z1, offset_y1, offset_x1], ...]`` and
            corresponding to ``subimg_offsets``.

    Returns:
        :obj:`np.ndarray`, str: Summed stats array and list of per-offset
        feedback summaries.

    """
    # fall back to a single whole-image pass when parameters are unset
    offsets = subimg_offsets if subimg_offsets is not None else [None]
    sizes = subimg_sizes if subimg_sizes is not None else [None]
    num_sizes = len(sizes)

    stat = np.zeros(3)
    summaries = []
    for i, offset in enumerate(offsets):
        # use the single given size for all offsets unless several were given
        size = sizes[i] if num_sizes > 1 else sizes[0]
        # prepare the image for the configured task at this offset, then
        # process it
        np_io.setup_images(path, series, offset, size, config.proc_type)
        stat_roi, fdbk = process_file(
            path, config.proc_type, series, offset, size)
        if stat_roi is not None:
            stat = np.add(stat, stat_roi)
        summaries.append("Offset {}:\n{}".format(offset, fdbk))
    return stat, summaries
def plot_clusters_by_label(path, z, suffix=None, show=True, scaling=None):
    """Plot separate sets of clusters for each label.
    
    Loads a blobs-with-clusters array and scatter-plots the blobs of each
    label in the given z-plane over the atlas (or a black background),
    coloring points by cluster and saving the figure with a ``_clusplot``
    modifier.
    
    Args:
        path (str): Base path to blobs file with clusters.
        z (int): z-plane to plot.
        suffix (str): Suffix for ``path``; defaults to None.
        show (bool): True to show; defaults to True.
        scaling (List): Sequence of scaling from blobs' coordinate space
            to that of :attr:`config.labels_img`.
    """
    mod_path = path
    if suffix is not None:
        mod_path = libmag.insert_before_ext(path, suffix)
    # blob columns used below: 0-2 are z,y,x coordinates, 3 is the label ID,
    # 4 is the cluster ID (as indexed throughout this function)
    blobs = np.load(libmag.combine_paths(
        mod_path, config.SUFFIX_BLOB_CLUSTERS))
    label_ids = np.unique(blobs[:, 3])
    # single-panel figure with hidden axes for the image background
    fig, gs = plot_support.setup_fig(
        1, 1, config.plot_labels[config.PlotLabels.SIZE])
    ax = fig.add_subplot(gs[0, 0])
    plot_support.hide_axes(ax)
    
    # plot underlying atlas
    np_io.setup_images(mod_path)
    if config.reg_suffixes[config.RegSuffixes.ATLAS]:
        # use atlas if explicitly set
        img = config.image5d
    else:
        # default to black background; [None] adds a leading axis to match
        # the expected image dimensions
        img = np.zeros_like(config.labels_img)[None]
    stacker = export_stack.setup_stack(
        img, mod_path, slice_vals=(z, z + 1),
        labels_imgs=(config.labels_img, config.borders_img))
    stacker.build_stack(ax, config.plot_labels[config.PlotLabels.SCALE_BAR])
    # export_stack.reg_planes_to_img(
    #     (np.zeros(config.labels_img.shape[1:], dtype=int),
    #      config.labels_img[z]), ax=ax)
    
    if scaling is not None:
        # rescale blob coordinates into the labels image's space; floor z
        # so plane membership tests below use integral planes
        print("scaling blobs cluster coordinates by", scaling)
        blobs = blobs.astype(float)
        blobs[:, :3] = np.multiply(blobs[:, :3], scaling)
        blobs[:, 0] = np.floor(blobs[:, 0])
    
    # plot nuclei by label, colored based on cluster size within each label
    colors = colormaps.discrete_colormap(
        len(np.unique(blobs[:, 4])), prioritize_default="cn") / 255.
    col_noise = (1, 1, 1, 1)  # white, reserved for noise points
    for label_id in label_ids:
        if label_id == 0:
            # skip blobs in background
            continue
        # sort blobs within label by cluster size (descending order),
        # including clusters within all z-planes to keep same order across zs
        blobs_lbl = blobs[blobs[:, 3] == label_id]
        clus_lbls, clus_lbls_counts = np.unique(
            blobs_lbl[:, 4], return_counts=True)
        clus_lbls = clus_lbls[np.argsort(clus_lbls_counts)][::-1]
        # restrict to the requested z-plane only after establishing order
        blobs_lbl = blobs_lbl[blobs_lbl[:, 0] == z]
        for i, (clus_lbl, color) in enumerate(zip(clus_lbls, colors)):
            blobs_clus = blobs_lbl[blobs_lbl[:, 4] == clus_lbl]
            if len(blobs_clus) < 1: continue
            # default to small, translucent dominant cluster points
            size = 0.1
            alpha = 0.5
            if clus_lbl == -1:
                # color all noise points the same and emphasize points
                color = col_noise
                size = 0.5
                alpha = 1
            print(label_id, clus_lbl, color, len(blobs_clus))
            # scatter x (col 2) against y (col 1) for this cluster
            ax.scatter(
                blobs_clus[:, 2], blobs_clus[:, 1], color=color, s=size,
                alpha=alpha)
    plot_support.save_fig(mod_path, config.savefig, "_clusplot")
    if show: plot_support.show()
def stack_to_img(paths, roi_offset, roi_size, series=None, subimg_offset=None,
                 subimg_size=None, animated=False, suffix=None):
    """Build an image file from a stack of images in a directory or an
    array, exporting as an animated GIF or movie for multiple planes or
    extracting a single plane to a standard image file format.
    
    Writes the file to the parent directory of path.
    
    Args:
        paths (List[str]): Image paths, which can each be either an image
            directory or a base path to a single image, including
            volumetric images.
        roi_offset (Sequence[int]): Tuple of offset given in user order
            ``x,y,z``; defaults to None. Requires ``roi_size`` to not be
            None.
        roi_size (Sequence[int]): Size of the region of interest in user
            order ``x,y,z``; defaults to None. Requires ``roi_offset`` to
            not be None.
        series (int): Image series number; defaults to None.
        subimg_offset (List[int]): Sub-image offset as (z,y,x) to load;
            defaults to None.
        subimg_size (List[int]): Sub-image size as (z,y,x) to load;
            defaults to None.
        animated (bool): True to export as an animated image;
            defaults to False.
        suffix (str): String to append to output path before extension;
            defaults to None to ignore.
    
    """
    # set up figure layout for collages
    size = config.plot_labels[config.PlotLabels.LAYOUT]
    ncols, nrows = size if size else (1, 1)
    num_paths = len(paths)
    collage = num_paths > 1
    # map of plane index to its figure dict ({"fig", "gs", "imgs"}); an
    # animated export uses only the single entry at plane index 0
    figs = {}
    
    for i in range(nrows):
        for j in range(ncols):
            # flatten the grid position to an index into paths
            n = i * ncols + j
            if n >= num_paths: break
            # load an image and set up its image stacker
            path_sub = paths[n]
            axs = []
            # TODO: test directory of images
            # TODO: consider not reloading first image
            np_io.setup_images(path_sub, series, subimg_offset, subimg_size)
            stacker = setup_stack(
                config.image5d, path_sub, offset=roi_offset,
                roi_size=roi_size, slice_vals=config.slice_vals,
                rescale=config.transform[config.Transforms.RESCALE],
                labels_imgs=(config.labels_img, config.borders_img))
            
            # add sub-plot title unless groups given as empty string
            title = None
            if config.groups:
                title = libmag.get_if_within(config.groups, n)
            elif num_paths > 1:
                title = os.path.basename(path_sub)
            
            if not stacker.images: continue
            ax = None
            for k in range(len(stacker.images[0])):
                # create or retrieve fig; animation has only 1 fig
                planei = 0 if animated else (
                        stacker.img_slice.start + k * stacker.img_slice.step)
                fig_dict = figs.get(planei)
                if not fig_dict:
                    # set up new fig
                    fig, gs = plot_support.setup_fig(
                        nrows, ncols,
                        config.plot_labels[config.PlotLabels.SIZE])
                    fig_dict = {"fig": fig, "gs": gs, "imgs": []}
                    figs[planei] = fig_dict
                if ax is None:
                    # generate new axes for the gridspec position
                    ax = fig_dict["fig"].add_subplot(fig_dict["gs"][i, j])
                if title:
                    ax.title.set_text(title)
                axs.append(ax)
            
            # export planes
            plotted_imgs = stacker.build_stack(
                axs, config.plot_labels[config.PlotLabels.SCALE_BAR],
                size is None or ncols * nrows == 1)
            
            if animated:
                # store all plotted images in single fig
                fig_dict = figs.get(0)
                if fig_dict:
                    fig_dict["imgs"] = plotted_imgs
            else:
                # store one plotted image per fig; not used currently
                for fig_dict, img in zip(figs.values(), plotted_imgs):
                    fig_dict["imgs"].append(img)
    
    path_base = paths[0]
    for planei, fig_dict in figs.items():
        if animated:
            # generate animated image (eg animated GIF or movie file)
            animate_imgs(
                path_base, fig_dict["imgs"], config.delay, config.savefig,
                suffix)
        else:
            # generate single figure with axis and plane index in filename
            if collage:
                # output filename as a collage of images
                # NOTE(review): path_base is rebuilt each loop iteration;
                # after the first pass it already ends in "collage", and
                # the dirname/join appears to leave it unchanged — confirm
                if not os.path.isdir(path_base):
                    path_base = os.path.dirname(path_base)
                path_base = os.path.join(path_base, "collage")
            # insert mod as suffix, then add any additional suffix;
            # can use config.prefix_out for make_out_path prefix
            mod = "_plane_{}{}".format(
                plot_support.get_plane_axis(config.plane), planei)
            out_path = libmag.make_out_path(path_base, suffix=mod)
            if suffix:
                out_path = libmag.insert_before_ext(out_path, suffix)
            plot_support.save_fig(
                out_path, config.savefig, fig=fig_dict["fig"])
# NOTE(review): this is a second definition of ``stack_to_img``; being
# defined later in the module, it shadows the earlier one at import time.
# Presumably one of the two versions should be removed — confirm intent.
def stack_to_img(paths, roi_offset, roi_size, series=None, subimg_offset=None,
                 subimg_size=None, animated=False, suffix=None):
    """Build an image file from a stack of images in a directory or an
    array, exporting as an animated GIF or movie for multiple planes or
    extracting a single plane to a standard image file format.
    
    Writes the file to the parent directory of path.
    
    Args:
        paths (List[str]): Image paths, which can each be either an image
            directory or a base path to a single image, including
            volumetric images.
        roi_offset (Sequence[int]): Tuple of offset given in user order
            ``x,y,z``; defaults to None. Requires ``roi_size`` to not be
            None.
        roi_size (Sequence[int]): Size of the region of interest in user
            order ``x,y,z``; defaults to None. Requires ``roi_offset`` to
            not be None.
        series (int): Image series number; defaults to None.
        subimg_offset (List[int]): Sub-image offset as (z,y,x) to load;
            defaults to None.
        subimg_size (List[int]): Sub-image size as (z,y,x) to load;
            defaults to None.
        animated (bool): True to export as an animated image;
            defaults to False.
        suffix (str): String to append to output path before extension;
            defaults to None to ignore.
    
    """
    # figure layout from config; default to a single-panel 1x1 grid
    size = config.plot_labels[config.PlotLabels.LAYOUT]
    ncols, nrows = size if size else (1, 1)
    fig, gs = plot_support.setup_fig(
        nrows, ncols, config.plot_labels[config.PlotLabels.SIZE])
    plotted_imgs = None
    num_paths = len(paths)
    for i in range(nrows):
        for j in range(ncols):
            # flatten the grid position to an index into paths
            n = i * ncols + j
            if n >= num_paths: break
            ax = fig.add_subplot(gs[i, j])
            path_sub = paths[n]
            # TODO: test directory of images
            # TODO: avoid reloading first image
            np_io.setup_images(path_sub, series, subimg_offset, subimg_size)
            # plot this image's planes onto the axes for its grid position
            plotted_imgs = stack_to_ax_imgs(
                ax, config.image5d, path_sub, offset=roi_offset,
                roi_size=roi_size, slice_vals=config.slice_vals,
                rescale=config.transform[config.Transforms.RESCALE],
                labels_imgs=(config.labels_img, config.borders_img),
                multiplane=animated,
                fit=(size is None or ncols * nrows == 1))
    path_base = paths[0]
    if animated:
        # generate animated image (eg animated GIF or movie file)
        animate_imgs(
            path_base, plotted_imgs, config.delay, config.savefig, suffix)
    else:
        # save image as single file
        if roi_offset:
            # get plane index from coordinate at the given axis in ROI offset
            planei = roi_offset[::-1][plot_support.get_plane_axis(
                config.plane, get_index=True)]
        else:
            # get plane index from slice start
            planei = config.slice_vals[0]
        if num_paths > 1:
            # output filename as a collage of images
            if not os.path.isdir(path_base):
                path_base = os.path.dirname(path_base)
            path_base = os.path.join(path_base, "collage")
        # encode the plane axis and index into the output filename
        mod = "_plane_{}{}".format(
            plot_support.get_plane_axis(config.plane), planei)
        if suffix:
            path_base = libmag.insert_before_ext(path_base, suffix)
        plot_support.save_fig(path_base, config.savefig, mod)