Example #1
0
def build_heat_map(shape, coords):
    """Build a heat map of point density for an image.

    The map is produced at the resolution of ``shape``, generally assuming
    that ``coords`` have already been scaled from a larger size; it can be
    rescaled afterward to view density at other resolutions.

    Args:
        shape: Shape of the image containing ``coords``.
        coords: Array of point coordinates of shape (n, m), where n is the
            number of coordinate sets and m the number of dimensions.

    Returns:
        :obj:`np.ndarray`: An image of shape ``shape`` whose values give
        the number of point occurrences at each pixel.
    """
    if coords is None or len(coords) == 0:
        # no points available; return an all-zero map with a small int type
        return np.zeros(shape, dtype=np.uint8)
    # count points sharing the same coordinate as the density measure
    uniq, counts = np.unique(coords, return_counts=True, axis=0)
    indices = libmag.coords_for_indexing(uniq)
    # smallest unsigned type able to hold the largest count
    out_dtype = libmag.dtype_within_range(0, np.amax(counts), True, False)
    heat_map = np.zeros(shape, dtype=out_dtype)
    heat_map[tuple(indices)] = counts
    return heat_map
Example #2
0
def sub_segment_labels(labels_img_np, atlas_edge):
    """Sub-segment a labels image into sub-labels based on anatomical 
    boundaries.
    
    Args:
        labels_img_np: Integer labels image as a Numpy array.
        atlas_edge: Numpy array of atlas reduced to binary image of its edges.
    
    Returns:
        Image as a Numpy array of same shape as ``labels_img_np`` with 
        each label sub-segmented based on anatomical boundaries. Labels 
        in this image will correspond to the original labels 
        multiplied by :const:``config.SUB_SEG_MULT`` to make room for 
        sub-labels, which will each be incremented by 1.
    """
    start_time = time()
    
    # use a class to set and process the label without having to 
    # reference the labels image as a global variable
    SubSegmenter.set_images(labels_img_np, atlas_edge)
    
    pool = chunking.get_mp_pool()
    pool_results = []
    label_ids = np.unique(labels_img_np)
    # output values can reach +/- (max label * (SUB_SEG_MULT + 1)), so pick
    # the smallest signed dtype that can hold that full range
    max_val = np.amax(labels_img_np) * (config.SUB_SEG_MULT + 1)
    dtype = libmag.dtype_within_range(-max_val, max_val, True)
    subseg = np.zeros_like(labels_img_np, dtype=dtype)
    
    for label_id in label_ids:
        # skip background
        if label_id == 0: continue
        # fan out one sub-segmentation task per label to the process pool
        pool_results.append(
            pool.apply_async(
                SubSegmenter.sub_segment, args=(label_id, dtype)))
    
    for result in pool_results:
        label_id, slices, labels_seg = result.get()
        # can only mutate markers outside of mp for changes to persist;
        # tuple-of-slices indexing yields a view, so the masked assignment
        # writes through into subseg
        labels_seg_mask = labels_seg != 0
        subseg[tuple(slices)][labels_seg_mask] = labels_seg[labels_seg_mask]
        print("finished sub-segmenting label ID {}".format(label_id))
    pool.close()
    pool.join()
    
    print("time elapsed to sub-segment labels image:", time() - start_time)
    return subseg
Example #3
0
def remove_close_blobs(blobs, blobs_master, tol, chunk_size=1000):
    """Removes blobs that are close to one another.
    
    Args:
        blobs: The blobs to be checked for closeness and pruning, given as 2D 
            array of [n, [z, row, column, ...]].
        blobs_master: The list by which to check for close blobs, in the same
            format as blobs.
        tol: Tolerance to check for closeness, given in the same format
            as region. Blobs that are equal to or less than the absolute
            difference for all corresponding parameters will be pruned in
            the returned array.
        chunk_size: Max size along first dimension for each blob array 
            to minimize memory consumption; defaults to 1000.
    
    Returns:
        Tuple of the blobs array after pruning and ``blobs_master`` with 
        absolute coordinates updated with the average of any 
        corresponding duplicates. Note that ``blobs_master`` is mutated
        in-place as well as returned.
    """
    num_blobs_check = len(blobs)
    num_blobs_master = len(blobs_master)
    if num_blobs_check < 1 or num_blobs_master < 1:
        # no blobs to remove if either array is empty
        return blobs, blobs_master
    
    # smallest type to hold blob coordinates, signed to use for diffs
    dtype = libmag.dtype_within_range(
        0, np.amax((np.amax(blobs[:, :3]), np.amax(blobs_master[:, :3]))), 
        True, True)
    match_check = None
    match_master = None
    
    # chunk both master and check array for consistent max array size; 
    # compare each master chunk to each check chunk and save matches 
    # to prune at end
    i = 0
    while i * chunk_size < num_blobs_master:
        start_master = i * chunk_size
        end_master = (i + 1) * chunk_size
        blobs_ref = blobs_master[start_master:end_master, :3].astype(dtype)
        j = 0
        while j * chunk_size < num_blobs_check:
            start_check = j * chunk_size
            end_check = (j + 1) * chunk_size
            blobs_check = blobs[start_check:end_check].astype(dtype)
            close_master, close = _find_close_blobs(blobs_check, blobs_ref, tol)
            # shift indices by offsets so they index into the full arrays
            # rather than the current chunk
            close += start_check
            close_master += start_master
            # accumulate match indices across all chunk pairs
            match_check = (close if match_check is None 
                           else np.concatenate((match_check, close)))
            match_master = (close_master if match_master is None 
                            else np.concatenate((match_master, close_master)))
            j += 1
        i += 1
    # drop every blob flagged as close to a master blob
    pruned = np.delete(blobs, match_check, axis=0)
    #if (len(close) > 0): print("{} removed".format(blobs[close][:, 0:4]))
    
    # shift close blobs to their mean values, storing values in the duplicated
    # coordinates and radius of the blob array after the confirmation value;
    # use the duplicated coordinates to work from any prior shifting; 
    # further duplicate testing will still be based on initial position to
    # allow detection of duplicates that occur in multiple ROI pairs
    abs_between = np.around(
        np.divide(
            np.add(get_blob_abs_coords(blobs_master[match_master]), 
                   get_blob_abs_coords(blobs[match_check])), 2))
    blobs_master[match_master] = set_blob_abs_coords(
        blobs_master[match_master], abs_between)
    #print("blobs_master after shifting:\n{}".format(blobs_master[:, 5:9]))
    return pruned, blobs_master
Example #4
0
def discrete_colormap(num_colors,
                      alpha=255,
                      prioritize_default=True,
                      seed=None,
                      min_val=0,
                      max_val=255,
                      min_any=0,
                      symmetric_colors=False,
                      dup_offset=0,
                      jitter=0,
                      mode=DiscreteModes.RANDOMN):
    """Make a discrete colormap using :attr:``config.colors`` as the 
    starting colors and filling in the rest with randomly generated RGB values.
    
    Args:
        num_colors (int): Number of discrete colors to generate.
        alpha (int): Transparency level, from 0-255; defaults to 255.
        prioritize_default (bool, str): If True, the default colors from 
            :attr:``config.colors`` will replace the initial colormap elements; 
            defaults to True. Alternatively, `cn` can be given to use 
            the "CN" color spec instead.
        seed (int): Random number seed; defaults to None, in which case no seed 
            will be set.
        min_val (int, float): Minimum value for random numbers; defaults to 0.
        max_val (int, float): Maximum value for random numbers; defaults to 255.
            For floating point ranges such as 0.0-1.0, set as a float.
        min_any (int, float): Minimum value above which at least one value
            must be in each set of RGB values; defaults to 0. If all
            values in an RGB set are below this value, the lowest
            RGB value will be scaled up by the ratio ``max_val:min_any``.
            Assumes a range of ``min_val < min_any < max_val``; defaults to
            0 to ignore.
        symmetric_colors (bool): True to create a symmetric set of colors,
            assuming the first half of ``num_colors`` mirror those of
            the second half; defaults to False.
        dup_offset (int): Amount by which to offset duplicate color values
            when ``symmetric_colors`` is enabled; defaults to 0.
        jitter (int): In :obj:`DiscreteModes.GRID` mode, coordinates are
            randomly shifted by half this value above or below their original
            value; defaults to 0.
        mode (:obj:`DiscreteModes`): Mode given as an enumeration; defaults
            to :obj:`DiscreteModes.RANDOMN` mode.
    
    Returns:
        :obj:`np.ndarray`: 2D Numpy array in the format 
        ``[[R, G, B, alpha], ...]`` on a 
        scale of 0-255. This colormap will need to be converted into a 
        Matplotlib colormap using ``LinearSegmentedColormap.from_list`` 
        to generate a map that can be used directly in functions such 
        as ``imshow``.
    """
    if symmetric_colors:
        # make room for offset when duplicating colors
        max_val -= dup_offset

    # generate random combination of RGB values for each number of colors,
    # where each value ranges from min-max
    if mode is DiscreteModes.GRID:
        # discrete colors taken from an evenly spaced grid for min separation
        # between color values
        jitters = None
        if jitter > 0:
            if seed is not None: np.random.seed(seed)
            # per-channel random shifts in [-jitter/2, jitter/2); shrink the
            # grid range so jittered values stay within [min_val, max_val]
            jitters = np.multiply(np.random.random((num_colors, 3)),
                                  jitter - jitter / 2).astype(int)
            max_val -= np.amax(jitters)
            min_val -= np.amin(jitters)
        # TODO: weight chls or scale non-linearly for better visual distinction
        # cube root spaces the grid so the 3D grid yields ~num_colors points
        space = (max_val - min_val) // np.cbrt(num_colors)
        sl = slice(min_val, max_val, space)
        grid = np.mgrid[sl, sl, sl]
        coords = np.c_[grid[0].ravel(), grid[1].ravel(), grid[2].ravel()]
        if min_any > 0:
            # remove all coords where all vals are below threshold
            # TODO: account for lost coords in initial space size determination
            coords = coords[~np.all(np.less(coords, min_any), axis=1)]
        if seed is not None: np.random.seed(seed)
        # sample num_colors distinct grid points, then apply the jitter
        rand = np.random.choice(len(coords), num_colors, replace=False)
        rand_coords = coords[rand]
        if jitters is not None:
            rand_coords = np.add(rand_coords, jitters)
        # widen last axis by one channel to hold alpha
        rand_coords_shape = list(rand_coords.shape)
        rand_coords_shape[-1] += 1
        cmap = np.zeros(rand_coords_shape,
                        dtype=libmag.dtype_within_range(min_val, max_val))
        cmap[:, :-1] = rand_coords
    else:
        # randomly generate each color value; 4th values only for simplicity
        # in generating array with shape for alpha channel
        if seed is not None: np.random.seed(seed)
        cmap = (np.random.random(
            (num_colors, 4)) * (max_val - min_val) + min_val).astype(
                libmag.dtype_within_range(min_val, max_val))
        if min_any > 0:
            # if all vals below threshold, scale up lowest value
            below_offset = np.all(np.less(cmap[:, :3], min_any), axis=1)
            axes = np.argmin(cmap[below_offset, :3], axis=1)
            cmap[below_offset, axes] = np.multiply(cmap[below_offset, axes],
                                                   max_val / min_any)

    if symmetric_colors:
        # invert latter half onto former half, assuming that corresponding
        # labels are mirrored (eg -5, 3, 0, 3, 5), with background centered as 0
        cmap_len = len(cmap)
        mid = cmap_len // 2
        # reversed second half copied over first half, offset by dup_offset
        cmap[:mid] = cmap[:cmap_len - mid - 1:-1] + dup_offset
    cmap[:, -1] = alpha  # set transparency
    if prioritize_default is not False:
        # prioritize default colors by replacing first colors with default ones
        colors_default = config.colors
        if prioritize_default == "cn":
            # "CN" color spec
            colors_default = np.multiply(
                [colors.to_rgb("C{}".format(i)) for i in range(10)], 255)
        end = min((num_colors, len(colors_default)))
        cmap[:end, :3] = colors_default[:end]
    return cmap