def split_label(binary):
    '''Split label using watershed algorithm'''
    # peaks of the distance transform provide one seed per (roughly convex) object
    distance = distance_transform_edt(binary)
    distance_blured = gaussian_filter(distance, 8)
    local_maxi = peak_local_max(distance_blured, indices=False, labels=binary,
                                min_distance=10, exclude_border=False)
    markers = measure_label(local_maxi)
    labels_ws = watershed(-distance, markers, mask=binary)
    return labels_ws
def label_nuclei(binary, min_size):
    '''Label, watershed and remove small objects'''
    distance = medial_axis(binary, return_distance=True)[1]
    distance_blured = gaussian_filter(distance, 5)
    local_maxi = peak_local_max(distance_blured, indices=False, labels=binary,
                                min_distance=30)
    markers = measure_label(local_maxi)
    labels_ws = watershed(-distance, markers, mask=binary)
    labels_large = remove_small_objects(labels_ws, min_size)
    labels_clean_border = clear_border(labels_large)
    labels_from_one = relabel_sequential(labels_clean_border)
    return labels_from_one[0]
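
# --- Hedged usage sketch (illustrative, not part of the original module) ---
# split_label() and label_nuclei() above assume module-level aliases such as
# measure_label = skimage.measure.label and the usual scipy/scikit-image
# imports. The sketch below shows the same distance-transform -> peak
# detection -> watershed idea on two touching synthetic disks, written against
# the current scikit-image API, where peak_local_max returns peak coordinates
# instead of a boolean image (the old indices=False form).
import numpy as np
from scipy.ndimage import distance_transform_edt, gaussian_filter
from skimage.draw import disk as draw_disk
from skimage.feature import peak_local_max
from skimage.measure import label as sk_label
from skimage.segmentation import watershed


def _demo_split_touching_disks():
    binary = np.zeros((120, 200), dtype=bool)
    binary[draw_disk((60, 70), 40)] = True     # first "nucleus"
    binary[draw_disk((60, 130), 40)] = True    # second "nucleus", touching the first

    distance = distance_transform_edt(binary)
    distance_blured = gaussian_filter(distance, 4)

    # one marker per distance peak (peaks come back as (row, col) coordinates)
    peaks = peak_local_max(distance_blured, min_distance=10, labels=sk_label(binary))
    peak_mask = np.zeros_like(binary)
    peak_mask[tuple(peaks.T)] = True
    markers = sk_label(peak_mask)

    return watershed(-distance, markers, mask=binary)


if __name__ == '__main__':
    print(np.unique(_demo_split_touching_disks()))  # expected: [0 1 2]
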
def binarize_adaptive(pic_source):
    '''Binarize with a local Otsu threshold clamped from below by the global one'''
    koef = 0.2
    radius = 10
    thres_glb = global_otsu(pic_source)
    thres_loc = local_otsu(pic_source, disk(radius))
    # do not let the local threshold fall far below the global Otsu threshold
    thres_loc[thres_loc < thres_glb * (1 - koef)] = thres_glb * (1 - koef)
    binary = pic_source > thres_loc
    # treat every labelled region except the dominant (background) one as foreground
    labels = measure_label(binary)
    labelcount = np.bincount(labels.ravel())
    bg = np.argmax(labelcount)
    binary[labels != bg] = True
    return binary
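
# --- Hedged usage sketch (illustrative, not part of the original module) ---
# binarize_adaptive() above assumes module-level aliases, presumably along the
# lines of global_otsu = skimage.filters.threshold_otsu and
# local_otsu = skimage.filters.rank.otsu. The sketch below shows the effect of
# clamping the local Otsu threshold from below by the global one on a
# synthetic uint8 image (rank filters require integer images).
import numpy as np
from skimage.filters import threshold_otsu
from skimage.filters.rank import otsu as rank_otsu
from skimage.morphology import disk


def _demo_clamped_local_otsu(img_uint8, radius=10, koef=0.2):
    thres_glb = threshold_otsu(img_uint8)
    thres_loc = rank_otsu(img_uint8, disk(radius)).astype(float)
    clamped = thres_loc.copy()
    # keep the local threshold from collapsing in flat background regions
    floor = thres_glb * (1 - koef)
    clamped[clamped < floor] = floor
    return img_uint8 > thres_loc, img_uint8 > clamped


if __name__ == '__main__':
    rng = np.random.default_rng(0)
    img = rng.normal(60, 5, (128, 128))
    img[40:80, 40:80] += 80                       # one bright square "cell"
    img = img.clip(0, 255).astype(np.uint8)
    raw, clamped = _demo_clamped_local_otsu(img)
    # Without the clamp the purely local threshold fires all over the flat
    # background; with the clamp the background stays almost empty.
    print(raw[:30, :30].sum(), clamped[:30, :30].sum())
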
def binarize_canny(pic_source, sensitivity=5.):
    '''Binarize by closing Canny edges and keeping all non-background regions'''
    ht = 5. + ((10 - sensitivity) / 5.) * 20.
    edges = canny_filter(pic_source, sigma=3, high_threshold=ht, low_threshold=2.)
    selem_morph = np.array([[0, 1, 0],
                            [1, 1, 1],
                            [0, 1, 0]], dtype=bool)
    # dilate to close small gaps in the edge contours
    for _ in range(2):
        edges = binary_dilation(edges, selem_morph)
    # treat every labelled region except the dominant (background) one as foreground
    labels = measure_label(edges)
    labelcount = np.bincount(labels.ravel())
    bg = np.argmax(labelcount)
    edges[labels != bg] = True
    # smooth, then erode back to compensate for the dilation above
    selem_med = np.ones((3, 3), dtype=bool)
    binary = median_filter(edges, selem_med)
    for _ in range(3):
        binary = binary_erosion(binary, selem_morph)
    return binary
def load_cell_image(self, sensitivity=5., min_cell_size=4000):
    '''Load cell image and add cells to self'''
    pic_nuclei = self.get_source_pic_nuclei()
    self.shape = pic_nuclei.shape
    nuclei = find_nuclei(pic_nuclei, sensitivity, min_cell_size)
    self.cell_detect_params = (sensitivity, min_cell_size)
    # relabel so that the dominant (background) region becomes 0
    labels = measure_label(nuclei)
    labelcount = np.bincount(labels.ravel())
    bg = np.argmax(labelcount)
    labels += 1
    labels[labels == bg + 1] = 0
    labels = remove_small_objects(labels, min_cell_size)
    self.nuclei = labels
    self.create_cells_from_nuclei(pic_nuclei)
    self.rescale_nuclei()
def get_roundness_filter_indices(mask: torch.Tensor, threshold: float):
    r"""Filter by roundness, where roundness = (4 pi area) / perimeter^2"""
    indices = []
    num_pixels = mask.shape[-1] * mask.shape[-2]
    # Loop over images
    for m in mask.numpy():
        # Get connected components
        component, num = measure_label(m, return_num=True, background=0)
        if num == 0:
            indices.append(False)
            continue
        # Get area and perimeter of every connected component
        areas, perimeters = [], []
        for lbl in range(1, num + 1):
            component_lbl = (component == lbl)
            areas.append(np.sum(component_lbl))
            perimeters.append(measure_perimeter(component_lbl))
        # Keep the image only if its biggest component is neither tiny nor
        # covering almost the whole frame, and is round enough. The constant
        # 4*pi factor is left out here, so it must be absorbed into
        # `threshold` (an ideal circle scores 1 / (4*pi) ~= 0.0796).
        max_component = np.argmax(areas)
        max_component_area = areas[max_component]
        if num_pixels * 0.05 < max_component_area < num_pixels * 0.90:
            max_component_perimeter = perimeters[max_component]
            roundness = max_component_area / max_component_perimeter ** 2
            indices.append(roundness > threshold)
        else:
            indices.append(False)
    return torch.tensor(indices)
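
# --- Hedged worked example (illustrative, not part of the original function) ---
# get_roundness_filter_indices() compares area / perimeter**2 against a
# threshold; the constant 4*pi factor from the docstring is folded into the
# threshold. An ideal circle scores area / perimeter**2 = 1 / (4*pi) ~= 0.0796,
# so thresholds are typically chosen somewhat below that value.
import numpy as np
import torch
from skimage.draw import disk as draw_disk


def _demo_roundness_threshold():
    mask = np.zeros((1, 128, 128), dtype=np.uint8)
    mask[0][draw_disk((64, 64), 30)] = 1          # a single round component
    # ~0.08 for a disk, which is above the 0.05 threshold -> tensor([True])
    return get_roundness_filter_indices(torch.from_numpy(mask), threshold=0.05)


if __name__ == '__main__':
    print(_demo_roundness_threshold())
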
def clean_up_mask(mask, closing_iterations=4, area_factor=1.5):
    '''
    Clean up the segmentation of cells by:
    1) Removing small holes. This is somewhat controlled by the
    "closing_iterations" parameter. More iterations will fill larger holes,
    but will ultimately cause weird object shapes.
    2) Excluding small objects. Objects with a size below mu - area_factor*std
    (mu: average object area, std: standard deviation of the object area) are
    excluded. You can choose the area_factor; a high factor will result in
    fewer objects being removed.

    :param mask: mask of cells
    :param closing_iterations: number of iterations during a binary_closing operation
    :param area_factor: Factor defining the threshold to exclude small objects.
        A large area_factor allows smaller objects (see above).
    :return: cleaned-up mask
    '''
    # binary closing
    mask_clean = copy.deepcopy(mask)
    mask_clean = binary_dilation(mask_clean, iterations=closing_iterations)
    mask_clean = binary_erosion(mask_clean, iterations=closing_iterations)
    # filling holes
    mask_clean = binary_fill_holes(mask_clean)
    # excluding small areas
    labeled = measure_label(mask_clean)
    regions = regionprops(labeled)
    areas = [r.area for r in regions]
    mu = np.mean(areas)
    std = np.std(areas, ddof=1)
    mask_clean = remove_small_objects(mask_clean, mu - area_factor * std)
    return mask_clean
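
# --- Hedged usage sketch (illustrative, not part of the original module) ---
# clean_up_mask() above relies on scipy.ndimage's binary_dilation/binary_erosion
# (note the `iterations` argument) and binary_fill_holes, plus skimage's
# label/regionprops/remove_small_objects, all assumed to be imported at module
# level. The sketch closes a small hole and drops an outlier-small object from
# a synthetic mask of several similar-sized "cells".
import numpy as np
from skimage.draw import disk as draw_disk


def _demo_clean_up_mask():
    mask = np.zeros((200, 200), dtype=bool)
    for cy in (40, 100, 160):                    # three similar-sized cells
        mask[draw_disk((cy, 60), 18)] = True
    mask[draw_disk((100, 140), 18)] = True       # a fourth cell ...
    mask[98:102, 138:142] = False                # ... with a small hole in it
    mask[draw_disk((30, 170), 3)] = True         # an outlier-small object
    # the cleaned mask has the hole filled and the tiny object removed
    cleaned = clean_up_mask(mask, closing_iterations=4, area_factor=1.5)
    return mask.sum(), cleaned.sum()


if __name__ == '__main__':
    print(_demo_clean_up_mask())
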
def get_markers(foci_pic, nucleus, peak_min_val_perc=60):
    '''Return foci markers'''
    foci_pic_blured = np.floor(gaussian_filter(foci_pic, 1) * 255).astype(np.uint8)
    foci_values = np.extract(nucleus, foci_pic)
    min_peak_val = np.percentile(foci_values, peak_min_val_perc)
    local_maxi = peak_local_max(foci_pic_blured, min_distance=5,
                                threshold_abs=min_peak_val,
                                indices=False, labels=nucleus)
    return measure_label(local_maxi)
def detect_dog(img, gauss_1=1, gauss_2=2, threshold="otsu",
               exclude_close_to_edge=False, threshold_factor=1):
    '''
    Segmentation (= identifying the area of cells). The image is
    bandpass-filtered (removing large/unsharp objects and small objects), then
    the cell area is identified by thresholding. You can use Otsu's method
    ("otsu"), a threshold based on the histogram of pixels ("mean_std"), or a
    fixed threshold ("absolute"). All thresholds can be scaled up or down with
    threshold_factor. If you choose "absolute", the threshold is set to 1 and
    can only be changed via threshold_factor.

    :param img: np.ndarray; image, e.g. the maximum projection.
    :param gauss_1: lower sigma for the bandpass filter
    :param gauss_2: upper sigma for the bandpass filter
    :param threshold: thresholding method; one of "otsu", "mean_std", "absolute".
    :param exclude_close_to_edge: boolean; if True, cells close to the image
        edge are ignored (probably not necessary).
    :param threshold_factor: additional factor for the threshold.
    :return: mask and an array of detected (x, y) positions
    '''
    th = None
    # difference of Gaussians acts as a bandpass filter
    img2 = gaussian(img, gauss_1) - gaussian(img, gauss_2)
    if threshold == "otsu":
        th = threshold_otsu(img2)
    if threshold == "mean_std":
        mu, std = np.mean(np.ravel(img2)), np.std(np.ravel(img2), ddof=1)
        th = mu + 5 * std
    if threshold == "absolute":
        th = 1
    mask = img2 > th * threshold_factor

    labeled = measure_label(mask)
    regions = regionprops(labeled, intensity_image=img2)
    detections = []
    for r in regions:
        y, x = r.weighted_centroid
        # optional filtering of all detections close to the image edge
        close_to_edge = not ((75 < x < img.shape[1] - 75) and
                             (75 < y < img.shape[0] - 75))
        if not close_to_edge or not exclude_close_to_edge:
            detections.append((x, y))
        else:
            mask[labeled == r.label] = 0  # removing this object from the mask
    detections = np.array(detections)
    return mask, detections
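
# --- Hedged usage sketch (illustrative, not part of the original module) ---
# detect_dog() above assumes module-level imports along the lines of
# skimage.filters.gaussian / threshold_otsu and skimage.measure label /
# regionprops. The sketch runs the difference-of-Gaussians detection on a
# synthetic image containing two bright blobs.
import numpy as np
from skimage.draw import disk as draw_disk


def _demo_detect_dog():
    rng = np.random.default_rng(0)
    img = rng.normal(0.0, 0.01, (300, 300))
    img[draw_disk((150, 100), 6)] += 1.0
    img[draw_disk((150, 200), 6)] += 1.0
    mask, detections = detect_dog(img, gauss_1=2, gauss_2=8, threshold="otsu")
    # detections are (x, y) weighted centroids, here close to (100, 150) and (200, 150)
    return detections


if __name__ == '__main__':
    print(_demo_detect_dog())
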
def binarize_canny(pic_source, sensitivity=5.):
    '''Binarize by closing Canny edges of the sharpened image'''
    ht = 5. + ((10 - sensitivity) / 5.) * 25.
    lt = (10 - sensitivity) * 2.
    sharp = sharpen_image(pic_source)
    edges = canny_filter(sharp, sigma=1, high_threshold=ht, low_threshold=lt)
    selem_morph = np.array([[0, 1, 0],
                            [1, 1, 1],
                            [0, 1, 0]], dtype=bool)
    # dilate to close small gaps in the edge contours
    for _ in range(2):
        edges = binary_dilation(edges, selem_morph)
    # treat every labelled region except the dominant (background) one as foreground
    labels = measure_label(edges)
    labelcount = np.bincount(labels.ravel())
    bg = np.argmax(labelcount)
    edges[labels != bg] = True
    # erode a little more than the dilation above, then dilate back to smooth
    binary = edges
    for _ in range(4):
        binary = binary_erosion(binary, selem_morph)
    for _ in range(2):
        binary = binary_dilation(binary, selem_morph)
    return binary
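
# --- Hedged usage sketch (illustrative, not part of the original module) ---
# binarize_canny() above assumes module-level aliases, presumably along the
# lines of canny_filter = skimage.feature.canny, plus a project-specific
# sharpen_image() helper that is not shown here. The sketch below reproduces
# the core idea (detect edges, dilate them to close the contours, then keep
# everything that is not the dominant background region) on a synthetic blob.
# Because current scikit-image gives all background pixels the label 0, the
# background regions are found here by labelling the *inverted* edge mask.
import numpy as np
from skimage.draw import disk as draw_disk
from skimage.feature import canny
from skimage.measure import label as sk_label
from skimage.morphology import binary_dilation, binary_erosion


def _demo_canny_binarize():
    img = np.zeros((100, 100), dtype=float)
    img[draw_disk((50, 50), 25)] = 1.0            # one bright round "cell"
    selem = np.array([[0, 1, 0],
                      [1, 1, 1],
                      [0, 1, 0]], dtype=bool)
    edges = canny(img, sigma=1)                   # thin ring of edge pixels
    for _ in range(2):
        edges = binary_dilation(edges, selem)     # close gaps in the ring
    # Label the inverse mask: its largest region is the outer background, so
    # everything else (the ring and its interior) becomes foreground.
    bg_labels = sk_label(~edges)
    bg = np.argmax(np.bincount(bg_labels.ravel()))
    filled = bg_labels != bg
    for _ in range(2):
        filled = binary_erosion(filled, selem)    # undo the dilation
    return filled.sum()                           # roughly the blob area (~2000 px)


if __name__ == '__main__':
    print(_demo_canny_binarize())
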
def apply_connected_components_(m: np.ndarray, threshold: float):
    """Return mask with small connected components removed (in place)"""
    # Get connected components
    component, num = measure_label(m, return_num=True, background=0)
    areas = np.zeros([num + 1])
    for comp in range(1, num + 1):
        areas[comp] = np.sum(component == comp)
    # Get area of biggest connected component
    max_component = np.argmax(areas)
    max_component_area = areas[max_component]
    # Rebuild the mask (in place), keeping only components whose area is more
    # than `threshold` times the area of the biggest component
    m *= 0
    for comp in range(1, num + 1):
        if float(areas[comp]) / max_component_area > threshold:
            m[component == comp] = True
    return m
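
# --- Hedged worked example (illustrative, not part of the original function) ---
# apply_connected_components_() keeps every connected component whose area is
# larger than `threshold` times the area of the biggest component, writing the
# result back into `m` in place. Example with one large and one small component:
import numpy as np


def _demo_apply_connected_components():
    m = np.zeros((60, 60), dtype=np.uint8)
    m[5:45, 5:45] = 1      # large component, area 1600
    m[50:54, 50:54] = 1    # small component, area 16 (1% of the largest)
    apply_connected_components_(m, threshold=0.05)
    return int(m.sum())    # 1600: the small component has been removed


if __name__ == '__main__':
    print(_demo_apply_connected_components())
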
def get_max_indices_and_position(mask, max_indices):
    '''
    Estimate the z-position of cells from a segmentation mask. Individual
    objects are identified by labelling, then the z-position is calculated by
    taking the mean of the maximum indices over the area of each object. This
    also returns the x-y-positions of cells by calculating the centroid of
    each object. Additionally it calculates the standard deviation of the
    maximum indices; a large standard deviation is a sign of problems.

    :param mask: boolean segmentation mask
    :param max_indices: map of maximum indices
    :return: lists of mean z-index, z-index standard deviation and centroid per object
    '''
    labeled = measure_label(mask)
    regions = regionprops(labeled)
    max_indices_list = []
    index_variation = []
    pos_list = []
    for r in regions:
        max_indices_list.append(np.mean(max_indices[r.coords[:, 0], r.coords[:, 1]]))
        index_variation.append(np.std(max_indices[r.coords[:, 0], r.coords[:, 1]]))
        pos_list.append(r.centroid)
    return max_indices_list, index_variation, pos_list
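
# --- Hedged usage sketch (illustrative, not part of the original function) ---
# get_max_indices_and_position() averages a per-pixel "index of the maximum"
# map (e.g. np.argmax of a z-stack along z) over each labelled object; label
# and regionprops from skimage.measure are assumed to be imported at module
# level. Synthetic example with two square cells sharpest at z = 3 and z = 7:
import numpy as np


def _demo_max_indices_and_position():
    mask = np.zeros((50, 50), dtype=bool)
    mask[5:15, 5:15] = True
    mask[30:40, 30:40] = True
    max_indices = np.zeros((50, 50), dtype=int)
    max_indices[5:15, 5:15] = 3       # first cell is sharpest at slice 3
    max_indices[30:40, 30:40] = 7     # second cell is sharpest at slice 7
    z, z_std, pos = get_max_indices_and_position(mask, max_indices)
    return z, z_std, pos              # ([3.0, 7.0], [0.0, 0.0], [(9.5, 9.5), (34.5, 34.5)])


if __name__ == '__main__':
    print(_demo_max_indices_and_position())
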
def labelizer_image(self, image):
    return measure_label(image, background=0, return_num=True)