def stk_to_rois(stk, threshold, min_size, max_window=8, downscale_factor=2):
    thresholded_stk = stk > threshold
    thresholded_stk = remove_small_objects(thresholded_stk, min_size)
    distance = ndi.distance_transform_edt(thresholded_stk)
    cropped_stk = stk.copy()
    cropped_stk[np.logical_not(thresholded_stk)] = 0
    combined_stk = cropped_stk + distance / distance.max()
    local_max = peak_local_max(combined_stk, indices=False,
                               footprint=np.ones((max_window, max_window)),
                               labels=thresholded_stk)
    markers = ndi.label(local_max)[0]
    labels = watershed(-combined_stk, markers, mask=thresholded_stk)
    new_markers = markers.copy()
    for i in set(labels.flatten()):
        if i == 0:
            continue
        if np.sum(labels == i) < min_size:
            new_markers[markers == i] = 0
    labels = watershed(-combined_stk, new_markers, mask=thresholded_stk)
    labels_set = set(labels.flatten())
    rois = []
    for label in labels_set:
        if label == 0:
            continue
        if np.sum((labels == label).astype(int)) < min_size:
            continue
        nroi = np.zeros((stk.shape[0], stk.shape[1]))
        cx, cy = np.where(labels == label)
        cx, cy = int(cx.mean()), int(cy.mean())
        x, y = np.ogrid[0:nroi.shape[0], 0:nroi.shape[1]]
        r = 4
        mask = (cx - x) ** 2 + (cy - y) ** 2 <= r * r
        nroi[mask] = 1
        # nroi[labels == label] = 1
        rois.append(zoom(nroi, downscale_factor, order=0))
    rois = np.array(rois)
    return rois, thresholded_stk, labels
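# A sketch of the imports stk_to_rois appears to assume, inferred from the calls
# above (the original module may alias or source them differently):
import numpy as np
from scipy import ndimage as ndi
from scipy.ndimage import zoom
from skimage.feature import peak_local_max
from skimage.morphology import remove_small_objects, watershed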
def do_watershed(image, markers, tfile, shape, bstruct, algorithm, mg_size,
                 use_ww_wl, wl, ww, q):
    mask = np.memmap(tfile, shape=shape, dtype='uint8', mode='r+')

    if use_ww_wl:
        if algorithm == 'Watershed':
            tmp_image = ndimage.morphological_gradient(
                get_LUT_value(image, ww, wl).astype('uint16'), mg_size)
            tmp_mask = watershed(tmp_image, markers.astype('int16'), bstruct)
        else:
            tmp_image = get_LUT_value(image, ww, wl).astype('uint16')
            # tmp_image = ndimage.gaussian_filter(tmp_image, self.config.mg_size)
            # tmp_image = ndimage.morphological_gradient(
            #     get_LUT_value(image, ww, wl).astype('uint16'),
            #     self.config.mg_size)
            tmp_mask = watershed_ift(tmp_image, markers.astype('int16'), bstruct)
    else:
        if algorithm == 'Watershed':
            tmp_image = ndimage.morphological_gradient(
                (image - image.min()).astype('uint16'), mg_size)
            tmp_mask = watershed(tmp_image, markers.astype('int16'), bstruct)
        else:
            tmp_image = (image - image.min()).astype('uint16')
            # tmp_image = ndimage.gaussian_filter(tmp_image, self.config.mg_size)
            # tmp_image = ndimage.morphological_gradient(
            #     (image - image.min()).astype('uint16'), self.config.mg_size)
            tmp_mask = watershed_ift(tmp_image, markers.astype('int8'), bstruct)

    mask[:] = tmp_mask
    mask.flush()
    q.put(1)
def analyse(self, **kwargs):
    image_object = kwargs['image']
    if image_object is None:
        raise RuntimeError()

    # Read the image
    image = cv2.imread(self.image_utils.getOutputFilename(image_object.id))
    if image is None:
        print('File not found')
        return

    # Work on the green channel
    gray = image[:, :, 1]

    # Apply Otsu thresholding
    thresh = filters.threshold_otsu(gray)
    gray[gray < thresh] = 0

    # Apply histogram equalization
    gray = exposure.equalize_adapthist(gray) * 255

    # Create elevation map
    elevation_map = filters.sobel(gray)
    gray = gray.astype(int)

    # Create cell markers
    markers = numpy.zeros_like(gray)
    markers[gray < 100] = 2  # seen as white in plot
    markers[gray > 150] = 1  # seen as black in plot

    # Segment with watershed using the elevation map
    segmentation = morphology.watershed(elevation_map, markers)
    segmentation = ndi.binary_fill_holes(segmentation - 1)
    # labeled_image, n = ndi.label(segmentation)

    # Watershed with distance transform
    kernel = numpy.ones((5, 5), numpy.uint8)
    distance = ndi.distance_transform_edt(segmentation)
    distance2 = cv2.erode(distance, kernel)
    distance2 = cv2.dilate(distance2, kernel)
    local_max = peak_local_max(distance2, num_peaks=1, indices=False, labels=segmentation)
    markers2 = ndi.label(local_max)[0]
    labels = morphology.watershed(-distance2, markers2, mask=segmentation)

    # Extract regions (caching trades extra memory for faster property access)
    regions = regionprops(labels, cache=True)

    # Filter out large spurious regions
    regions = [region for region in regions if region.area < 2000]

    # Set result
    result = str(len(regions))
    return result
def run(self, workspace):
    labeled_nuclei = workspace.object_set.get_objects(self.primary_objects.value).get_segmented()
    cell_image = workspace.image_set.get_image(self.image_name.value).pixel_data[:, :]
    image_collection = []

    cell_threshold = otsu(cell_image, min_threshold=0, max_threshold=1)
    cell_binary = (cell_image >= cell_threshold)
    cell_distance = scipym.distance_transform_edt(cell_binary).astype(np.uint16)
    cell_labeled = skm.watershed(-cell_distance, labeled_nuclei, mask=cell_binary)

    # Fill holes and filter the objects in cell_labeled on size
    cell_labeled = self.filter_on_border(cell_labeled)
    cell_labeled = fill_labeled_holes(cell_labeled)

    objects = cellprofiler.objects.Objects()
    objects.segmented = cell_labeled
    objects.parent_image = cell_image

    workspace.object_set.add_objects(objects, self.object_name.value)
    image_collection.append((cell_image, "Original"))
    image_collection.append((cell_labeled, "Labeled image"))
    workspace.display_data.image_collection = image_collection
def __call__(self, image, window_size=10, threshold=0, fill_holes=True,
             outline_smoothing=2, remove_borderobjects=True, size_min=1,
             *args, **kw):
    thresh = threshold_adaptive(image, block_size=window_size, offset=-1 * threshold)

    if outline_smoothing >= 1:
        thresh = outlineSmoothing(thresh, outline_smoothing)

    thresh = remove_small_objects(thresh, size_min)
    seeds = ndi.label(clear_border(~thresh))[0]
    thresh = ndi.binary_fill_holes(thresh)
    smask = seeds.astype(bool)

    # objects don't touch the border after outline smoothing
    if remove_borderobjects:
        thresh = clear_border(thresh)

    img = np.zeros(thresh.shape)
    img[~smask] = 1
    edt = ndi.morphology.distance_transform_edt(img)
    edt -= ndi.morphology.distance_transform_edt(seeds)

    labels = watershed(edt, seeds)
    labels[smask] = 0
    labels[~thresh] = 0

    return labels
def watershed_with_seeds(image, seed_image, mask_image=None, name='watershed'):
    """Perform watershed segmentation from given seeds.

    Inputs should be of the form:

    image : grayscale image, with higher values representing more signal
    seed_image : grayscale image where each pixel value represents a unique region
    """
    if mask_image is None:
        mask = None
    else:
        mask = mask_image.image_array

    # We multiply the image by -1 because the algorithm implementation expects
    # higher values to be easier for the 'water' to pass
    segmented = watershed(-image.image_array, seed_image.image_array, mask=mask)

    ia = ImageArray(segmented, name)
    ia.history = image.history + [name]

    return ia
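# A minimal usage sketch for watershed_with_seeds, assuming a small stand-in
# for the ImageArray wrapper it expects (the project's real class presumably
# lives elsewhere in this codebase):
import numpy as np
from scipy import ndimage as ndi

class ImageArray:  # hypothetical stand-in for the project's wrapper class
    def __init__(self, image_array, name):
        self.image_array = image_array
        self.name = name
        self.history = []

signal = np.zeros((64, 64))
signal[16:32, 16:32] = 1.0   # one bright square
signal[40:56, 40:56] = 1.0   # another
seeds = ndi.label(signal > 0.5)[0]

result = watershed_with_seeds(ImageArray(signal, 'img'), ImageArray(seeds, 'seeds'))
print(result.name, np.unique(result.image_array))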
def segment(self, src):
    image = src.ndarray[:]
    if self.use_adaptive_threshold:
        block_size = 25
        markers = threshold_adaptive(image, block_size) * 255
        markers = invert(markers)
    else:
        markers = zeros_like(image)
        markers[image < self.threshold_low] = 1
        markers[image > self.threshold_high] = 255

    elmap = sobel(image, mask=image)
    wsrc = watershed(elmap, markers, mask=image)

    # elmap = ndimage.distance_transform_edt(image)
    # local_maxi = is_local_maximum(elmap, image, ones((3, 3)))
    # markers = ndimage.label(local_maxi)[0]
    # wsrc = watershed(-elmap, markers, mask=image)
    # fwsrc = ndimage.binary_fill_holes(out)
    # return wsrc

    if self.use_inverted_image:
        out = invert(wsrc)
    else:
        out = wsrc

    # time.sleep(1)
    # do_later(lambda: self.show_image(image, -elmap, out))
    return out
def label_nuclei(binary, min_size):
    '''Label, watershed and remove small objects'''
    distance = medial_axis(binary, return_distance=True)[1]
    distance_blured = gaussian_filter(distance, 5)
    local_maxi = peak_local_max(distance_blured, indices=False, labels=binary,
                                min_distance=30)
    markers = measure_label(local_maxi)
    # markers[~binary] = -1
    # labels_rw = segmentation.random_walker(binary, markers)
    # labels_rw[labels_rw == -1] = 0
    # labels_rw = segmentation.relabel_sequential(labels_rw)
    labels_ws = watershed(-distance, markers, mask=binary)
    labels_large = remove_small_objects(labels_ws, min_size)
    labels_clean_border = clear_border(labels_large)
    labels_from_one = relabel_sequential(labels_clean_border)
    # plt.imshow(ndimage.morphology.binary_dilation(markers))
    # plt.show()
    return labels_from_one[0]
def segment(self, src):
    '''
        pychron: preprocessing cv.Mat
    '''
    # image = pychron.ndarray[:]
    # image = asarray(pychron)
    image = src[:]
    if self.use_adaptive_threshold:
        # block_size = 25
        markers = threshold_adaptive(image, self.block_size)
        n = markers[:].astype('uint8')
        n[markers] = 255
        n[~markers] = 1
        markers = n
    else:
        markers = zeros_like(image)
        markers[image < self.threshold_low] = 1
        markers[image > self.threshold_high] = 255

    elmap = sobel(image, mask=image)
    wsrc = watershed(elmap, markers, mask=image)
    # wsrc = wsrc.astype('uint8')
    return invert(wsrc)
def segment_out_cells(base):
    # TODO: try using Otsu for GFP thresholding
    sel_elem = disk(2)
    gfp_collector = np.sum(base, axis=0)
    gfp_clustering_markers = np.zeros(gfp_collector.shape, dtype=np.uint8)

    # random walker segmentation
    gfp_clustering_markers[gfp_collector > np.mean(gfp_collector) * 2] = 2
    gfp_clustering_markers[gfp_collector < np.mean(gfp_collector) * 0.20] = 1
    labels = random_walker(gfp_collector, gfp_clustering_markers, beta=10, mode='bf')

    # round up the labels and set the background to 0 from 1
    labels = closing(labels, sel_elem)
    labels -= 1

    # prepare distances for the watershed
    distance = ndi.distance_transform_edt(labels)
    local_maxi = peak_local_max(distance,
                                indices=False,     # we want the image mask, not peak positions
                                min_distance=10,   # about half of a bud at our image size
                                threshold_abs=10,  # allows us to clear the noise
                                labels=labels)
    # fuse labels that are close together but escaped the min_distance in local_maxi
    local_maxi = ndi.convolve(local_maxi, np.ones((5, 5)), mode='constant', cval=0.0)

    # finish the watershed
    expanded_maxi_markers = ndi.label(local_maxi, structure=np.ones((3, 3)))[0]
    segmented_cells_labels = watershed(-distance, expanded_maxi_markers, mask=labels)

    # log debugging data
    running_debug_frame.gfp_collector = gfp_collector
    running_debug_frame.gfp_clustering_markers = gfp_clustering_markers
    running_debug_frame.labels = labels
    running_debug_frame.segmented_cells_labels = segmented_cells_labels

    return gfp_collector, segmented_cells_labels
def split_object(self, labeled_image):
    """split object when it's necessary"""
    labeled_image = labeled_image.astype(np.uint16)
    labeled_mask = np.zeros_like(labeled_image, dtype=np.uint16)
    labeled_mask[labeled_image != 0] = 1

    # shift structuring element about center point; this only affects eccentric
    # structuring elements (i.e. selem with even-numbered sides)
    labeled_image = skr.median(labeled_image, skm.disk(4))
    labeled_mask = np.zeros_like(labeled_image, dtype=np.uint16)
    labeled_mask[labeled_image != 0] = 1
    distance = scipym.distance_transform_edt(labeled_image).astype(np.uint16)
    # binary = np.zeros(np.shape(labeled_image))
    # binary[labeled_image > 0] = 1
    distance = skr.mean(distance, skm.disk(15))
    l_max = skr.maximum(distance, skm.disk(5))
    # l_max = skf.peak_local_max(distance, indices=False, labels=labeled_image,
    #                            footprint=np.ones((3, 3)))
    l_max = l_max - distance <= 0
    l_max = skr.maximum(l_max.astype(np.uint8), skm.disk(6))

    marker = ndimage.label(l_max)[0]
    split_image = skm.watershed(-distance, marker)
    split_image[split_image[0, 0] == split_image] = 0

    return split_image
def _segment_watershed(image):
    elevation_map = sobel(image)
    markers = np.zeros(image.shape)  # initialize markers as zero array

    # determine thresholds for markers
    sorted_pixels = np.sort(image, axis=None)
    max_int = np.mean(sorted_pixels[-10:])
    min_int = np.mean(sorted_pixels[:10])
    # max_int = np.max(orig_image)
    # min_int = np.min(orig_image)
    alpha_min = 0.01
    alpha_max = 0.4
    thresh_background = (1 - alpha_min) * min_int + alpha_min * max_int
    thresh_spots = (1 - alpha_max) * min_int + alpha_max * max_int

    markers[image < thresh_background] = 1  # mark background
    markers[image > thresh_spots] = 2       # mark spots

    segmentation = watershed(elevation_map, markers)
    segmentation = segmentation - 1
    segmentation = ndi.binary_fill_holes(segmentation)  # fill holes

    return segmentation
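# Worked example of the marker thresholds above (hypothetical intensities):
# with min_int = 10, max_int = 210, alpha_min = 0.01, alpha_max = 0.4:
#   thresh_background = 0.99 * 10 + 0.01 * 210 = 12.0  -> pixels darker than this seed the background
#   thresh_spots      = 0.60 * 10 + 0.40 * 210 = 90.0  -> pixels brighter than this seed the spots
# Pixels in between stay unmarked and are assigned by the watershed flooding.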
def testSkimage():
    img = Image.open('../img/1.png')
    img = np.array(img)
    imggray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
    # (thresh, imgbw) = cv2.threshold(imggray, 128, 255, cv2.THRESH_BINARY | cv2.THRESH_OTSU)

    # canny detector
    # from skimage.feature import canny
    # edges = canny(imggray / 255.)
    from scipy import ndimage as ndi
    # fill_imgbw = ndi.binary_fill_holes(edges)
    # label_objects, nb_labels = ndi.label(fill_imgbw)
    # sizes = np.bincount(label_objects.ravel())
    # mask_sizes = sizes > 20
    # mask_sizes[0] = 0
    # cleaned_imgbw = mask_sizes[label_objects]

    markers = np.zeros_like(imggray)
    markers[imggray < 120] = 1
    markers[imggray > 150] = 2

    from skimage.filters import sobel
    elevation_map = sobel(imggray)

    from skimage.morphology import watershed
    segmentation = watershed(elevation_map, markers)

    # from skimage.color import label2rgb
    # segmentation = ndi.binary_fill_holes(segmentation - 10)
    # labeled_coins, _ = ndi.label(segmentation)
    # image_label_overlay = label2rgb(labeled_coins, image=imggray)

    plt.imshow(segmentation, cmap='gray')
    plt.show()
    return
def detectOpticDisc(image):
    kernel = octagon(10, 10)
    thresh = threshold_otsu(image[:, :, 1])
    binary = image > thresh
    print(binary.dtype)
    luminance = convertToHLS(image)[:, :, 2]
    t = threshold_otsu(luminance)
    t = erosion(luminance, kernel)

    labels = segmentation.slic(image[:, :, 1], n_segments=3)
    out = color.label2rgb(labels, image[:, :, 1], kind='avg')
    skio.imshow(out)

    x, y = computeCentroid(t)
    print(x, y)
    rows, cols, _ = image.shape
    p1 = closing(image[:, :, 1], kernel)
    p2 = opening(p1, kernel)
    p3 = reconstruction(p2, p1, 'dilation')
    p3 = p3.astype(np.uint8)
    # g = dilation(p3, kernel) - erosion(p3, kernel)
    # g = rank.gradient(p3, disk(5))
    g = cv2.morphologyEx(p3, cv2.MORPH_GRADIENT, kernel)
    # markers = rank.gradient(p3, disk(5)) < 10
    markers = drawCircle(rows, cols, x, y, 85)
    # markers = ndimage.label(markers)[0]
    # skio.imshow(markers)
    g = g.astype(np.uint8)
    # g = cv2.cvtColor(g, cv2.COLOR_GRAY2RGB)
    w = watershed(g, markers)
    print(np.max(w), np.min(w))
    w = w.astype(np.uint8)
    # skio.imshow(w)
    return w
def segment(self, image):
    """
    """
    # image = src[:]
    if self.use_adaptive_threshold:
        bs = self.blocksize
        if not bs % 2:
            bs += 1
        markers = threshold_adaptive(image, bs)
        # n = markers[:].astype('uint8')
        n = markers.astype('uint8')
        # n[markers] = 255
        # n[invert(markers)] = 1
        # markers = n
        return n
    else:
        markers = zeros_like(image)
        # print('image', image.max(), image.min())
        # print('le', image < self.threshold_low)
        # print('ge', image > self.threshold_high)
        markers[image <= self.threshold_low] = 1
        markers[image > self.threshold_high] = 255

    # elmap = sobel(image, mask=image)
    elmap = canny(image, sigma=1)
    wsrc = watershed(elmap, markers, mask=image)
    return invert(wsrc)
def black_background(image, kernel):
    shifted = cv2.pyrMeanShiftFiltering(image, 10, 39)
    gray = cv2.cvtColor(shifted, cv2.COLOR_BGR2GRAY)
    thresh = cv2.threshold(gray, 0, 255, cv2.THRESH_BINARY | cv2.THRESH_OTSU)[1]

    D = ndimage.distance_transform_edt(thresh)
    localMax = peak_local_max(D, indices=False, min_distance=10, labels=thresh)

    # perform a connected component analysis on the local peaks,
    # using 8-connectivity, then apply the watershed algorithm
    markers = ndimage.label(localMax, structure=np.ones((3, 3)))[0]
    labels = watershed(-D, markers, mask=thresh)

    # create a mask
    mask2 = np.zeros(gray.shape, dtype="uint8")

    # loop over the unique labels returned by the watershed algorithm
    for label in np.unique(labels):
        # if the label is zero, we are examining the 'background', so simply ignore it
        if label == 0:
            continue
        # otherwise, allocate memory for the label region and draw it on the mask
        mask2[labels == label] = 255

    return mask2
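# A minimal usage sketch for black_background with synthetic data (note that
# the kernel argument is unused by the body above, so any placeholder works):
import numpy as np

img = np.zeros((80, 80, 3), dtype=np.uint8)
img[20:40, 20:40] = 200   # two bright squares on black
img[45:65, 45:65] = 200
mask = black_background(img, kernel=None)
print(mask.dtype, np.unique(mask))   # expect a uint8 mask containing 0 / 255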
def segment(self, data, peaks):
    """Perform a watershed segmentation based on local maxima."""
    markers = np.zeros_like(data)
    markers[tuple(peaks.T)] = np.arange(len(peaks)) + 1
    seg = morphology.watershed(-data, markers, mask=data > 0)
    return seg, markers
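# A minimal usage sketch for this method (assumed setup; `segmenter` stands in
# for an instance of whatever class the method belongs to):
import numpy as np
from scipy import ndimage as ndi
from skimage.feature import peak_local_max

data = np.zeros((40, 40))
data[10, 10] = data[30, 30] = 5.0
data = ndi.gaussian_filter(data, 4)           # two smooth blobs
peaks = peak_local_max(data, min_distance=5)  # array of (row, col) coordinates

seg, markers = segmenter.segment(data, peaks)  # `segmenter`: hypothetical instance
# seg labels each blob's basin; markers holds the numbered seed pixels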
def Image_ws_tranche(image):
    laser = Detect_laser(image)
    laser_tranche = tranche_image(laser, 60)

    image_g = skimage.color.rgb2gray(image)
    image_g = image_g * laser_tranche

    image_med = rank2.median((image_g * 255).astype('uint8'), disk(8))
    image_clahe = exposure.equalize_adapthist(image_med, clip_limit=0.03)
    image_clahe_stretch = exposure.rescale_intensity(image_clahe, out_range=(0, 256))

    image_grad = rank2.gradient(image_clahe_stretch, disk(3))
    image_grad_mark = image_grad < 20
    image_grad_forws = rank2.gradient(image_clahe_stretch, disk(1))
    image_grad_mark_closed = closing(image_grad_mark, disk(1))

    Labelised = (skimage.measure.label(image_grad_mark_closed, 8, 0)) + 1
    Watersheded = watershed(image_grad_forws, Labelised)

    cooc = coocurence_liste(Watersheded, laser, 3)
    x, y = compte_occurences(cooc)
    return x, y
def segmentation(file_name):
    data_x, data_y, data_z = get_data(file_name)
    shape_x = len(np.unique(data_x))
    shape_y = len(np.unique(data_y))
    X = data_x.reshape(shape_x, shape_y)
    Y = data_y.reshape(shape_x, shape_y)
    Z = data_z.reshape(shape_x, shape_y)

    markers = np.zeros_like(Z)
    markers[Z < 0.15] = 1
    markers[Z > 0.3] = 2
    elevation_map = roberts(Z)

    fig, (ax1, ax2) = plt.subplots(1, 2, figsize=(6, 3), sharex=True, sharey=True)
    # ax.imshow(Z)
    # ax.imshow(elevation_map, cmap=plt.cm.jet, interpolation='nearest')
    segmentation = watershed(elevation_map, markers)
    ax2.imshow(segmentation, interpolation='nearest')
    # ax.axis('off')
    # ax.set_title('segmentation')

    segmentation = ndi.binary_fill_holes(segmentation - 1)
    labeled_coins, _ = ndi.label(segmentation)
    ax1.imshow(Z, cmap=plt.cm.gray, interpolation='nearest')
    ax1.contour(segmentation, [0.5], linewidths=1.2, colors='y')
    ax1.axis('off')
    ax1.set_adjustable('box-forced')
    plt.show()
def watershed(image):
    hsv_image = color.rgb2hsv(image)

    low_res_image = rescale(hsv_image[:, :, 0], SCALE)
    local_mean = mean(low_res_image, disk(50))
    local_minimum_flat = np.argmin(local_mean)
    local_minimum = np.multiply(np.unravel_index(local_minimum_flat, low_res_image.shape),
                                round(1 / SCALE))

    # integer division keeps the slice bounds as ints under Python 3
    certain_bone_pixels = np.full_like(hsv_image[:, :, 0], False, bool)
    certain_bone_pixels[
        local_minimum[0] - INITIAL_WINDOW_SIZE // 2:local_minimum[0] + INITIAL_WINDOW_SIZE // 2,
        local_minimum[1] - INITIAL_WINDOW_SIZE // 2:local_minimum[1] + INITIAL_WINDOW_SIZE // 2
    ] = True

    certain_non_bone_pixels = np.full_like(hsv_image[:, :, 0], False, bool)
    certain_non_bone_pixels[0:BORDER_SIZE, :] = True
    certain_non_bone_pixels[-BORDER_SIZE:-1, :] = True
    certain_non_bone_pixels[:, 0:BORDER_SIZE] = True
    certain_non_bone_pixels[:, -BORDER_SIZE:-1] = True

    smoothed_hsv = median(hsv_image[:, :, 0], disk(50))
    threshold = MU * np.median(smoothed_hsv[certain_bone_pixels])

    possible_bones = np.zeros_like(hsv_image[:, :, 0])
    possible_bones[smoothed_hsv < threshold] = 1

    markers = np.zeros_like(possible_bones)
    markers[certain_bone_pixels] = 1
    markers[certain_non_bone_pixels] = 2

    labels = morphology.watershed(-possible_bones, markers)

    return labels
def find_neighbors(self, imbin, max_extension):
    background = np.zeros(imbin.shape)
    background[imbin == 0] = 255
    distance = ndi.distance_transform_edt(background)

    cell_labels = label(imbin, neighbors=4, background=0)
    # this is a hack, as background pixels obtain label -1
    # and we do not want to have negative values.
    # from version 0.12 on this can probably be removed.
    cell_labels = cell_labels - cell_labels.min()

    # the mask is an extension of the initial shape by max_extension.
    # it can be derived from the distance map (straightforward).
    mask = np.zeros(imbin.shape)
    mask[distance < max_extension] = 255

    # The watershed of the distance transform of the background;
    # this corresponds to an approximation of the "cells".
    labels = watershed(distance, cell_labels, mask=mask)

    if self.settings.debug_screen_output:
        out_filename = os.path.join(self.settings.img_debug_folder,
                                    '%s16_distance.png' % self.prefix)
        temp = distance / distance.max()
        skimage.io.imsave(out_filename, temp)

        out_filename = os.path.join(self.settings.img_debug_folder,
                                    '%s16_labels_from_ws.png' % self.prefix)
        skimage.io.imsave(out_filename, labels)

    return cell_labels, labels
def segment(image, thresh):
    # preprocess image
    gray = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)

    # perform Euclidean distance transform
    distances = ndimage.distance_transform_edt(thresh)
    localMax = peak_local_max(distances, indices=False, min_distance=3, labels=thresh)

    # perform connected component analysis on local peaks
    markers = ndimage.label(localMax, structure=np.ones((3, 3)))[0]
    labels = watershed(-distances, markers, mask=thresh)

    # loop over labels returned from watershed to mark them
    for label in np.unique(labels):
        if label == 0:
            continue
        mask = np.zeros(gray.shape, dtype="uint8")
        mask[labels == label] = 255

        # find contours in mask and choose the biggest contour by area
        contours = cv2.findContours(mask.copy(), cv2.RETR_EXTERNAL,
                                    cv2.CHAIN_APPROX_SIMPLE)[-2]
        contour = max(contours, key=cv2.contourArea)

        # draw a circle around the largest contour
        ((x, y), r) = cv2.minEnclosingCircle(contour)
        cv2.circle(image, (int(x), int(y)), int(r), (0, 255, 0), 2)

    # show final image
    cv2.imshow("Output", image)
    # exclude the background label from the count
    return len(np.unique(labels)) - 1
def waterShed(blob, shape):
    img = np.zeros(shape, np.uint16)
    rows, cols = zip(*blob)
    img[rows, cols] = 99999
    D = ndimage.distance_transform_edt(img)

    # grow min_distance until the watershed yields at most two blobs
    mindist = 7
    labels = [1, 2, 3, 4]
    while len(np.unique(labels)) > 3:
        mindist += 1
        localMax = peak_local_max(D, indices=False, min_distance=mindist, labels=img)
        markers = ndimage.label(localMax, structure=np.ones((3, 3)))[0]
        labels = watershed(-D, markers, mask=img)

    subBlobs = []
    for label in np.unique(labels):
        if label == 0:
            continue
        ww = np.where(labels == label)
        bb = list(zip(ww[0], ww[1]))
        subBlobs.append(bb)
    # code.interact(local=locals())

    try:
        peaks = list(zip(np.where(localMax == True)[0], np.where(localMax == True)[1]))
        return subBlobs, peaks[0]
    except IndexError:
        return subBlobs, 0
def ImageSegmentation(self):
    kernel = np.array(self.coords_cell, np.int32)
    circle = np.zeros(self.image.shape[:2], np.uint8)
    # Link the "left click" coordinates with polylines; the thickness could be
    # adjusted, and the inside of the polyline could also be filled.
    cv2.polylines(circle, [kernel], False, (255, 0, 0), thickness=5)

    kernel2 = np.array(self.coords_cell, np.int32)
    circle2 = np.zeros(self.image.shape[:2], np.uint8)
    cv2.polylines(circle2, [kernel2], False, (255, 0, 0), thickness=4)

    # Segment the protein accumulation using watershed
    self.segmentation = morphology.watershed(self.clean_image, self.markers, mask=circle)
    self.segmentation[self.segmentation < 1.5] = 0
    self.segmentation = self.segmentation.astype('uint8')

    # Find contours of the segmented area
    contours, hierarchy = cv2.findContours(self.segmentation, 1, 2)
    # Find contours of the masked area
    contours_circle, hierarchy = cv2.findContours(circle2, 1, 2)

    self.area = [cv2.contourArea(cnt) for cnt in contours if cv2.contourArea(cnt) != 0.0]
    self.area = sum(self.area)
    self.area_mask = [cv2.contourArea(cnt_cell) for cnt_cell in contours_circle]
    self.area_mask = sum(self.area_mask)

    if self.area > 0:
        self.surface_segmented.append(self.area)
    if self.area_mask > 0:
        self.surface_masked.append(self.area_mask)
def segmentationize(imageSe):
    """
    Divides coherent forms of an image into smaller groups of type integer.
    """
    # create a matrix of distances to the next surrounding area
    distance = ndimage.distance_transform_edt(imageSe, sampling=3)
    erosed = ndimage.binary_erosion(imageSe, iterations=8).astype(imageSe.dtype)
    distanceE = ndimage.distance_transform_edt(erosed, sampling=3)
    distance += (2 * distanceE)

    labels, num = label(imageSe, background=0, return_num=True)
    sizes_image = ndimage.sum(imageSe, labels, range(num))
    sizes_image = np.sort(sizes_image, axis=None)
    pos = int(0.4 * num)
    areal = int(sizes_image[pos] ** 0.5)
    if areal <= 10:
        areal = 10
    elif (areal % 2) != 0:
        areal += 1
    footer = circarea(areal)  # draw circle area

    # find the positions of the maxima from the distances
    local_maxi = peak_local_max(distance, indices=False, footprint=footer, labels=imageSe)
    markers = label(local_maxi)

    # the watershed algorithm starts at the maxima and returns labels of particles
    simplefilter("ignore", FutureWarning)  # avoid warning in watershed method
    labels_ws = watershed(-distance, markers, mask=imageSe)
    simplefilter("default", FutureWarning)

    return labels, labels_ws, local_maxi
def expand_watershed(self, pubsub_evt):
    markers = self.matrix
    image = self.viewer.slice_.matrix
    self.viewer.slice_.do_threshold_to_all_slices()
    mask = self.viewer.slice_.current_mask.matrix[1:, 1:, 1:]
    ww = self.viewer.slice_.window_width
    wl = self.viewer.slice_.window_level

    if BRUSH_BACKGROUND in markers and BRUSH_FOREGROUND in markers:
        tmp_image = ndimage.morphological_gradient(
            get_LUT_value(image, ww, wl).astype('uint16'), self.mg_size)
        tmp_mask = watershed(tmp_image, markers)

        if self.viewer.overwrite_mask:
            mask[:] = 0
            mask[tmp_mask == 1] = 253
        else:
            mask[(tmp_mask == 2) & ((mask == 0) | (mask == 2) | (mask == 253))] = 2
            mask[(tmp_mask == 1) & ((mask == 0) | (mask == 2) | (mask == 253))] = 253
            # mask[:] = tmp_mask

        self.viewer.slice_.current_mask.matrix[0] = 1
        self.viewer.slice_.current_mask.matrix[:, 0, :] = 1
        self.viewer.slice_.current_mask.matrix[:, :, 0] = 1

        self.viewer.slice_.discard_all_buffers()
        self.viewer.slice_.current_mask.clear_history()
        Publisher.sendMessage('Reload actual slice')
def seeded_watershed(boundary, seed_threshold=0, seed_size=5, mask=None):
    """Extract seeds from boundary prediction and run seeded watershed.

    Args:
        boundary (3D numpy array) = boundary predictions
        seed_threshold (int) = add seeds where boundary prob is <= threshold
        seed_size (int) = seeds must be >= seed size
        mask (3D numpy array) = true to watershed, false to ignore

    Returns:
        3D watershed
    """
    from skimage import morphology as skmorph
    from numpy import bincount

    # get seeds
    from scipy.ndimage import label as label2
    seeds = label2(boundary <= seed_threshold, output=numpy.uint32)[0]

    # remove small seeds
    if seed_size > 0:
        component_sizes = bincount(seeds.ravel())
        small_components = component_sizes < seed_size
        small_locations = small_components[seeds]
        seeds[small_locations] = 0

    # mask out background (no need to zero out seeds, since the mask handles it)
    supervoxels = skmorph.watershed(boundary, seeds, None, None, mask)

    return supervoxels
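# A minimal usage sketch for seeded_watershed with a synthetic 3D boundary map
# (shapes and values are illustrative only):
import numpy as np

# Two low-boundary "cells" separated by a high-boundary wall
boundary = np.ones((4, 8, 8), dtype=np.float32)
boundary[:, 1:7, 1:4] = 0.0
boundary[:, 1:7, 5:7] = 0.0

supervoxels = seeded_watershed(boundary, seed_threshold=0, seed_size=2)
print(np.unique(supervoxels))  # expect the two supervoxel labels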
def LargestWatershedRegion(shapes, dims, skipBias):
    L = len(shapes) - skipBias
    shapes = shapes.reshape((-1,) + dims[1:])
    D = len(dims)
    num_peaks = 4
    # structure = np.ones(tuple(3 * np.ones((np.ndim(shapes) - 1, 1))))
    for ll in range(L):
        temp = shapes[ll]
        local_maxi = peak_local_max(gaussian_filter(temp, [1] * (D - 1)),
                                    exclude_border=False, indices=False,
                                    num_peaks=num_peaks)
        markers, junk = label(local_maxi)
        nonzero_mask = temp > 0
        if np.sum(nonzero_mask) > (3 ** 3) * num_peaks:
            labels = watershed(-temp, markers, mask=nonzero_mask)  # watershed regions
            # keep only the region with the largest total intensity
            ind = 1
            temp2 = np.copy(temp)
            temp2[labels != 1] = 0
            total_intensity = sum(temp2.reshape(-1,))
            for kk in range(2, labels.max() + 1):
                temp2 = np.copy(temp)
                temp2[labels != kk] = 0
                total_intensity2 = sum(temp2.reshape(-1,))
                if total_intensity2 > total_intensity:
                    ind = kk
                    total_intensity = total_intensity2
            temp[labels != ind] = 0
            shapes[ll] = temp
    shapes = shapes.reshape((len(shapes), -1))
    return shapes
def watershed(base_image, seed_image=None, threshold_distance=80):
    """Execute watershed with chosen seeds."""
    from scipy import ndimage as ndi
    from skimage.morphology import watershed
    from skimage.feature import peak_local_max
    from skimage.morphology import label
    import matplotlib.pyplot as plt

    distance = ndi.distance_transform_edt(base_image)
    # imgplot = plt.imshow(distance)
    fig = plt.figure()             # a new figure window
    ax = fig.add_subplot(1, 1, 1)  # specify (nrows, ncols, axnum)
    # ax.imshow(distance > threshold_distance, cmap='Greys')
    thresh = distance > threshold_distance
    ax.imshow(thresh, cmap='Greys')

    # local_maxi = peak_local_max(distance, labels=jac, footprint=np.ones((100, 100)), indices=False)
    if seed_image is None:
        markers = label(thresh)
    else:
        markers = label(seed_image)
    # imgplot = plt.imshow(markers)

    watersh = watershed(-distance, markers, mask=base_image)
    # plt.imshow(watersh, cmap=plt.cm.viridis, interpolation='nearest')
    return watersh
def isolate_single_seed(stack_path, output_path):
    """Load stack then isolate seed."""
    sigma = 10
    iterations = 1

    raw_stack = Image3D.from_path(stack_path)
    print(raw_stack.shape)

    smoothed = gaussian_filter(raw_stack, sigma).view(Image3D)
    edges = sobel_magnitude_nd(smoothed).view(Image3D)
    # edges.save('edges')

    # seed the watershed from the stack's central voxel
    labels = np.zeros(raw_stack.shape)
    cx, cy, cz = map(lambda x: x // 2, raw_stack.shape)
    labels[cx, cy, cz] = 1

    threshold = threshold_otsu(smoothed)
    thresholded = smoothed > threshold
    # thresholded.view(Image3D).save('thresh')

    segmentation = watershed(edges, markers=labels, mask=thresholded)
    # segmentation.view(Image3D).save('seg')

    dilated = binary_dilation(segmentation, iterations=iterations)
    isolate = np.multiply(raw_stack, dilated)
    isolate.view(Image3D).save(output_path)
# plt.figure(5)
# plt.imshow(segmented_ct_scan_Blur[i], cmap=plt.cm.gray)
# plt.figure(6)
# plt.imshow(Nodule_Candidate[i], cmap=plt.cm.gray)
# # plt.figure(7)
# # plt.imshow(Nodule_Candidate_mor[i], cmap=plt.cm.gray)
# plt.show()

start_time = time.time()
Total_Num_Label = 0
nodule_total_array = []

distance1 = ndi.distance_transform_edt(Nodule_Candidate)
local_maxi1 = Dist_Thresholding_3D(distance1)
markers1 = label(local_maxi1)
labels1 = watershed(-distance1, markers1, mask=Nodule_Candidate)
nodule_total_array, Total_Num_Label1 = Nodule_Candidate_Extract(
    pix_resampled, segmented_ct_scan, labels1, patients[i], i, nodule_total_array)
Total_Num_Label = Total_Num_Label + Total_Num_Label1

distance2 = ndi.distance_transform_edt(Nodule_Candidate_Op1)
local_maxi2 = Dist_Thresholding_3D(distance2)
markers = label(local_maxi2)
labels2 = watershed(-distance2, markers, mask=Nodule_Candidate_Op1)
nodule_total_array, Total_Num_Label2 = Nodule_Candidate_Extract(
    pix_resampled, segmented_ct_scan, labels2, patients[i], i, nodule_total_array)
Total_Num_Label = Total_Num_Label + Total_Num_Label2
def segment(self):
    # start timing
    starttime = time.time()

    # DATA IMPORT AND PREPROCESSING
    f_directory = os.getcwd()
    print('reading ' + self.filename + ' ...')
    raw_img = io.imread(self.filename)
    default_shape = raw_img.shape
    print('raw image imported.')

    # gaussian filtering (assumes a 60x objective)
    print('performing gaussian filtering...')
    gaussian_img = np.zeros(shape=default_shape, dtype='float32')
    for i in range(default_shape[0]):
        gaussian_img[i] = gaussian_filter(input=raw_img[i], sigma=(2, 2))
    print('cytosolic image smoothed.')
    print('preprocessing complete.')

    # BINARY THRESHOLDING AND IMAGE CLEANUP
    print('thresholding...')
    threshold_img = np.copy(gaussian_img)
    threshold_img[threshold_img < self.threshold] = 0
    threshold_img[threshold_img > 0] = 1
    print('filling holes...')
    filled_img = np.zeros(shape=default_shape, dtype='float32')
    # Go through each timepoint separately and create a filled image.
    for i in range(default_shape[0]):
        filled_img[i] = binary_fill_holes(np.copy(threshold_img[i]))
    print('2d holes filled.')
    print('binary processing complete.')

    # DISTANCE AND MAXIMA TRANSFORMATIONS TO FIND CELLS
    # the next two steps assume a 60x objective
    print('generating distance map...')
    dist_map = np.zeros(shape=default_shape, dtype='float32')
    for i in range(default_shape[0]):
        dist_map[i] = distance_transform_edt(np.copy(filled_img[i]), sampling=(1, 1))
    print('distance map complete.')

    print('smoothing distance map...')
    smooth_dist = np.zeros(shape=default_shape, dtype='float32')
    for i in range(default_shape[0]):
        smooth_dist[i] = gaussian_filter(np.copy(dist_map[i]), [4, 4])
    print('distance map smoothed.')

    print('identifying maxima...')
    max_strel_2d = generate_binary_structure(2, 2)
    maxima = np.zeros(shape=default_shape, dtype='float32')
    for i in range(default_shape[0]):
        maxima[i] = maximum_filter(smooth_dist[i],
                                   footprint=max_strel_2d) == smooth_dist[i]
        bgrd_2d = smooth_dist[i] == 0
        eroded_background_2d = binary_erosion(bgrd_2d, structure=max_strel_2d,
                                              border_value=1)
        maxima[i] = np.logical_xor(maxima[i], eroded_background_2d)
    print('maxima identified.')

    # WATERSHED SEGMENTATION
    labs = np.zeros(shape=default_shape, dtype='float32')
    for i in range(default_shape[0]):
        labs[i] = self.watershed_labels(maxima[i])
    print('watershedding...')
    cells = np.zeros(shape=default_shape, dtype='float32')
    for i in range(default_shape[0]):
        cells[i] = watershed(-smooth_dist[i], labs[i], mask=filled_img[i])
    print('raw watershedding complete.')

    print('cleaning up cells...')
    clean_cells = np.zeros(shape=default_shape, dtype='float32')
    for i in range(default_shape[0]):
        clean_cells[i] = self.reassign_pixels_2d(cells[i])
    print('cell cleanup complete.')
    print('SEGMENTATION OPERATION COMPLETE.')

    endtime = time.time()
    runningtime = endtime - starttime
    print('time elapsed: ' + str(runningtime) + ' seconds')

    cell_num = [[] for f in range(default_shape[0])]
    volume = [[] for f in range(default_shape[0])]
    # Assign a cell number and volume to each segmented cell in the timecourse.
    for i in range(default_shape[0]):
        cell_num[i], volume[i] = np.unique(clean_cells[i], return_counts=True)
    for j in range(default_shape[0]):
        cell_num[j] = cell_num[j].astype('uint16')
        volume[j] = volume[j].astype('uint16')

    cell_nums = [[] for f in range(default_shape[0])]
    volumes = [[] for f in range(default_shape[0])]
    # Create a dictionary matching each cell number to its volume
    # (the 2D pixel count).
    for k in range(default_shape[0]):
        volumes[k] = dict(zip(cell_num[k], volume[k]))
        cell_nums[k] = cell_num[k][np.nonzero(cell_num[k])]
    # Delete any artifacts that have a volume of 0.
    for vol in range(len(volumes)):
        if 0 in volumes[vol]:
            del volumes[vol][0]

    # Determine the distance from center in order to find the mother cell.
    print('determining distances...')
    distances = self.avg_distance_from_center(clean_cells)
    print('distances determined.')

    mother = [[] for f in range(default_shape[0])]
    # Determine which cell is the mother based on the smallest distance, and
    # check whether that cell also has the largest volume.
    for j in range(default_shape[0]):
        if volumes[j] == {} or distances[j] == {}:
            pass
        else:
            if self.keywithmaxval(volumes[j]) == self.keywithminval(distances[j]):
                mother[j] = self.keywithminval(distances[j])
                print(j)
            else:
                mother[j] = self.keywithminval(distances[j])
                print('Maximum size does not agree with minimum distance! '
                      'Mother may be incorrect; assuming min distance.')
                print(j)

    mother_cell = np.copy(clean_cells)
    print('eliminating all but mother cell...')
    # Eliminate the masks for all cells that are not the mother and reassign
    # the mother's number to 1.
    for frame in range(default_shape[0]):
        mother_cell[frame] = np.copy(clean_cells[frame])
        mother_cell[frame][clean_cells[frame] != mother[frame]] = 0
        mother_cell[frame][mother_cell[frame] > 0] = 1
    print('mothers produced.')

    mother_num = [[] for f in range(default_shape[0])]
    new_volume = [[] for f in range(default_shape[0])]
    # Recompute the mother cell's volume.
    for i in range(default_shape[0]):
        mother_num[i], new_volume[i] = np.unique(mother_cell[i], return_counts=True)
    for j in range(default_shape[0]):
        mother_cell[j] = mother_cell[j].astype('uint16')
        new_volume[j] = new_volume[j].astype('uint16')

    mother_nums = [[] for f in range(default_shape[0])]
    new_volumes = [[] for f in range(default_shape[0])]
    for k in range(default_shape[0]):
        new_volumes[k] = dict(zip(mother_num[k], new_volume[k]))
        mother_nums[k] = mother_num[k][np.nonzero(mother_num[k])]
    for vol in range(len(new_volumes)):
        del new_volumes[vol][0]

    return SegmentObj(f_directory, self.filename, raw_img, self.threshold,
                      threshold_img, filled_img, dist_map, smooth_dist, maxima,
                      labs, cells, clean_cells, cell_nums, volumes, mother_cell,
                      mother_nums, new_volumes)
def watershed_segmentation(num, img, thresh, gray, manual, outline):
    master_data = []
    target = []
    clean = img.copy()
    cleanContours = np.ones((1000, 1000, 3)) * 255
    # cv2.imshow('clean', clean)
    # cv2.waitKey(0)
    # cv2.destroyAllWindows()

    D = ndimage.distance_transform_edt(thresh)
    localMax = peak_local_max(D, indices=False, min_distance=20, labels=thresh)

    # perform a connected component analysis on the local peaks,
    # using 8-connectivity, then apply the watershed algorithm
    markers = ndimage.label(localMax, structure=np.ones((3, 3)))[0]
    labels = watershed(-D, markers, mask=thresh)

    count = 1
    for label in np.unique(labels):
        # if the label is zero, we are examining the 'background',
        # so simply ignore it
        if label == 0:
            continue

        # otherwise, allocate memory for the label region and draw it on the mask
        mask = np.zeros(gray.shape, dtype="uint8")
        mask[labels == label] = 255

        # detect contours in the mask and grab the largest one
        cnts = cv2.findContours(mask.copy(), cv2.RETR_EXTERNAL,
                                cv2.CHAIN_APPROX_SIMPLE)[-2]
        cleanContours = cv2.drawContours(cleanContours, cnts, -1, (255, 0, 0), 1)
        c = max(cnts, key=cv2.contourArea)

        if c.shape[0] >= 5:
            if manual:
                manual_label(c, clean, target)
            # features = extractFeatures(c)
            # master_data.append(features)
            x, y, w, h = cv2.boundingRect(c)
            if outline:
                roi = cleanContours[y:y + h, x:x + w]
                height = np.size(roi, 0)
                width = np.size(roi, 1)
                # cv2.fillPoly(img, pts=[contours], color=(255, 255, 255))
                print(width, height)
                print((128 - w) / 2., (128 - h) / 2.)
                # pad the ROI out to 128x128 with a white border
                top = int(math.ceil((128 - h) / 2.))
                bottom = int(math.floor((128 - h) / 2.))
                left = int(math.ceil((128 - w) / 2.))
                right = int(math.floor((128 - w) / 2.))
                roi = cv2.copyMakeBorder(roi, top, bottom, left, right,
                                         cv2.BORDER_CONSTANT, None, (255, 255, 255))
                cv2.imwrite(rootpath + "\Images\cells_new\{0}_c{1}.png".format(num, count), roi)
            else:
                roi = clean[y:y + h, x:x + w]
                # cv2.imwrite(rootpath + "\Images\cells_out\{0}_c{1}.png".format(num, count), roi)
                cv2.imwrite(rootpath + "\output\{0}_c{1}.png".format(num, count), roi)
            count += 1
    # print(num, count)
    return master_data, target
local_maxi = peak_local_max(img2, indices=False, min_distance=1)
imgm2 = img2.copy()
imgm2[local_maxi] = 3 * imgm2.max()
plt.subplot(2, 2, 3)
plt.imshow(imgm2, cmap=plt.cm.jet, interpolation='nearest')

local_maxi = peak_local_max(img, indices=False, min_distance=5)
imgm = img.copy()
imgm[local_maxi] = 3 * imgm.max()
plt.subplot(2, 2, 4)
plt.imshow(imgm, cmap=plt.cm.jet, interpolation='nearest')

markers = ndi.label(local_maxi)[0]
labelsws = watershed(-img, markers, mask=None)
labels = labelsws.copy()
labels.max()

# if False:
#     cls = [[1], range(2, 6), range(6, 11), range(11, 21), range(21, 26), range(26, 31)]
#     cls = [range(1, 3), range(3, 11), range(11, 31)]
#     for i, c in enumerate(cls):
#         for cc in c:
#             labels[labelsws == cc] = i

fig, axes = plt.subplots(ncols=3, sharex=True, sharey=True,
                         subplot_kw={'adjustable': 'box-forced'})
ax0, ax1, ax2 = axes
ax0.imshow(img, cmap=plt.cm.jet, interpolation='nearest')
ax0.set_title('PDF')
# plt.imshow(np.ma.array(green_max, mask=green_max==0), interpolation='none')
# plt.show()

#------------------------------------------------------------------------------
# SEGMENTATION: EXPANSION BY WATERSHED

# Watershedding is a relatively simple but powerful algorithm for expanding
# seeds. The image intensity is considered as a topographical map (with high
# intensities being "mountains" and low intensities "valleys") and water is
# poured into the valleys from each of the seeds. The water first labels the
# lowest intensity pixels around the seeds, then continues to fill up. The cell
# boundaries are where the waterfronts between different seeds touch.

# Get the watershed function and run it
from skimage.morphology import watershed
green_ws = watershed(green_smooth, green_max)

# Show result as transparent overlay
# Note: For a better visualization, see "FINDING CELL EDGES" below!
# plt.imshow(green_smooth, cmap='gray', interpolation='none')
# plt.imshow(green_ws, interpolation='none', alpha=0.7)
# plt.show()

# Notice that the previously connected cells are now mostly separated and the
# membranes are partitioned to their respective cells.
# ...however, we now see a few cases of oversegmentation!
# This is a typical example of the trade-offs one has to face in any
# computational classification task.
#------------------------------------------------------------------------------
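# A self-contained sketch of the same seeds-then-flood idea (all inputs are
# synthetic here, for illustration; this variant floods an inverted intensity
# image so that bright peaks become basins):
import numpy as np
from scipy import ndimage as ndi
from skimage.feature import peak_local_max
from skimage.morphology import watershed

img = np.zeros((60, 60))
img[15:35, 10:30] = 1.0
img[25:45, 30:50] = 1.0                      # two touching "cells"
smooth = ndi.gaussian_filter(img, 3)
peaks = peak_local_max(smooth, min_distance=10, indices=False)
seeds = ndi.label(peaks)[0]                  # one integer label per seed
labels = watershed(-smooth, seeds)           # waterfronts meet at the boundary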
def segmentByClustering(rgbImage, colorSpace, clusteringMethod, numberOfClusters):
    # import the dataset-checking function from main, then download and unzip the dataset
    from main import check_dataset
    check_dataset()

    # imports
    import matplotlib.pyplot as plt
    import os
    from skimage import io, color
    import numpy as np
    from sklearn.cluster import KMeans
    from sklearn.mixture import GaussianMixture
    from sklearn.cluster import AgglomerativeClustering
    from skimage.feature import peak_local_max
    from skimage.morphology import watershed
    from scipy import ndimage
    import cv2
    from main import imshow
    from scipy import misc
    from skimage.transform import resize

    # get the image path
    # cwd = os.getcwd()
    # img_file = os.path.join('BSDS_small', rgbImage)
    img_file = 'BSR' + '/' + 'BSDS500' + '/' + 'data' + '/' + 'images' + '/' + 'test' + '/' + rgbImage

    # show the groundtruth segmentation / edges
    # groundtruth(img_file)
    # from main import groundtruth_edges
    # groundtruth_edges(img_file)

    # read the image (rgb) and smooth it
    imag = io.imread(img_file)
    from skimage.filters import gaussian
    rgb = gaussian(imag, sigma=1.5, multichannel=True)

    # HSV color space
    if colorSpace == 'hsv':
        hsv = color.rgb2hsv(rgb)
        hsv = cv2.normalize(hsv, np.zeros(hsv.shape, dtype=np.uint8), alpha=0, beta=255,
                            norm_type=cv2.NORM_MINMAX, dtype=cv2.CV_8U)
        # shape of the image (rows, cols, channels)
        (fils, cols, channels) = hsv.shape

        if clusteringMethod == 'kmeans':
            X = hsv.reshape(fils * cols, 3)
            kmeans = KMeans(n_clusters=numberOfClusters, random_state=0).fit(X)
            # reshape the labels to obtain the 2d segmented image
            segmented_img = kmeans.labels_.reshape((fils, cols))
        elif clusteringMethod == 'gmm':
            X = hsv.reshape(fils * cols, 3)
            gmm = GaussianMixture(n_components=numberOfClusters).fit(X)
            cluster = gmm.predict(X)
            segmented_img = cluster.reshape(fils, cols)
        elif clusteringMethod == 'hierarchical':
            hsv2 = resize(hsv, (int(hsv.shape[0] / 4), int(hsv.shape[1] / 4)),
                          anti_aliasing=True)
            hsv2 = cv2.normalize(hsv2, np.zeros(hsv2.shape, dtype=np.uint8), alpha=0, beta=255,
                                 norm_type=cv2.NORM_MINMAX, dtype=cv2.CV_8U)
            (fils, cols, channels) = hsv2.shape
            X = hsv2.reshape(fils * cols, 3)
            cluster = AgglomerativeClustering(n_clusters=numberOfClusters,
                                              affinity='manhattan', linkage='complete')
            cluster.fit_predict(X)
            segmented_img = cluster.labels_.reshape(fils, cols)
            segmented_img = misc.imresize(segmented_img, (hsv.shape[0], hsv.shape[1]),
                                          interp='bicubic')
        elif clusteringMethod == 'watershed':
            hsv = np.mean(hsv, axis=2)
            local_maxima = peak_local_max(-1 * hsv, min_distance=15, indices=False,
                                          num_peaks=numberOfClusters)
            marks = ndimage.label(local_maxima)[0]
            segmented_img = watershed(hsv, marks)

    # Lab color space
    elif colorSpace == 'lab':
        lab = color.rgb2lab(rgb)
        lab = cv2.normalize(lab, np.zeros(lab.shape, dtype=np.uint8), alpha=0, beta=255,
                            norm_type=cv2.NORM_MINMAX, dtype=cv2.CV_8U)
        (fils, cols, channels) = lab.shape

        if clusteringMethod == 'kmeans':
            X = lab.reshape(fils * cols, 3)
            kmeans = KMeans(n_clusters=numberOfClusters, random_state=0).fit(X)
            segmented_img = kmeans.labels_.reshape((fils, cols))
        elif clusteringMethod == 'gmm':
            X = lab.reshape(fils * cols, 3)
            gmm = GaussianMixture(n_components=numberOfClusters).fit(X)
            cluster = gmm.predict(X)
            segmented_img = cluster.reshape(fils, cols)
        elif clusteringMethod == 'hierarchical':
            lab2 = resize(lab, (int(lab.shape[0] / 4), int(lab.shape[1] / 4)),
                          anti_aliasing=True)
            lab2 = cv2.normalize(lab2, np.zeros(lab2.shape, dtype=np.uint8), alpha=0, beta=255,
                                 norm_type=cv2.NORM_MINMAX, dtype=cv2.CV_8U)
            (fils, cols, channels) = lab2.shape
            X = lab2.reshape(fils * cols, 3)
            cluster = AgglomerativeClustering(n_clusters=numberOfClusters,
                                              affinity='euclidean', linkage='ward')
            cluster.fit_predict(X)
            segmented_img = cluster.labels_.reshape(fils, cols)
            segmented_img = misc.imresize(segmented_img, (rgb.shape[0], rgb.shape[1]),
                                          interp='bicubic')
        elif clusteringMethod == 'watershed':
            lab = np.mean(lab, axis=2)
            local_maxima = peak_local_max(-1 * lab, min_distance=15, indices=False,
                                          num_peaks=numberOfClusters)
            marks = ndimage.label(local_maxima)[0]
            segmented_img = watershed(lab, marks)

    # rgb+xy feature space
    elif colorSpace == 'rgb_xy':
        (fils, cols, channels) = rgb.shape
        pos_x = np.ones((fils, cols), dtype='uint8')
        pos_y = np.ones((fils, cols), dtype='uint8')
        for i in range(0, fils):
            for j in range(0, cols):
                pos_x[i, :] = i
                pos_y[:, j] = j
        pos_x = cv2.normalize(pos_x, np.zeros(pos_x.shape, dtype=np.uint8), alpha=0, beta=255,
                              norm_type=cv2.NORM_MINMAX, dtype=cv2.CV_8U)
        pos_y = cv2.normalize(pos_y, np.zeros(pos_y.shape, dtype=np.uint8), alpha=0, beta=255,
                              norm_type=cv2.NORM_MINMAX, dtype=cv2.CV_8U)
        rgb_xy = np.dstack((rgb, pos_x, pos_y))

        if clusteringMethod == 'kmeans':
            X = rgb_xy.reshape(fils * cols, 5)
            kmeans = KMeans(n_clusters=numberOfClusters, random_state=0).fit(X)
            segmented_img = kmeans.labels_.reshape((fils, cols))
        elif clusteringMethod == 'gmm':
            X = rgb_xy.reshape(fils * cols, 5)
            gmm = GaussianMixture(n_components=numberOfClusters).fit(X)
            cluster = gmm.predict(X)
            segmented_img = cluster.reshape(fils, cols)
        elif clusteringMethod == 'hierarchical':
            rgb2 = resize(rgb, (int(rgb.shape[0] / 4), int(rgb.shape[1] / 4)),
                          anti_aliasing=True)
            rgb2 = cv2.normalize(rgb2, np.zeros(rgb2.shape, dtype=np.uint8), alpha=0, beta=255,
                                 norm_type=cv2.NORM_MINMAX, dtype=cv2.CV_8U)
            (fils, cols, channels) = rgb2.shape
            pos_x = np.ones((fils, cols), dtype='uint8')
            pos_y = np.ones((fils, cols), dtype='uint8')
            for i in range(0, fils):
                for j in range(0, cols):
                    pos_x[i, :] = i
                    pos_y[:, j] = j
            rgb_xy = np.dstack((rgb2, pos_x, pos_y))
            X = rgb_xy.reshape(fils * cols, 5)
            cluster = AgglomerativeClustering(n_clusters=numberOfClusters,
                                              affinity='euclidean', linkage='ward')
            cluster.fit_predict(X)
            segmented_img = cluster.labels_.reshape(fils, cols)
            segmented_img = misc.imresize(segmented_img, (rgb.shape[0], rgb.shape[1]),
                                          interp='nearest')
        elif clusteringMethod == 'watershed':
            rgb_xy = np.mean(rgb, axis=2)
            local_maxima = peak_local_max(-1 * rgb_xy, min_distance=15, indices=False,
                                          num_peaks=numberOfClusters)
            marks = ndimage.label(local_maxima)[0]
            segmented_img = watershed(rgb_xy, marks)

    # lab+xy feature space
    elif colorSpace == 'lab_xy':
        lab = color.rgb2lab(rgb)
        lab2 = cv2.normalize(lab, np.zeros(lab.shape, dtype=np.uint8), alpha=0, beta=255,
                             norm_type=cv2.NORM_MINMAX, dtype=cv2.CV_8U)
        (fils, cols, channels) = lab.shape
        pos_x = np.ones((fils, cols))
        pos_y = np.ones((fils, cols))
        for i in range(0, fils):
            for j in range(0, cols):
                pos_x[i, :] = i
                pos_y[:, j] = j
        pos_x = cv2.normalize(pos_x, np.zeros(pos_x.shape, dtype=np.uint8), alpha=0, beta=255,
                              norm_type=cv2.NORM_MINMAX, dtype=cv2.CV_8U)
        pos_y = cv2.normalize(pos_y, np.zeros(pos_y.shape, dtype=np.uint8), alpha=0, beta=255,
                              norm_type=cv2.NORM_MINMAX, dtype=cv2.CV_8U)
        lab_xy = np.dstack((lab, pos_x, pos_y))

        if clusteringMethod == 'kmeans':
            X = lab_xy.reshape(fils * cols, 5)
            kmeans = KMeans(n_clusters=numberOfClusters).fit(X)
            segmented_img = kmeans.labels_.reshape((fils, cols))
        elif clusteringMethod == 'gmm':
            X = lab_xy.reshape(fils * cols, 5)
            gmm = GaussianMixture(n_components=numberOfClusters).fit(X)
            cluster = gmm.predict(X)
            segmented_img = cluster.reshape(fils, cols)
        elif clusteringMethod == 'hierarchical':
            lab2 = resize(lab, (int(lab.shape[0] / 4), int(lab.shape[1] / 4)),
                          anti_aliasing=True)
            lab2 = cv2.normalize(lab2, np.zeros(lab2.shape, dtype=np.uint8), alpha=0, beta=255,
                                 norm_type=cv2.NORM_MINMAX, dtype=cv2.CV_8U)
            (fils, cols, channels) = lab2.shape
            pos_x = np.ones((fils, cols), dtype='uint8')
            pos_y = np.ones((fils, cols), dtype='uint8')
            for i in range(0, fils):
                for j in range(0, cols):
                    pos_x[i, :] = i
                    pos_y[:, j] = j
            lab_xy = np.dstack((lab2, pos_x, pos_y))
            X = lab_xy.reshape(fils * cols, 5)
            cluster = AgglomerativeClustering(n_clusters=numberOfClusters,
                                              affinity='euclidean', linkage='ward')
            cluster.fit_predict(X)
            segmented_img = cluster.labels_.reshape(fils, cols)
            segmented_img = misc.imresize(segmented_img, (lab.shape[0], lab.shape[1]),
                                          interp='bicubic')
        elif clusteringMethod == 'watershed':
            lab_xy = np.mean(lab, axis=2)
            local_maxima = peak_local_max(-1 * lab_xy, min_distance=15, indices=False,
                                          num_peaks=numberOfClusters)
            marks = ndimage.label(local_maxima)[0]
            segmented_img = watershed(lab_xy, marks)

    # hsv+xy feature space
    elif colorSpace == 'hsv_xy':
        hsv = color.rgb2hsv(rgb)
        hsv = cv2.normalize(hsv, np.zeros(hsv.shape, dtype=np.uint8), alpha=0, beta=255,
                            norm_type=cv2.NORM_MINMAX, dtype=cv2.CV_8U)
        (fils, cols, channels) = hsv.shape
        pos_x = np.ones((fils, cols), dtype='uint8')
        pos_y = np.ones((fils, cols), dtype='uint8')
        for i in range(0, fils):
            for j in range(0, cols):
                pos_x[i, :] = i
                pos_y[:, j] = j
        pos_x = cv2.normalize(pos_x, np.zeros(pos_x.shape, dtype=np.uint8), alpha=0, beta=255,
                              norm_type=cv2.NORM_MINMAX, dtype=cv2.CV_8U)
        pos_y = cv2.normalize(pos_y, np.zeros(pos_y.shape, dtype=np.uint8), alpha=0, beta=255,
                              norm_type=cv2.NORM_MINMAX, dtype=cv2.CV_8U)
        hsv_xy = np.dstack((hsv, pos_x, pos_y))

        if clusteringMethod == 'kmeans':
            X = hsv_xy.reshape(fils * cols, 5)
            kmeans = KMeans(n_clusters=numberOfClusters, random_state=0).fit(X)
            segmented_img = kmeans.labels_.reshape((fils, cols))
        elif clusteringMethod == 'gmm':
            X = hsv_xy.reshape(fils * cols, 5)
            gmm = GaussianMixture(n_components=numberOfClusters).fit(X)
            cluster = gmm.predict(X)
            segmented_img = cluster.reshape(fils, cols)
        elif clusteringMethod == 'hierarchical':
            hsv2 = resize(hsv, (int(hsv.shape[0] / 4), int(hsv.shape[1] / 4)),
                          anti_aliasing=True)
            hsv2 = cv2.normalize(hsv2, np.zeros(hsv2.shape, dtype=np.uint8), alpha=0, beta=255,
                                 norm_type=cv2.NORM_MINMAX, dtype=cv2.CV_8U)
            (fils, cols, channels) = hsv2.shape
            pos_x = np.ones((fils, cols), dtype='uint8')
            pos_y = np.ones((fils, cols), dtype='uint8')
            for i in range(0, fils):
                for j in range(0, cols):
                    pos_x[i, :] = i
                    pos_y[:, j] = j
            hsv_xy = np.dstack((hsv2, pos_x, pos_y))
            X = hsv_xy.reshape(fils * cols, 5)
            cluster = AgglomerativeClustering(n_clusters=numberOfClusters,
                                              affinity='euclidean', linkage='ward')
            cluster.fit_predict(X)
            segmented_img = cluster.labels_.reshape(fils, cols)
        elif clusteringMethod == 'watershed':
            # hsv_xy = hsv_xy[:, :, 2]
            hsv_xy = np.mean(hsv, axis=2)
            local_maxima = peak_local_max(-1 * hsv_xy, min_distance=15, indices=False,
                                          num_peaks=numberOfClusters)
            marks = ndimage.label(local_maxima)[0]
            segmented_img = watershed(hsv_xy, marks)

    # plain rgb
    elif colorSpace == 'rgb':
        (fils, cols, channels) = rgb.shape

        if clusteringMethod == 'kmeans':
            X = rgb.reshape(fils * cols, 3)
            kmeans = KMeans(n_clusters=numberOfClusters, random_state=0).fit(X)
            segmented_img = kmeans.labels_.reshape((fils, cols))
        elif clusteringMethod == 'gmm':
            X = rgb.reshape(fils * cols, 3)
            gmm = GaussianMixture(n_components=numberOfClusters).fit(X)
            cluster = gmm.predict(X)
            segmented_img = cluster.reshape(fils, cols)
        elif clusteringMethod == 'hierarchical':
            rgb2 = resize(rgb, (int(rgb.shape[0] / 4), int(rgb.shape[1] / 4)),
                          anti_aliasing=True)
            rgb2 = cv2.normalize(rgb2, np.zeros(rgb2.shape, dtype=np.uint8), alpha=0, beta=255,
                                 norm_type=cv2.NORM_MINMAX, dtype=cv2.CV_8U)
            (fils, cols, channels) = rgb2.shape
            X = rgb2.reshape(fils * cols, 3)
            cluster = AgglomerativeClustering(n_clusters=numberOfClusters,
                                              affinity='euclidean', linkage='ward')
            cluster.fit_predict(X)
            segmented_img = cluster.labels_.reshape(fils, cols)
            segmented_img = misc.imresize(segmented_img, (rgb.shape[0], rgb.shape[1]),
                                          interp='bicubic')
        elif clusteringMethod == 'watershed':
            rgb = np.mean(rgb, axis=2)
            local_maxima = peak_local_max(-1 * rgb, min_distance=15, indices=False,
                                          num_peaks=numberOfClusters)
            marks = ndimage.label(local_maxima)[0]
            segmented_img = watershed(rgb, marks)

    return segmented_img
def nucleus_finder_dt(stack_in, name_in, smooth_type, smooth_size, cont_size, thresh_type, erode, min_radius, max_radius, test, plot, save): print('smooth_size', smooth_size) print('cont_size', cont_size) print('thresh_type', thresh_type) print('erode', erode) print('min_radius', min_radius) print('max_radius', max_radius) print('test', test) print('save', save) # stack_in = stack_in.max(axis=0) # z_size=1 z_size, y_size, x_size = int(np.shape(stack_in)[0]), int( np.shape(stack_in)[1]), int(np.shape(stack_in)[2]) nuclei_count = [] # Need to determine global threshold based on max intensity projetion that has # undergone same preprocessing as the stacks undergo max_int_proj = stack_in.max(axis=0) max_int_proj = preprocessor.smooth(np.copy(max_int_proj), smooth_type, smooth_size, cont_size) max_int_proj = preprocessor.contrast(np.copy(max_int_proj), cont_size) thresh = preprocessor.threshold(np.copy(max_int_proj), thresh_type) #################################################################### # Currently cluster in x and y then z separately - must fix this #################################################################### for im in range(0, z_size): image = stack_in[im] smoothed = preprocessor.smooth(np.copy(image), smooth_type, smooth_size, cont_size) smoothed = preprocessor.contrast(np.copy(smoothed), cont_size) # thresh = preprocessor.threshold(np.copy(smoothed), thresh_type) binary = smoothed > thresh # binary_closed = binary_closing(binary) binary_closed = binary_fill_holes(binary) if erode > 0: print('Performing', str(erode), 'binary erosions') eroded = binary_closed # n_erodes = min_radius // 2 for i in range(0, erode): eroded = binary_erosion(eroded) elif erode == 0: eroded = binary_closed # Distance transform and threshold thereof distance = ndimage.distance_transform_edt(eroded) distance = distance > min_radius distance = ndimage.distance_transform_edt(distance) # Only look for local maxima if there's anything left after erosion: if len(np.where(distance != 0)[0]) == 0: print('Zero distance matrix') labeled = np.zeros_like(image) preprocessor.testplot(image, im, [], [], [], [], smoothed, binary_closed, eroded, distance, labeled) continue # local_maxi = peak_local_max(distance, min_radius, exclude_border=False, indices=False, labels=smoothed) local_maxi = peak_local_max(distance, exclude_border=False, indices=False, footprint=square(3), labels=smoothed) # local_maxi = local_maxima(distance) x, y = np.where(local_maxi != False)[1], np.where( local_maxi != False)[0] # List for storing x and y-coords on each slice x_slice, y_slice = [], [] if len(x) == 0: print('No peaks detected') if len(x) == 1: print('Only one maximum detected') dist_non_zero = np.where(distance != 0) print(dist_non_zero) # Label the 1 remaining region labeled = distance labeled[dist_non_zero] = 1 labeled = labeled.astype(int) print(labeled[np.where(labeled != 0)]) f_prop = regionprops(labeled, intensity_image=image) for d in f_prop: print('len(f_prop)', len(f_prop)) radius = (d.area / np.pi)**0.5 nuclei_count.append([ d.weighted_centroid[1], d.weighted_centroid[0], im, d.area, radius, d.mean_intensity * d.area ]) x_slice.append(d.weighted_centroid[1]) y_slice.append(d.weighted_centroid[0]) # Produce test plot preprocessor.testplot(image, im, x, y, x_slice, y_slice, smoothed, binary_closed, eroded, distance, labeled) if len(x) > 1: # Use hierarchical clustering algorithm to cluster maxima # Could alternatively use structure-based labelling print('Detected', str(len(x)), 'peaks') positions = np.stack((y, x), 
            # The distance matrix is n_particles x n_particles in size;
            # pdist gives its upper triangle in condensed form
            dist_mat = dist.pdist(positions)
            link_mat = hier.linkage(dist_mat)
            # fcluster assigns each of the particles in positions a cluster to which it belongs
            cluster_idx = hier.fcluster(link_mat, min_radius, criterion='distance')
            particles = np.unique(cluster_idx)
            markers = np.zeros_like(smoothed, dtype=np.int32)  # watershed expects integer markers
            markers[y, x] = cluster_idx
            # markers = ndimage.label(local_maxi, structure=square(3))[0]
            # Now that the maxima have been clustered, label them by watershedding
            labeled = watershed(-distance, markers, mask=binary)
            f_prop = regionprops(labeled, intensity_image=image)
            for d in f_prop:
                radius = (d.area / np.pi)**0.5
                nuclei_count.append([d.weighted_centroid[1], d.weighted_centroid[0],
                                     im, d.area, radius, d.mean_intensity * d.area])
                x_slice.append(d.weighted_centroid[1])
                y_slice.append(d.weighted_centroid[0])
            # Plot binary and watershedded images showing the identified peaks
            if test == True:
                vis.testplot(image, name_in, im, x, y, x_slice, y_slice, smoothed,
                             binary_closed, eroded, distance, labeled)
    # Now we've scanned all the way through the stack and located things we think
    # are nuclei in each slice. Next we cluster them in 3D to link the slices together
    values = []
    if len(nuclei_count) > 1:
        # Cluster nuclei
        columns = ('x', 'y', 'z', 'area', 'radius', 'intensity')
        nuclei_count = pd.DataFrame(nuclei_count, columns=columns)
        # Now cluster nuclei by their x, y, z coordinates
        positions = np.stack((nuclei_count['x'].values, nuclei_count['y'].values,
                              nuclei_count['z'].values), axis=1)
        dist_mat = dist.pdist(positions)
        link_mat = hier.linkage(dist_mat)
        cluster_idx = hier.fcluster(link_mat, min_radius, criterion='distance')
        particles = np.unique(cluster_idx)
        # Calculate the weighted average position of each particle
        nuclei_count['particle'] = cluster_idx
        for j in particles:
            current = nuclei_count[nuclei_count['particle'] == j]
            # Normalisation constant of the weighted average
            norm = np.sum(current['intensity'])
            x_av = np.sum(current['intensity'] * current['x']) / norm
            y_av = np.sum(current['intensity'] * current['y']) / norm
            z_av = np.sum(current['intensity'] * current['z']) / norm
            a_max = np.amax(current['area'])
            # Exclude nuclei below a minimum total intensity
            if norm > 10000:
                values.append([x_av, y_av, z_av, a_max, norm])
    # Data frame containing the weighted-average locations of the particles
    columns = ('x', 'y', 'z', 'a_max', 'total_intensity')
    nuclei_averaged = pd.DataFrame(values, columns=columns)
    print('Found', len(nuclei_averaged), 'nuclei')
    if plot == True:
        # Numbers purely for visualisation purposes
        plt.clf()
        xy_scale, z_scale = 1., 2.
        # Plot 3D visualisation of the data
        fig = plt.figure(figsize=(12, 12))
        # xy projection:
        ax_xy = fig.add_subplot(111)
        ax_xy.imshow(stack_in.max(axis=0), cmap='gray')
        ax_xy.scatter(nuclei_averaged['x'], nuclei_averaged['y'],
                      facecolors='none', edgecolors='red', s=100)
        divider = make_axes_locatable(ax_xy)
        ax_zx = divider.append_axes("top", 2, pad=0.2, sharex=ax_xy)
        ax_zx.imshow(stack_in.max(axis=1), aspect=z_scale / xy_scale, cmap='gray')
        ax_zx.scatter(nuclei_averaged['x'], nuclei_averaged['z'],
                      facecolors='none', edgecolors='red', s=100)
        ax_yz = divider.append_axes("right", 2, pad=0.2, sharey=ax_xy)
        ax_yz.imshow(stack_in.max(axis=2).T, aspect=xy_scale / z_scale, cmap='gray')
        ax_yz.scatter(nuclei_averaged['z'], nuclei_averaged['y'],
                      facecolors='none', edgecolors='red', s=100)
        plt.draw()
        if save == True:
            outname = name_in + '_nuclei_py.tif'
            plt.savefig(outname, bbox_inches='tight')
            print(outname, 'saved')
        plt.close()
    return nuclei_averaged
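# Hedged sketch of the SciPy hierarchical-clustering step used above to merge
# nearby maxima into one marker per nucleus; the coordinates and the distance
# cutoff below are illustrative, not values from the pipeline.
import numpy as np
import scipy.spatial.distance as dist
import scipy.cluster.hierarchy as hier

points = np.array([[10, 10], [11, 10], [40, 42], [41, 40]])  # (row, col) peak positions
dist_mat = dist.pdist(points)      # condensed upper-triangle distance matrix
link_mat = hier.linkage(dist_mat)  # single-linkage tree by default
# Peaks closer than the cutoff collapse into the same cluster id
cluster_idx = hier.fcluster(link_mat, 5, criterion='distance')
print(cluster_idx)  # e.g. [1 1 2 2]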
def nps_finder_2d_stack(image_in, name_in, z_in=0, thresh_type='yen', smooth_type='mean', smooth_size=1, gauss_size=0, cont_size=0, sep_method='watershed', mass_cutoff=200, area_cutoff=0, max_radius=5, min_radius=1, test=True): # np.copy() used because peak_local_max does weird things to the image histogram if we use the actual image print('mass_cutoff is', mass_cutoff) y_size, x_size = np.shape(image_in) smoothed = np.copy(image_in) # Calculate threshold thresh = preprocessor.threshold(np.copy(smoothed), thresh_type) print('thresh is', thresh) im_max = smoothed.max() print('im_max is', im_max) binary = smoothed > thresh # Two approaches # 1. Identify local maxima in real-space image - separate by watershedding if sep_method == 'old_watershed': print('old_watershed') local_maxi = peak_local_max(np.copy(smoothed), min_distance=min_radius, threshold_abs=thresh, indices=False, labels=np.copy(smoothed)) labeled_image = ndimage.label(local_maxi, structure=disk(1))[0] # markers = ndimage.label(local_maxi)[0] labeled_image = watershed(-labeled_image, labeled_image, mask=binary) # print(labeled_image) if sep_method == 'binary_label': print('binary_label') labeled_image = ndimage.label(binary, structure=disk(1))[0] # print(labeled_image) if sep_method == 'peak_label': print('peak_label') local_maxi = peak_local_max(np.copy(smoothed), min_distance=min_radius, threshold_abs=thresh, indices=False, labels=np.copy(smoothed)) labeled_image = ndimage.label(local_maxi, structure=disk(1))[0] # print(labeled_image) columns = ('x', 'y', 'area', 'intensity') if len(np.where(labeled_image != 0)[0]) == 0: properties = pd.DataFrame([], columns=columns) if len(np.where(labeled_image != 0)[0]) > 0: f_prop = regionprops(labeled_image, intensity_image=smoothed) properties = [] for d in f_prop: properties.append([ d.weighted_centroid[1], d.weighted_centroid[0], d.area, d.mean_intensity * d.area ]) properties = pd.DataFrame(properties, columns=columns) if test == True: preprocessor.testplot_parts(image_in, name_in, z_in, properties['x'], properties['y'], smoothed, binary, labeled_image) return properties
def watershed_segmentation(rgb_img, mask, distance=10):
    """Uses the watershed algorithm to detect boundaries of objects.

    Needs a marker file which specifies area which is object (white), background (grey),
    unknown area (black).

    Inputs:
    rgb_img  = image to perform watershed on, needs to be 3D (i.e. np.shape = x,y,z, not np.shape = x,y)
    mask     = binary image, single channel, object in white and background in black
    distance = min_distance of local maximum

    Returns:
    watershed_header = shape data table headers
    watershed_data   = shape data table values
    analysis_images  = list of output images

    :param rgb_img: numpy.ndarray
    :param mask: numpy.ndarray
    :param distance: int
    :return watershed_header: list
    :return watershed_data: list
    :return analysis_images: list
    """
    # # Will be deprecating OpenCV version 2
    # if cv2.__version__[0] == '2':
    #     dist_transform = cv2.distanceTransform(mask, cv2.cv.CV_DIST_L2, maskSize=0)
    # else:
    dist_transform = cv2.distanceTransformWithLabels(mask, cv2.DIST_L2, maskSize=0)[0]

    localMax = peak_local_max(dist_transform, indices=False, min_distance=distance, labels=mask)

    markers = ndi.label(localMax, structure=np.ones((3, 3)))[0]
    dist_transform1 = -dist_transform
    labels = watershed(dist_transform1, markers, mask=mask)

    img1 = np.copy(rgb_img)
    # Build the palette once instead of on every loop iteration
    rand_color = color_palette(len(np.unique(labels)))
    for x in np.unique(labels):
        img1[labels == x] = rand_color[x]

    img2 = apply_mask(img1, mask, 'black')
    joined = np.concatenate((img2, rgb_img), axis=1)
    estimated_object_count = len(np.unique(markers)) - 1

    analysis_image = []
    analysis_image.append(joined)

    watershed_header = ('HEADER_WATERSHED', 'estimated_object_count')
    watershed_data = ('WATERSHED_DATA', estimated_object_count)

    if params.debug == 'print':
        print_image(dist_transform, os.path.join(params.debug_outdir,
                                                 str(params.device) + '_watershed_dist_img.png'))
        print_image(joined, os.path.join(params.debug_outdir,
                                         str(params.device) + '_watershed_img.png'))
    elif params.debug == 'plot':
        plot_image(dist_transform, cmap='gray')
        plot_image(joined)

    # Store into global measurements
    if 'watershed' not in outputs.measurements:
        outputs.measurements['watershed'] = {}
    outputs.measurements['watershed']['estimated_object_count'] = estimated_object_count

    # Store images
    outputs.images.append(analysis_image)

    return watershed_header, watershed_data, analysis_image
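# The counting recipe above, reduced to a hedged stand-alone sketch: distance-
# transform the binary mask, seed at its peaks, flood, then count the labels.
# The two-disk mask is synthetic; plantcv I/O and debug plumbing are omitted.
import numpy as np
from scipy import ndimage as ndi
from skimage.feature import peak_local_max
from skimage.segmentation import watershed

mask = np.zeros((100, 100), dtype=np.uint8)
yy, xx = np.ogrid[:100, :100]
mask[(yy - 40) ** 2 + (xx - 40) ** 2 < 15 ** 2] = 1  # two overlapping disks
mask[(yy - 60) ** 2 + (xx - 55) ** 2 < 15 ** 2] = 1

dist_transform = ndi.distance_transform_edt(mask)
peaks = peak_local_max(dist_transform, min_distance=10, labels=mask)
markers = np.zeros(mask.shape, dtype=int)
markers[tuple(peaks.T)] = np.arange(1, len(peaks) + 1)
labels = watershed(-dist_transform, markers, mask=mask)
estimated_object_count = len(np.unique(labels[labels > 0]))  # expect 2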
markers = np.zeros_like(coins)
markers[coins < 30] = 1
markers[coins > 150] = 2

fig, ax = plt.subplots(figsize=(4, 3))
# the 'spectral' colormap was removed in Matplotlib 2.2; 'nipy_spectral' is its replacement
ax.imshow(markers, cmap=plt.cm.nipy_spectral, interpolation='nearest')
ax.axis('off')
ax.set_title('markers')

"""
.. image:: PLOT2RST.current_figure

Finally, we use the watershed transform to fill regions of the elevation map
starting from the markers determined above:
"""
segmentation = morphology.watershed(elevation_map, markers)

fig, ax = plt.subplots(figsize=(4, 3))
ax.imshow(segmentation, cmap=plt.cm.gray, interpolation='nearest')
ax.axis('off')
ax.set_title('segmentation')

"""
.. image:: PLOT2RST.current_figure

This last method works even better, and the coins can be segmented and
labeled individually.
"""
from skimage.color import label2rgb
# Segment to connected objects auto_segmented_smoothed_threshold_mask = measure.label(smoothed_threshold_mask) # Watershed from scipy import ndimage as ndi from skimage.morphology import watershed from skimage.feature import peak_local_max distance = ndi.distance_transform_edt(smoothed_threshold_mask) local_maxi = peak_local_max(distance, indices=False, footprint=np.ones((4, 4)), labels=smoothed_threshold_mask) markers = ndi.label(local_maxi)[0] labels = watershed(-distance, markers, mask=smoothed_threshold_mask) plt.imshow(smoothed_threshold_mask) plt.show() plt.imshow(labels) plt.show() len(np.unique(labels)) plt.imshow(gray_image, cmap='gray') # #from scipy import ndimage #from skimage import morphology ## Black tophat transformation (see https://en.wikipedia.org/wiki/Top-hat_transform) #hat = ndimage.black_tophat(gray_image, 7) ## Combine with denoised image #hat -= 0.3 * gray_image
#load image image = cv2.imread(infile) #convert to greyscale grey = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY) #thresholding ret, thresh = cv2.threshold(grey, 49, 255, cv2.THRESH_BINARY + cv2.THRESH_OTSU) #watershed algorithm D = ndimage.distance_transform_edt(thresh) localMax = peak_local_max(D, indices=False, min_distance=20, labels=thresh) markers = ndimage.label(localMax, structure=np.ones((3, 3)))[0] labels = watershed(-D, markers, mask=thresh) for label in np.unique(labels): if label == 0: continue mask = np.zeros(grey.shape, dtype="uint8") mask[labels == label] = 255 contours = cv2.findContours(mask.copy(), cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE) contours = imutils.grab_contours(contours) # draw contours drawn = cv2.drawContours(grey, contours, -1, (255, 255, 255), 1) #drawing a rectangle in which text will live x, y, w, h = 0, 0, 375, 75
def watershedContours(image):
    # =============================================================================
    # # load the image and perform pyramid mean shift filtering
    # # to aid the thresholding step
    # shifted = cv2.pyrMeanShiftFiltering(image, 21, 51)
    # =============================================================================
    bw = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
    _, binarized = cv2.threshold(bw, 20, 255.0, cv2.THRESH_BINARY)
    opened = morphOperation(binarized, operation='opening', times=1, kernel_size=5)
    # =============================================================================
    # showImage(opened, name="Output")
    # =============================================================================
    closed = morphOperation(opened, operation='closing', times=1, kernel_size=5)
    eroded = morphOperation(closed, operation='erosion', times=1, kernel_size=35)
    # =============================================================================
    # showImage(closed, name="Output")
    # =============================================================================

    # compute the exact Euclidean distance from every binary
    # pixel to the nearest zero pixel, then find peaks in this
    # distance map
    D = ndimage.distance_transform_edt(eroded)
    localMax = peak_local_max(D, indices=False, min_distance=30, exclude_border=False)

    # perform a connected component analysis on the local peaks,
    # using 8-connectivity, then apply the Watershed algorithm
    markers = ndimage.label(localMax, structure=np.ones((3, 3)))[0]
    labels = watershed(-D, markers, mask=closed)

    # loop over the unique labels returned by the Watershed algorithm
    contours = []
    for label in np.unique(labels):
        # if the label is zero, we are examining the 'background'
        # so simply ignore it
        if label == 0:
            continue
        # otherwise, allocate memory for the label region and draw
        # it on the mask
        mask = np.zeros(bw.shape, dtype="uint8")
        mask[labels == label] = 255
        # detect contours in the mask and grab the largest one
        cnts, hierarchy = cv2.findContours(mask.copy(), cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)
        c = max(cnts, key=cv2.contourArea)
        contours.append(c)

    contours = np.array(contours)
    return contours
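# The per-label contour extraction used above, isolated into a hedged helper so
# the OpenCV version difference is explicit: findContours returns 3 values on
# OpenCV 3.x but 2 values on 2.x/4.x. `labels` is any integer label image.
import cv2
import numpy as np

def contours_from_labels(labels):
    contours = []
    for label in np.unique(labels):
        if label == 0:
            continue  # label 0 is background
        mask = np.zeros(labels.shape, dtype="uint8")
        mask[labels == label] = 255
        found = cv2.findContours(mask, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)
        cnts = found[0] if len(found) == 2 else found[1]  # version-portable unpacking
        if len(cnts) > 0:
            contours.append(max(cnts, key=cv2.contourArea))  # keep the largest contour
    return contours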
image = io.imread( '/Users/mahdi/Desktop/1005479.svs (1, 63517, 19793, 927, 1727)1.png', 1) # denoise image denoised = rank.median(image, disk(2)) # find continuous region (low gradient - # where less than 10 for this image) --> markers # disk(5) is used here to get a more smooth image markers = rank.gradient(denoised, disk(1)) < 10 markers = ndi.label(markers)[0] # local gradient (disk(2) is used to keep edges thin) gradient = rank.gradient(denoised, disk(2)) # process the watershed labels = watershed(gradient, markers) # display results fig, axes = plt.subplots(nrows=2, ncols=2, figsize=(8, 8), sharex=True, sharey=True) ax = axes.ravel() ax[0].imshow(image, cmap=plt.cm.gray, interpolation='nearest') ax[0].set_title("Original") ax[1].imshow(gradient, cmap=plt.cm.nipy_spectral, interpolation='nearest') ax[1].set_title("Local Gradient")
def nuclear_expansion_watershed(label, membrane): new_labels = morph.watershed(membrane, markers=label, watershed_line=False) return new_labels
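# nuclear_expansion_watershed grows every nuclear label outward until the
# fronts meet, using the membrane image as the flooding landscape. A synthetic
# sketch (the flat membrane and the two seed pixels are made-up values):
import numpy as np
from skimage.segmentation import watershed

membrane = np.zeros((64, 64))           # flat landscape: expansion is purely geometric
label = np.zeros((64, 64), dtype=int)
label[16, 16] = 1                       # two nuclear seeds
label[48, 48] = 2
expanded = watershed(membrane, markers=label, watershed_line=False)
assert expanded.min() > 0               # with no mask, every pixel joins a nucleus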
# read pixel size information orig_file = os.path.basename(im_file_list[0]).replace('im_seed_nan_', '') im = PIL.Image.open(os.path.join(training_dir, orig_file)) xres = 0.0254 / im.info['dpi'][0] * 1e6 # um yres = 0.0254 / im.info['dpi'][1] * 1e6 # um # load data full_dataset, full_file_list, full_shuffle_idx = \ cytometer.data.load_datasets(im_file_list, prefix_from='im', prefix_to=['im', 'lab'], nblocks=1) # remove borders between cells in the lab_train data. For this experiment, we want labels touching each other for i in range(full_dataset['lab'].shape[0]): full_dataset['lab'][i, :, :, 0] = watershed( image=np.zeros(shape=full_dataset['lab'].shape[1:3], dtype=full_dataset['lab'].dtype), markers=full_dataset['lab'][i, :, :, 0], watershed_line=False) # relabel background as "0" instead of "1" full_dataset['lab'][full_dataset['lab'] == 1] = 0 # plot example of data if DEBUG: i = 0 plt.clf() plt.subplot(121) plt.imshow(full_dataset['im'][i, :, :, :]) plt.subplot(122) plt.imshow(full_dataset['lab'][i, :, :, 0])
def _skimage_ws(self, NEED_WL): return morphology.watershed(self.dist, self.markers, connectivity=ndimage.generate_binary_structure(3, 3), watershed_line=NEED_WL)
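# _skimage_ws passes a full 3-D structuring element as the watershed
# connectivity; generate_binary_structure(3, 3) is the 3x3x3 cube, i.e.
# 26-connectivity. A quick sketch of what that argument looks like:
from scipy import ndimage
conn = ndimage.generate_binary_structure(3, 3)
print(conn.shape, conn.sum())  # (3, 3, 3) 27 -> every neighbour of a voxel is connected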
'im', 'lab', 'seg', 'mask', 'predlab_kfold_' + str(i_fold).zfill(2) ], nblocks=1) train_im = datasets['im'] train_seg = datasets['seg'] train_mask = datasets['mask'] train_reflab = datasets['lab'] train_predlab = datasets['predlab_kfold_' + str(i_fold).zfill(2)] del datasets # remove borders between labels for i in range(train_reflab.shape[0]): train_reflab[i, :, :, 0] = watershed(image=np.zeros(shape=train_reflab[i, :, :, 0].shape, dtype=np.uint8), markers=train_reflab[i, :, :, 0], watershed_line=False) # change the background label from 1 to 0 train_reflab[train_reflab == 1] = 0 if DEBUG: i = 250 plt.clf() plt.subplot(221) plt.imshow(train_im[i, :, :, :]) plt.subplot(222) plt.imshow(train_seg[i, :, :, 0]) plt.subplot(223) plt.imshow(train_reflab[i, :, :, 0]) plt.subplot(224)
def watershed_segmentation(rgb_img, mask, distance=10):
    """Uses the watershed algorithm to detect boundaries of objects.

    Needs a marker file which specifies area which is object (white), background (grey),
    unknown area (black).

    Inputs:
    rgb_img  = image to perform watershed on, needs to be 3D (i.e. np.shape = x,y,z, not np.shape = x,y)
    mask     = binary image, single channel, object in white and background in black
    distance = min_distance of local maximum

    Returns:
    analysis_images = list of output images

    :param rgb_img: numpy.ndarray
    :param mask: numpy.ndarray
    :param distance: int
    :return analysis_images: list
    """
    params.device += 1

    # Store debug mode
    debug = params.debug
    params.debug = None

    # # Will be deprecating OpenCV version 2
    # if cv2.__version__[0] == '2':
    #     dist_transform = cv2.distanceTransform(mask, cv2.cv.CV_DIST_L2, maskSize=0)
    # else:
    dist_transform = cv2.distanceTransformWithLabels(mask, cv2.DIST_L2, maskSize=0)[0]

    localMax = peak_local_max(dist_transform, indices=False, min_distance=distance, labels=mask)

    markers = ndi.label(localMax, structure=np.ones((3, 3)))[0]
    dist_transform1 = -dist_transform
    labels = watershed(dist_transform1, markers, mask=mask)

    img1 = np.copy(rgb_img)
    # Build the palette once rather than on every loop iteration
    rand_color = color_palette(len(np.unique(labels)))
    for x in np.unique(labels):
        img1[labels == x] = rand_color[x]

    img2 = apply_mask(img1, mask, 'black')
    joined = np.concatenate((img2, rgb_img), axis=1)
    estimated_object_count = len(np.unique(markers)) - 1

    # Reset debug mode
    params.debug = debug
    if params.debug == 'print':
        print_image(dist_transform, os.path.join(params.debug_outdir,
                                                 str(params.device) + '_watershed_dist_img.png'))
        print_image(joined, os.path.join(params.debug_outdir,
                                         str(params.device) + '_watershed_img.png'))
    elif params.debug == 'plot':
        plot_image(dist_transform, cmap='gray')
        plot_image(joined)

    outputs.add_observation(variable='estimated_object_count', trait='estimated object count',
                            method='plantcv.plantcv.watershed', scale='none', datatype=int,
                            value=estimated_object_count, label='none')

    # Store images
    outputs.images.append([dist_transform, joined])

    return joined
def extract_binary_masks_blob(A, neuron_radius, dims, num_std_threshold=1,
                              minCircularity=0.5, minInertiaRatio=0.2, minConvexity=.8):
    """ Function to extract masks from data. It will also perform a preliminary
    selection of good masks based on criteria like shape and size

    Parameters:
    ----------
    A: scipy.sparse matrix
        contains the components as output by the CNMF algorithm

    neuron_radius: float
        neuronal radius employed in the CNMF settings (gSiz)

    num_std_threshold: int
        number of times above iqr/1.349 (std estimator) the median to be considered as threshold for the component

    minCircularity: float
        parameter from cv2.SimpleBlobDetector

    minInertiaRatio: float
        parameter from cv2.SimpleBlobDetector

    minConvexity: float
        parameter from cv2.SimpleBlobDetector

    Returns:
    --------
    masks: np.array

    pos_examples:

    neg_examples:
    """
    params = cv2.SimpleBlobDetector_Params()
    params.minCircularity = minCircularity
    params.minInertiaRatio = minInertiaRatio
    params.minConvexity = minConvexity

    # Change thresholds
    params.blobColor = 255
    params.minThreshold = 0
    params.maxThreshold = 255
    params.thresholdStep = 3
    params.minArea = np.pi * ((neuron_radius * .75)**2)

    params.filterByColor = True
    params.filterByArea = True
    params.filterByCircularity = True
    params.filterByConvexity = True
    params.filterByInertia = True

    detector = cv2.SimpleBlobDetector_create(params)

    masks_ws = []
    pos_examples = []
    neg_examples = []

    for count, comp in enumerate(A.tocsc()[:].T):
        print(count)
        comp_d = np.array(comp.todense())
        gray_image = np.reshape(comp_d, dims, order='F')
        gray_image = (gray_image - np.min(gray_image)) / \
            (np.max(gray_image) - np.min(gray_image)) * 255
        gray_image = gray_image.astype(np.uint8)

        # segment using watershed
        markers = np.zeros_like(gray_image)
        elevation_map = sobel(gray_image)
        thr_1 = np.percentile(gray_image[gray_image > 0], 50)
        iqr = np.diff(np.percentile(gray_image[gray_image > 0], (25, 75)))
        thr_2 = thr_1 + num_std_threshold * iqr / 1.35
        markers[gray_image < thr_1] = 1
        markers[gray_image > thr_2] = 2
        edges = watershed(elevation_map, markers) - 1

        # only keep the largest object
        label_objects, _ = ndi.label(edges)
        sizes = np.bincount(label_objects.ravel())
        if len(sizes) > 1:
            idx_largest = np.argmax(sizes[1:])
            edges = (label_objects == (1 + idx_largest))
            edges = ndi.binary_fill_holes(edges)
        else:
            print('empty component')
            edges = np.zeros_like(edges)

        masks_ws.append(edges)
        keypoints = detector.detect((edges * 200.).astype(np.uint8))

        if len(keypoints) > 0:
            pos_examples.append(count)
        else:
            neg_examples.append(count)

    return np.array(masks_ws), np.array(pos_examples), np.array(neg_examples)
def get_ws():
    energy = np.array(h.File(heatmap_20_path, 'r')['main'])[0]
    #energy = energy[np.newaxis, :, :]

    ## CC
    seg = get_seg(energy, None, 16)
    nlabels, count = np.unique(seg, return_counts=True)  # count holds the number of pixels per label

    indices = np.argsort(count)
    nlabels = nlabels[indices]
    count = count[indices]

    least_index = np.where(count >= 1000)[0][0]
    count = count[least_index:]
    nlabels = nlabels[least_index:]

    rl = np.arange(seg.max() + 1).astype(seg.dtype)
    for i in range(seg.max() + 1):
        if i not in nlabels:
            rl[i] = 0
    seg = rl[seg]
    # segcc_path = folder + 'WholeSlice/SegCC/whole_segcc_' + str(z) + '.h5'
    # writeh5(segcc_path, 'main', seg)

    ## Watershed
    energy = np.array(h.File(heatmap_20_path, 'r')['main'])[0].astype(np.float32)
    threshold = 150
    energy_thres = energy - threshold
    markers_unlabelled = (energy_thres > 0).astype(int)
    markers, ncomponents = label_scipy(markers_unlabelled)
    labels_d, count_d = np.unique(markers, return_counts=True)
    rl = np.arange(markers.max() + 1).astype(markers.dtype)
    pixel_threshold = 100
    for i in range(len(labels_d)):
        if count_d[i] < pixel_threshold:
            rl[labels_d[i]] = 0
    markers = rl[markers]
    mask = (seg > 0).astype(int)  # uses cc
    labels = watershed(-energy, mask=mask, markers=markers)

    segws_path = folder + 'seg_' + '_' + str(threshold) + '.h5'
    writeh5(segws_path, 'main', labels)

    seg_eval = (labels > 0).astype(int)
    gt = np.array(h.File('/n/pfister_lab2/Lab/xingyu/Human/Dataset/test_gt60.h5', 'r')['main'])
    gt_eval = (gt > 0).astype(int)
    print(np.shape(seg_eval))
    print(np.shape(gt_eval))

    tp = np.sum(gt_eval & seg_eval)
    fp = np.sum((~gt_eval) & seg_eval)
    fn = np.sum(gt_eval & (~seg_eval))  # false negatives (was mislabelled tn)
    print(tp, fp, fn)
    precision = float(tp) / (tp + fp)
    recall = float(tp) / (tp + fn)  # recall = tp / (tp + fn), not tp / (tp + tn)
    print(precision, recall)
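# Sanity check for the foreground precision/recall computed above, on a toy
# pair of masks (the values are made up):
import numpy as np
gt = np.array([1, 1, 0, 0, 1])
pred = np.array([1, 0, 1, 0, 1])
tp = np.sum((gt == 1) & (pred == 1))   # 2
fp = np.sum((gt == 0) & (pred == 1))   # 1
fn = np.sum((gt == 1) & (pred == 0))   # 1
print(tp / (tp + fp), tp / (tp + fn))  # precision 0.667, recall 0.667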
def hierarchical_segmentation(grayscale, pickle_me=False):
    '''
    Segments a grayscale image by first applying adaptive histogram
    equalization to enhance contrast, followed by an Otsu threshold to
    isolate cell colonies. An adaptive thresholding method is then used
    to isolate clusters of close-packed cells. Estimated regions of
    interest for individual cells are finally generated using the
    Watershed algorithm, the cell regions are given unique labels, and
    various measurements are calculated for the regions from the
    original grayscale image.

    Inputs:
    -------
    grayscale: A grayscale image loaded into a NumPy array
    pickle_me: Boolean, dumps NumPy arrays of intermediate images to
        pickle files in the current working directory if True

    Outputs:
    --------
    labels: The labels associated with the thresholded regions
    props: The properties of the regions-of-interest measured from the
        original grayscale image
    '''
    # Apply CLAHE
    equalized = equalize_adapthist(grayscale, ntiles_x=16, ntiles_y=16,
                                   clip_limit=0.01, nbins=256)

    # Otsu threshold of CLAHE equalized "grayscale"
    otsu1 = threshold_otsu(equalized)
    print("Otsu threshold: {0}".format(otsu1))

    thresh1 = remove_small_objects(equalized > otsu1)
    colonies = thresh1 * equalized
    thresh2 = threshold_adaptive(colonies, 21)

    # Use morphological opening to help separate clusters and remove noise
    opened = binary_opening(thresh2, selem=disk(3))
    clusters = opened * equalized

    # Generate labels for Watershed using local maxima of the distance
    # transform as markers
    distance = distance_transform_edt(opened)
    local_maxi = peak_local_max(distance, min_distance=6, indices=False, labels=opened)
    markers = ndimage.label(local_maxi)[0]
    # plt.imshow(markers)

    # Apply Watershed
    labels = watershed(-distance, markers, mask=opened)
    # plt.imshow(label2rgb(labels))

    # Measure labeled region properties in the illumination-corrected image
    # (not the contrast-stretched image)
    props = regionprops(labels, intensity_image=grayscale)

    # fig, axs = plt.subplots(2, 4)
    # axs[0, 0].imshow(equalized, cmap=plt.cm.gray)
    # axs[0, 0].set_title('CLAHE Equalized')
    # axs[0, 1].imshow(thresh1, cmap=plt.cm.gray)
    # axs[0, 1].set_title('Threshold 1, Otsu')
    # axs[0, 2].imshow(colonies, cmap=plt.cm.gray)
    # axs[0, 2].set_title('Colonies')
    # axs[0, 3].imshow(thresh2, cmap=plt.cm.gray)
    # axs[0, 3].set_title('Threshold 2, Adaptive')
    # axs[1, 0].imshow(opened, cmap=plt.cm.gray)
    # axs[1, 0].set_title('Threshold 2, Opened')
    # axs[1, 1].imshow(clusters, cmap=plt.cm.gray)
    # axs[1, 1].set_title('Clusters')
    # axs[1, 2].imshow(distance, cmap=plt.cm.gray)
    # axs[1, 2].set_title('Distance Transform')
    # axs[1, 3].imshow(label2rgb(labels))
    # axs[1, 3].set_title('Labelled Segmentation')

    if pickle_me:
        equalized.dump('CLAHE_equalized.p')
        thresh1.dump('thresh1_otsu.p')
        colonies.dump('colonies.p')
        thresh2.dump('thresh2_adaptive.p')
        opened.dump('opened_thresh2.p')
        clusters.dump('clusters.p')
        distance.dump('distance_transform.p')
        markers.dump('max_dist_markers.p')

    return (labels, props)
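# Hypothetical call into hierarchical_segmentation; the file name is
# illustrative only, and skimage I/O is assumed for loading.
from skimage import io
gray = io.imread('colonies.tif', as_gray=True)  # hypothetical input image
labels, props = hierarchical_segmentation(gray)
print(len(props), 'labelled cell regions')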
pixel_threshold = COUNT_THRESH2 for i in range(len(labels_d)): if count_d[i] < pixel_threshold: rl[labels_d[i]] = 0 markers = rl[markers] # Mask for watershed from CC output mask = (seg > 0).astype(int) # Watershed with markers and mask labels = watershed(-energy, mask=mask, markers=markers) # # show contrast results # plt.subplot(1,4,1) # plt.imshow(markers_unlabelled[0,:,:]) # plt.title('original seed') # plt.subplot(1,4,2) # plt.imshow(labels[0]) # plt.title('original seg') # -------------------------------------------------------------------------------------- # iterative watershed # -------------------------------------------------------------------------------------- flags = needIteration(labels, VESICLE_AREA) erosion_map = np.zeros(labels.shape)
# Identify markers in the distance map
localMax = peak_local_max(distance_withred, indices=False,
                          min_distance=minimum_distance_map_distance, labels=thresh)

# perform a connected component analysis on the local peaks,
# using 8-connectivity, then apply the Watershed algorithm
markers_withred = ndimage.label(localMax, structure=np.ones((3, 3)))[0]
markersim = np.array(markers_withred * 1000000, dtype="uint8")  # was `markers`, which is not defined in this snippet

# In[]: Identify each segmented region
labels = watershed(-distance_withred, markers_withred, mask=thresh)
print("{} unique segments found".format(len(np.unique(labels)) - 1))

# loop over the unique labels returned by the Watershed algorithm
maskim = np.zeros(np.shape(image), dtype="uint8")
val = 0
radius = 6
r = np.zeros(len(np.unique(labels)) + 1, dtype="uint8")
mean_coloursBRG = np.zeros((len(np.unique(labels)) + 1, 4), dtype="uint8")
colour_threshold = 160  # 120  # COLOUR THRESHOLD
for label in np.unique(labels):
    # if the label is zero, we are examining the 'background' so simply ignore it
    val = val + 1
                                cv2.KMEANS_RANDOM_CENTERS)
center = np.uint8(center)
res = center[label.flatten()]
res2 = res.reshape(img.shape)
h, s, v = cv2.split(res2)

# Now we want to separate the two objects in the image
# Generate the markers as local maxima of the distance to the background
distance = ndi.distance_transform_edt(v)
local_maxi = peak_local_max(distance, indices=False, footprint=np.ones((3, 3)), labels=v)
markers = ndi.label(local_maxi)[0]
labels = watershed(-distance, markers, mask=v)

# 'box-forced' was removed in Matplotlib 2.2; 'box' is the replacement
fig, axes = plt.subplots(ncols=3, figsize=(9, 3), sharex=True, sharey=True,
                         subplot_kw={'adjustable': 'box'})
ax = axes.ravel()

ax[0].imshow(v, cmap=plt.cm.gray, interpolation='nearest')
ax[0].set_title('Overlapping objects')
ax[1].imshow(-distance, cmap=plt.cm.gray, interpolation='nearest')
ax[1].set_title('Distances')
# the 'spectral' colormap was removed in Matplotlib 2.2; use 'nipy_spectral'
ax[2].imshow(labels, cmap=plt.cm.nipy_spectral, interpolation='nearest')
ax[2].set_title('Separated objects')

for a in ax:
def cell_boundary(tubulin, hoechst, threshold=80, markers=None):
    def build_gabor_filters():
        filters = []
        ksize = 9
        for theta in np.arange(0, np.pi, np.pi / 8):
            kern = cv2.getGaborKernel((ksize, ksize), 4.0, theta, 6.0, 0.5, 0, ktype=cv2.CV_32F)
            kern /= kern.sum()
            filters.append(kern)
        return filters

    def process_gabor(img, filters):
        accum = np.zeros_like(img)
        for kern in filters:
            fimg = cv2.filter2D(img, cv2.CV_16UC1, kern)
            np.maximum(accum, fimg, accum)
        return accum

    p2 = np.percentile(tubulin, 2)
    p98 = np.percentile(tubulin, 98)
    tubulin = exposure.rescale_intensity(tubulin, in_range=(p2, p98))
    p2 = np.percentile(hoechst, 2)
    p98 = np.percentile(hoechst, 98)
    hoechst = exposure.rescale_intensity(hoechst, in_range=(p2, p98))

    # img = np.maximum(tubulin, hoechst)
    img = tubulin

    img = morphology.erosion(img, morphology.square(3))
    filters = build_gabor_filters()
    gabor = process_gabor(img, filters)

    gabor = cv2.convertScaleAbs(gabor, alpha=(255.0 / 65535.0))
    ret, bin1 = cv2.threshold(gabor, threshold, 255, cv2.THRESH_BINARY)

    # gaussian blur on gabor filter result
    ksize = 31
    blur = cv2.GaussianBlur(bin1, (ksize, ksize), 0)
    ret, cells_mask = cv2.threshold(blur, threshold, 255, cv2.THRESH_OTSU)
    # ret, bin2 = cv2.threshold(blur, 70, 255, cv2.THRESH_BINARY)

    if markers is None:
        # get markers for watershed from the hoechst channel
        hoechst_8 = cv2.convertScaleAbs(hoechst, alpha=(255.0 / 65535.0))
        blur_nuc = cv2.GaussianBlur(hoechst_8, (ksize, ksize), 0)
        ret, bin_nuc = cv2.threshold(blur_nuc, 0, 255, cv2.THRESH_OTSU)
        markers = ndi.label(bin_nuc)[0]

    labels = morphology.watershed(-gabor, markers, mask=cells_mask)

    boundaries_list = list()
    # loop over the labels
    for (i, l) in enumerate([l for l in np.unique(labels) if l > 0]):
        # find contour of mask
        cell_boundary = np.zeros(shape=labels.shape, dtype=np.uint8)
        cell_boundary[labels == l] = 255
        cnts = cv2.findContours(cell_boundary.copy(), cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)[-2]
        contour = cnts[0]

        boundary = np.array([[x, y] for x, y in [i[0] for i in contour]], dtype=np.float32)
        if len(boundary) >= 3:
            boundaries_list.append({'id': l, 'boundary': Polygon(boundary)})

    # cells_mask holds 0/255 values, so the original `cells_mask > 255` was always False
    return boundaries_list, cells_mask > 0
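# The Gabor bank above samples 8 orientations; each kernel comes from
# cv2.getGaborKernel((ksize, ksize), sigma, theta, lambda, gamma, psi).
# A minimal sketch of a single kernel using the same values as cell_boundary:
import cv2
kern = cv2.getGaborKernel((9, 9), 4.0, 0.0, 6.0, 0.5, 0, ktype=cv2.CV_32F)
kern /= kern.sum()  # normalise so filtering roughly preserves mean intensity
print(kern.shape)   # (9, 9)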
# Compute the compacity (packing fraction) of the granular material
compacity = mask[100:500, 200:600].mean()

# Separate the different coins
# ------------------------------------------------
erosion = morphology.binary_erosion(mask, morphology.disk(9))
erosion = morphology.binary_erosion(erosion, morphology.disk(5))
labs = morphology.label(erosion, background=0)
labs += 1

from scipy import ndimage
elevation_map = -ndimage.distance_transform_edt(mask)
regions = morphology.watershed(elevation_map, markers=labs, mask=mask)

plt.figure(figsize=(12, 3))
plt.subplot(131)
plt.imshow(labs, cmap='nipy_spectral')  # the 'spectral' colormap was removed in Matplotlib 2.2
plt.axis('off')
plt.subplot(132)
plt.imshow(elevation_map, cmap='nipy_spectral')
plt.axis('off')
plt.subplot(133)
plt.imshow(regions, cmap='nipy_spectral')
plt.axis('off')
plt.tight_layout()

# remove borders and relabel regions
l0, l1 = img.shape
C_binary_cut = np.zeros([DIM_LAT, DIM_LON])
# r = 500  # the bounding box side = 2r
# C_binary_cut[I_idx[0]-r:I_idx[0]+r, I_idx[1]-r:I_idx[1]+r] = C_binary[I_idx[0]-r:I_idx[0]+r, I_idx[1]-r:I_idx[1]+r]

C_binary8 = C_binary.astype(np.uint8)
kernel = np.ones((3, 3), np.uint8)
opening = cv2.morphologyEx(C_binary8, cv2.MORPH_OPEN, kernel, iterations=2)
dist_transform = cv2.distanceTransform(opening, cv2.DIST_L2, 0)
ret, sure_fg = cv2.threshold(dist_transform, 0.04 * dist_transform.max(), 255, 0)

labels_ws = watershed(-dist_transform, C_flag_core, mask=C_binary8)
C_binary8_second = np.where(labels_ws > 0, C_binary8, 0)
C_flag_overflow = np.where(labels_ws == 0, 0, labels_ws)
C_flag_overflow = C_flag_overflow.astype(np.uint8)

# Compare the mask obtained from the previous mask and the current overflow mask
C_flag_compared = C_flag_overflow - C_flag_core
# identify separate blobs (the deprecated neighbors=4 kwarg is connectivity=1 in current skimage)
blobs_labels_compared = measure.label(C_flag_compared, connectivity=1, background=0)
volume_core = np.count_nonzero(C_flag_core)
C_flag = C_flag_core[:]
def nps_finder_2d(stack_in, name_in, thresh_type, smooth_type, smooth_size=1, gauss_size=0, cont_size=0, sep_method='watershed', mass_cutoff=200, area_cutoff=0, max_radius=5, min_radius=1, test=True, plot=True, save=True): # np.copy() used because peak_local_max does weird things to the image histogram if we use the actual image print('mass_cutoff is', mass_cutoff) z_size, y_size, x_size = np.shape(stack_in) # Generate flatted image from stack max_int_proj = stack_in.max(axis=0) # Image to store labels labeled_image = np.zeros_like(max_int_proj) # Smooth data smoothed = preprocessor.smooth(np.copy(max_int_proj), smooth_type, smooth_size) smoothed = preprocessor.bandpass(np.copy(smoothed), gauss_size) smoothed = preprocessor.contrast(np.copy(smoothed), cont_size) # Calculate threshold thresh = preprocessor.threshold(np.copy(smoothed), thresh_type) # thresh = threshold_local(smoothed, block_size = 31, offset=40) print('thresh is', thresh) im_max = smoothed.max() print('im_max is', im_max) binary = smoothed > thresh # Two approaches # 1. Identify local maxima in real-space image - separate by watershedding if sep_method == 'watershed': print('watershedding') local_maxi = peak_local_max(np.copy(smoothed), min_distance=min_radius, threshold_abs=thresh, indices=False, labels=np.copy(smoothed)) labeled_image = ndimage.label(local_maxi, structure=square(3))[0] markers = ndimage.label(local_maxi)[0] labeled_image = watershed(-labeled_image, markers, mask=binary) # Calculate properties of particles # Properties - area, coords, label, radius properties = [] columns = ('x', 'y', 'area', 'radius', 'intensity') indices = [] # f_prop = regionprops(labeled_image, intensity_image = max_int_proj) f_prop = regionprops(labeled_image, intensity_image=max_int_proj) for d in f_prop: radius = (d.area / np.pi)**0.5 properties.append([ d.weighted_centroid[1], d.weighted_centroid[0], d.area, radius, d.mean_intensity * d.area ]) indices.append(d.label) # if not len(indices): # all_props = pd.DataFrame([], index=[]) indices = pd.Index(indices, name='label') properties = pd.DataFrame(properties, index=indices, columns=columns) properties = properties[properties['intensity'] < 10000] properties = properties[properties['intensity'] > mass_cutoff] properties = properties[properties['area'] > area_cutoff] properties['np_smooth_type'] = smooth_type properties['np_smooth_size'] = smooth_size properties['np_thresh_method'] = thresh_type properties['separation_method'] = sep_method particles_averaged = properties # ################################ # # Create data array in which clustered particles are averaged # # Store computed values # values = [] # if len(properties) == 0: # values.append(['NaN', 'NaN', 'NaN', 'NaN', 'NaN']) # elif len(properties) > 0: # if len(properties) == 1: # properties['particle'] = 1 # elif len(properties) > 1: # # Clustering to eliminate maxima that are too close together # positions = properties[['x', 'y']].values # # Distance matrix is n-particles x n-particles in size - reckon it gives the interparticle separation # # This gives the upper triangle of the distance matrix # dist_mat = dist.pdist(positions) # link_mat = hier.linkage(dist_mat) # # fcluster assigns each of the particles in positions a cluster to which it belongs # cluster_idx = hier.fcluster(link_mat, max_radius, criterion='distance') # properties['particle'] = cluster_idx # particles = np.unique(properties['particle'].values) # for i in particles: # # Calculate weighted average position of particles # current = 
properties[properties['particle'] == i] # # Normalisation constant of weighted average # norm = np.sum(current['intensity']) # x_av = np.mean(current['x']) # y_av = np.mean(current['y']) # a_total = np.sum(current['area']) # # Geometric cut-off to particle size # if a_total > 2 and a_total < 400: # values.append([x_av, y_av, a_total, norm, i]) # # Data Frame containing the weighted averages of the locations of the particles # columns = ('x', 'y', 'a_total', 'intensity', 'particle') # particles_averaged = pd.DataFrame(values, columns = columns) # print 'Found', len(particles_averaged), 'particles' if test == True: vis.testplot_parts(max_int_proj, name_in, particles_averaged['x'], particles_averaged['y'], smoothed, binary, labeled_image) if plot == True: # Plot data plt.clf() fig = plt.figure(figsize=(12, 12)) ax = fig.add_subplot(111) ax.imshow(max_int_proj, cmap='gray', vmin=0, vmax=255) ax.scatter(particles_averaged['x'], particles_averaged['y'], facecolors='none', edgecolors='red', s=100) if save == True: outname = name_in + '_particles.tif' plt.savefig(outname, bbox_inches='tight') print(outname, 'saved') plt.close() return particles_averaged
def water(image, image_rgb, image_labels, index, type_trainortest):
    # Filter to suppress noise.
    # Median filter: returns the median of the local neighbourhood and
    # replaces every pixel in that neighbourhood with it.
    denoised = filters.rank.median(image, morphology.disk(5))

    # Gradient computation: returns the local gradient of the image
    # (maximum - minimum), replacing every pixel in the region with that value.
    markers_t = filters.rank.gradient(denoised, morphology.disk(5))  # disk-shaped filter of radius 5
    ax_img = plt.subplot(2, 2, 1)
    ax_img.set_title("gradient")
    ax_img.imshow(markers_t, 'gray')
    print("markers_t")
    print(markers_t)

    # Show the histogram
    ax_hist = plt.subplot(2, 2, 2)
    ax_hist.set_title('hist')
    ax_hist.hist(markers_t.ravel(), bins=256)  # ravel flattens the array to 1-D
    ax_hist.ticklabel_format(axis='y', style='scientific', scilimits=(0, 0))
    ax_hist.set_xlabel('Pixel intensity')

    # Compute the gradient and take low-gradient regions as seeds:
    # pixels whose gradient (of the median-filtered image) is below 10 become start markers
    markers = filters.rank.gradient(denoised, morphology.disk(3)) < 10
    print("markers111")
    print(markers)
    # Label the seed regions by connectivity
    markers = ndi.label(markers)[0]
    print("markers222")
    print(markers)

    # Gradient computation
    gradient = filters.rank.gradient(denoised, morphology.disk(3))
    # Watershed segmentation grown from the labelled seed regions
    labels = morphology.watershed(gradient, markers, mask=image)
    print("labels:")
    #print(labels)
    print(labels.shape[0])
    print(labels.shape[1])
    # print(labels.shape, labels.max())

    # Convert the region segmentation into an edge map
    oriimg = image_rgb.copy()
    bwimg = image.copy()
    for i in range(1, labels.shape[0] - 1):
        for j in range(1, labels.shape[1] - 1):
            rect = labels[i - 1:i + 2, j - 1:j + 2]
            #print(np.mean(rect))
            # If the label at (i, j) differs from the mean of its 3x3 neighbourhood, it lies on an edge
            if labels[i, j] != np.mean(rect):
                oriimg[i, j] = [255, 0, 0]
                bwimg[i, j] = 255
            else:
                bwimg[i, j] = 0

    # Grey-scale dilation to widen the edges
    bwimg_wide = ndi.grey_dilation(bwimg, size=(5, 5))
    # bwimg_wide = ndi.grey_dilation(bwimg, footprint=ndi.generate_binary_structure(2, 1))
    # bwimg_wide = ndi.grey_dilation(bwimg_wide, footprint=ndi.generate_binary_structure(2, 1))

    # Write the outputs
    cv2.imwrite('data/' + type_trainortest + '/water_gradient10_kuo3/' + str(index) + '.png', oriimg)
    cv2.imwrite('data/' + type_trainortest + '/wateredge_gradient10_kuo3/' + str(index) + '.png', bwimg)
    cv2.imwrite('data/' + type_trainortest + '/wateredgewide_gradient10_kuo3/' + str(index) + '.png', bwimg_wide)

    ax_oriimg = plt.subplot(2, 2, 3)
    ax_oriimg.set_title('original')
    ax_oriimg.imshow(image_rgb)
    ax_segimg = plt.subplot(2, 2, 4)
    ax_segimg.set_title('edge')
    ax_segimg.imshow(oriimg)
    #plt.show()

    fig, axes = plt.subplots(nrows=2, ncols=2, figsize=(6, 6))
    axes = axes.ravel()
    ax0, ax1, ax2, ax3 = axes

    ax0.imshow(image, cmap=plt.cm.gray, interpolation='nearest')
    ax0.set_title("Original")
    ax1.imshow(image_labels, cmap=plt.cm.gray, interpolation='nearest')
    ax1.set_title("Labels")
    ax2.imshow(gradient, cmap=plt.cm.gray, interpolation='nearest')
    ax2.set_title("Gradient")
    ax3.imshow(bwimg_wide, cmap=plt.cm.gray, interpolation='nearest')
    ax3.set_title("WideEdge")

    for ax in axes:
        ax.axis('off')
    fig.tight_layout()
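# The nested loop above flags a pixel as an edge when its label differs from
# the mean of its 3x3 neighbourhood; scikit-image ships the same boundary test
# vectorised. A hedged alternative sketch on a toy label image:
import numpy as np
from skimage.segmentation import find_boundaries

labels_demo = np.zeros((8, 8), dtype=int)
labels_demo[2:6, 2:6] = 1
edge_mask = find_boundaries(labels_demo, mode='thick')  # boolean edge map
bwimg_alt = edge_mask.astype('uint8') * 255             # same 0/255 convention as bwimg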