def test_empty_selem():
    # check that min, max and mean return zeros if structuring element is
    # empty
    image = np.zeros((5, 5), dtype=np.uint16)
    out = np.zeros_like(image)
    mask = np.ones_like(image, dtype=np.uint8)
    res = np.zeros_like(image)
    image[2, 2] = 255
    image[2, 3] = 128
    image[1, 2] = 16

    elem = np.array([[0, 0, 0], [0, 0, 0]], dtype=np.uint8)

    rank.mean(image=image, selem=elem, out=out, mask=mask,
              shift_x=0, shift_y=0)
    assert_equal(res, out)
    rank.geometric_mean(image=image, selem=elem, out=out, mask=mask,
                        shift_x=0, shift_y=0)
    assert_equal(res, out)
    rank.minimum(image=image, selem=elem, out=out, mask=mask,
                 shift_x=0, shift_y=0)
    assert_equal(res, out)
    rank.maximum(image=image, selem=elem, out=out, mask=mask,
                 shift_x=0, shift_y=0)
    assert_equal(res, out)
def filter_img(img):
    selem = square(11)
    img[:, :, 0] = rank.mean(img[:, :, 0], selem=selem)
    img[:, :, 1] = rank.mean(img[:, :, 1], selem=selem)
    img[:, :, 2] = rank.mean(img[:, :, 2], selem=selem)
    #return np.array(img, dtype=float)
    return img_as_float(img)
def test_inplace_output(self):
    # rank filters are not supposed to filter inplace
    selem = disk(20)
    image = (np.random.rand(500, 500) * 256).astype(np.uint8)
    out = image
    with pytest.raises(NotImplementedError):
        rank.mean(image, selem, out=out)
def test_random_sizes():
    # make sure the size is not a problem
    elem = np.array([[1, 1, 1], [1, 1, 1], [1, 1, 1]], dtype=np.uint8)
    for m, n in np.random.randint(1, 101, size=(10, 2)):
        mask = np.ones((m, n), dtype=np.uint8)

        image8 = np.ones((m, n), dtype=np.uint8)
        out8 = np.empty_like(image8)
        rank.mean(image=image8, selem=elem, mask=mask, out=out8,
                  shift_x=0, shift_y=0)
        assert_equal(image8.shape, out8.shape)
        rank.mean(image=image8, selem=elem, mask=mask, out=out8,
                  shift_x=+1, shift_y=+1)
        assert_equal(image8.shape, out8.shape)

        image16 = np.ones((m, n), dtype=np.uint16)
        out16 = np.empty_like(image16)
        rank.mean(image=image16, selem=elem, mask=mask, out=out16,
                  shift_x=0, shift_y=0)
        assert_equal(image16.shape, out16.shape)
        rank.mean(image=image16, selem=elem, mask=mask, out=out16,
                  shift_x=+1, shift_y=+1)
        assert_equal(image16.shape, out16.shape)

        rank.mean_percentile(image=image16, mask=mask, out=out16,
                             selem=elem, shift_x=0, shift_y=0, p0=.1, p1=.9)
        assert_equal(image16.shape, out16.shape)
        rank.mean_percentile(image=image16, mask=mask, out=out16,
                             selem=elem, shift_x=+1, shift_y=+1, p0=.1, p1=.9)
        assert_equal(image16.shape, out16.shape)
def test_selem_dtypes():
    image = np.zeros((5, 5), dtype=np.uint8)
    out = np.zeros_like(image)
    mask = np.ones_like(image, dtype=np.uint8)
    image[2, 2] = 255
    image[2, 3] = 128
    image[1, 2] = 16

    for dtype in (np.uint8, np.uint16, np.int32, np.int64,
                  np.float32, np.float64):
        elem = np.array([[0, 0, 0], [0, 1, 0], [0, 0, 0]], dtype=dtype)
        rank.mean(image=image, selem=elem, out=out, mask=mask,
                  shift_x=0, shift_y=0)
        assert_equal(image, out)
        rank.mean_percentile(image=image, selem=elem, out=out, mask=mask,
                             shift_x=0, shift_y=0)
        assert_equal(image, out)
def zoom(imName, img, imageclass, conf):
    imName = imName + 1
    im = Image.open(img)
    if not isdir(conf.output_folder + imageclass):
        try:
            mkdir(conf.output_folder + imageclass)
        except OSError:
            pass
    fileMatch = 0
    for file in listdir(conf.output_folder + "/" + imageclass):
        if fnmatch.fnmatch(file, str(imName) + "_zoom_" + '*.jpg'):
            fileMatch = fileMatch + 1
    if fileMatch >= 3:
        if conf.VERBOSE:
            print("exists, so breaking: #" + str(imName) + " in " + str(imageclass))
        return str(imName)
    x, y = im.size
    x1 = 0
    y1 = 0
    means = []
    while y1 <= y - 480:
        while x1 <= x - 640:
            ims = im.crop((x1, y1, x1 + 640, y1 + 480))
            mean1 = mean(np.array(ims)[:, :, 1], disk(700))
            means.append(((x1, y1), int(mean1[0][0])))
            x1 = x1 + 160
        x1 = 0
        y1 = y1 + 120
    zoomNRC(imName, img, imageclass, conf, conf.mean_threshold, means, im)
    return str(imName)
def watershed(image):
    hsv_image = color.rgb2hsv(image)

    low_res_image = rescale(hsv_image[:, :, 0], SCALE)
    local_mean = mean(low_res_image, disk(50))
    local_minimum_flat = np.argmin(local_mean)
    local_minimum = np.multiply(
        np.unravel_index(local_minimum_flat, low_res_image.shape),
        round(1 / SCALE)).astype(int)

    certain_bone_pixels = np.full_like(hsv_image[:, :, 0], False, bool)
    certain_bone_pixels[
        local_minimum[0] - INITIAL_WINDOW_SIZE // 2:local_minimum[0] + INITIAL_WINDOW_SIZE // 2,
        local_minimum[1] - INITIAL_WINDOW_SIZE // 2:local_minimum[1] + INITIAL_WINDOW_SIZE // 2
    ] = True

    certain_non_bone_pixels = np.full_like(hsv_image[:, :, 0], False, bool)
    certain_non_bone_pixels[0:BORDER_SIZE, :] = True
    certain_non_bone_pixels[-BORDER_SIZE:-1, :] = True
    certain_non_bone_pixels[:, 0:BORDER_SIZE] = True
    certain_non_bone_pixels[:, -BORDER_SIZE:-1] = True

    smoothed_hsv = median(hsv_image[:, :, 0], disk(50))
    threshold = MU * np.median(smoothed_hsv[certain_bone_pixels])

    possible_bones = np.zeros_like(hsv_image[:, :, 0])
    possible_bones[smoothed_hsv < threshold] = 1

    markers = np.zeros_like(possible_bones)
    markers[certain_bone_pixels] = 1
    markers[certain_non_bone_pixels] = 2

    labels = morphology.watershed(-possible_bones, markers)

    return labels
def skmean(image):
    from skimage.filters.rank import mean

    mean_filtered = mean(image, disk(30))
    print(mean_filtered.min(), mean_filtered.max())
    return mean_filtered
def test_smallest_selem16():
    # check that min, max and mean return identity if structuring element
    # contains only central pixel
    image = np.zeros((5, 5), dtype=np.uint16)
    out = np.zeros_like(image)
    mask = np.ones_like(image, dtype=np.uint8)
    image[2, 2] = 255
    image[2, 3] = 128
    image[1, 2] = 16

    elem = np.array([[1]], dtype=np.uint8)
    rank.mean(image=image, selem=elem, out=out, mask=mask,
              shift_x=0, shift_y=0)
    assert_equal(image, out)
    rank.minimum(image=image, selem=elem, out=out, mask=mask,
                 shift_x=0, shift_y=0)
    assert_equal(image, out)
    rank.maximum(image=image, selem=elem, out=out, mask=mask,
                 shift_x=0, shift_y=0)
    assert_equal(image, out)
def substract_mean(self, radius_disk):
    circle = disk(radius_disk)
    for i_rot in np.arange(self.stack_height):
        stack_slice = self.score_stack[:, :, i_rot]
        norm_factor = max(stack_slice.min(), stack_slice.max(), key=abs)
        stack_slice *= 1. / norm_factor
        stack_slice = np.array(rank.mean(stack_slice, selem=circle),
                               dtype=np.float32)
        stack_slice *= norm_factor
        self.score_stack[:, :, i_rot] = self.score_stack[:, :, i_rot] - stack_slice
def test_16bit():
    image = np.zeros((21, 21), dtype=np.uint16)
    selem = np.ones((3, 3), dtype=np.uint8)

    for bitdepth in range(17):
        value = 2 ** bitdepth - 1
        image[10, 10] = value
        assert rank.minimum(image, selem)[10, 10] == 0
        assert rank.maximum(image, selem)[10, 10] == value
        assert rank.mean(image, selem)[10, 10] == int(value / selem.size)
def smooth(self):
    # TODO: check that there are no NaNs in the flat-field image
    mask = self.flatField == 0
    from skimage.filters.rank import median, mean
    from skimage.morphology import disk
    ff = mean(median(self.flatField, disk(5), mask=~mask),
              disk(13), mask=~mask)
    return ff.astype(float) / ff.max(), mask
def _coarsenImage(image, f):
    '''
    seems to be a more precise (but slower) way to down-scale an image
    '''
    from skimage.morphology import square
    from skimage.filters import rank
    from skimage.transform._warps import rescale
    selem = square(f)
    arri = rank.mean(image, selem=selem)
    return rescale(arri, 1 / f, order=0)
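# A minimal usage sketch for _coarsenImage (added for illustration, not part
# of the original source). It assumes a synthetic uint8 image and a factor of
# 4: rank.mean averages over each 4x4 window before the order-0 rescale, so
# the result has roughly 1/4 of the original extent along each axis.
import numpy as np

demo_img = (np.random.rand(64, 64) * 255).astype(np.uint8)  # hypothetical input
coarse = _coarsenImage(demo_img, 4)
print(coarse.shape)  # expected: (16, 16)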
def test_trivial_selem8():
    # check that min, max and mean return identity if structuring element
    # contains only central pixel
    image = np.zeros((5, 5), dtype=np.uint8)
    out = np.zeros_like(image)
    mask = np.ones_like(image, dtype=np.uint8)
    image[2, 2] = 255
    image[2, 3] = 128
    image[1, 2] = 16

    elem = np.array([[0, 0, 0], [0, 1, 0], [0, 0, 0]], dtype=np.uint8)
    rank.mean(image=image, selem=elem, out=out, mask=mask,
              shift_x=0, shift_y=0)
    assert_equal(image, out)
    rank.geometric_mean(image=image, selem=elem, out=out, mask=mask,
                        shift_x=0, shift_y=0)
    assert_equal(image, out)
    rank.minimum(image=image, selem=elem, out=out, mask=mask,
                 shift_x=0, shift_y=0)
    assert_equal(image, out)
    rank.maximum(image=image, selem=elem, out=out, mask=mask,
                 shift_x=0, shift_y=0)
    assert_equal(image, out)
def binary_BF(image, meanse=disk(10), edgefilt='prewitt', opense=disk(10),
              fill_first=False, bi_thresh=0.000025, tophatse=disk(20)):
    #convertim = img_as_ubyte(image)
    meanim = rank.mean(image, meanse)
    if edgefilt == 'prewitt':
        edgeim = prewitt(meanim)
    elif edgefilt == 'sobel':
        edgeim = sobel(meanim)
    elif edgefilt == 'scharr':
        edgeim = scharr(meanim)
    elif edgefilt == 'roberts':
        edgeim = roberts(meanim)

    closeim = closing(edgeim, opense)
    openim = opening(closeim, opense)

    if fill_first:
        seed = np.copy(openim)
        seed[1:-1, 1:-1] = openim.max()
        mask = openim
        filledim = reconstruction(seed, mask, method='erosion')
        binarim = filledim > bi_thresh
    else:
        binarim = openim > bi_thresh * np.mean(openim)
        seed = np.copy(binarim)
        seed[1:-1, 1:-1] = binarim.max()
        mask = binarim
        filledim = reconstruction(seed, mask, method='erosion')

    tophim = filledim - closing(white_tophat(filledim, tophatse), opense) > 0.01

    fig, ax = plt.subplots(nrows=2, ncols=4, figsize=(16, 8))
    ax[0][0].imshow(image, cmap='gray')
    ax[0][1].imshow(meanim, cmap='gray')
    ax[0][2].imshow(edgeim, cmap='gray', vmax=4 * np.mean(edgeim))
    ax[0][3].imshow(closeim, cmap='gray', vmax=4 * np.mean(closeim))
    ax[1][0].imshow(openim, cmap='gray', vmax=4 * np.mean(openim))
    ax[1][1].imshow(binarim, cmap='gray')
    ax[1][2].imshow(filledim, cmap='gray')
    ax[1][3].imshow(tophim, cmap='gray')
    for axes in ax:
        for axe in axes:
            axe.axis('off')
    fig.tight_layout()
    return tophim
def _compute_local_cloudiness(self) -> None:
    """
    Computes the local mean cloudiness from binary cloud masks.

    Returns:
        None
    """
    # k = np.ones([self.cloud_window_size, self.cloud_window_size])
    # s = convolve(self.cloudy.astype(int), k, mode='constant', cval=0.0)
    # count = convolve(np.ones(self.cloudy.shape), k, mode='constant', cval=0.0)
    # self.local_cloudiness = s/count
    selem = square(self.cloud_window_size)
    self.local_cloudiness = rank.mean(self.cloudy.astype(int), selem)
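# Standalone sketch of the local-cloudiness computation above (illustrative,
# not from the original class). It assumes a synthetic binary cloud mask and
# a hypothetical 9-pixel window; rank.mean expects 8- or 16-bit input, so the
# sketch scales the mask to uint8 values 0/255 before averaging.
import numpy as np
from skimage.filters import rank
from skimage.morphology import square

cloudy = np.random.rand(50, 50) > 0.7                   # hypothetical cloud mask
local_cloudiness = rank.mean(cloudy.astype(np.uint8) * 255, square(9)) / 255.0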
def blur_img_in_folder(path):
    if not os.path.exists(os.path.join(path, "B")):
        os.mkdir(os.path.join(path, "B"))
    for file_name in get_file_list_in_dir(path):
        image = io.imread(os.path.join(path, file_name))
        selem = disk(20)
        color_array = [
            rank.mean(image[:, :, dim], selem=selem).reshape(*image.shape[:2], 1)
            for dim in range(3)
        ]
        result = np.concatenate(color_array, axis=2)
        io.imsave(os.path.join(path, "B", file_name), result)
def fix():
    #png_files = [f for f in listdir(depth_folder) if isfile(join(depth_folder, f))]
    #Parallel(n_jobs=4)(delayed(processInput)(i) for i in range(0, len(png_files)))
    for i in range(0, len(png_files)):
        depth = imread(depth_folder + "/" + png_files[i])
        depth = rank.mean(depth, rectangle(3, 3), mask=depth != 0)
        imsave("/Data2TB/FastFood/S1/depth_fixed/" + png_files[i], depth)
        #dcv = cv2.imread("/Data2TB/FastFood/S1/depth_fixed/" + png_files[i], -1)
        dcv = (depth / 256).astype('uint8')
        #dcv = np.zeros((480,640,3), np.uint8)
        #dcv = cv2.cvtColor(depth,cv2.COLOR_GRAY2RGB,)
        cv2.imwrite("/Data2TB/FastFood/S1/depth_paletted_fixed/" + png_files[i],
                    dcv)
def bleach_location(pre_pixels: np.array,
                    post_pixels: np.array,
                    expected_position=DEFAULT,
                    half_roi_size=DEFAULT):
    """
    Finds the location of a bright spot in the post_pixels image.

    The pre_pixels image will be subtracted from the post_pixels image (after
    adding an offset to account for noise), mean filtered, and the position
    of the maximum in the image will be returned. To speed up finding the
    location, provide the estimated location and an ROI within which the real
    maximum should be located.

    :param pre_pixels:
    :param post_pixels:
    :param expected_position: tuple[int, int] with expected position
    :param half_roi_size: tuple[int, int] area around expected_position to be
        searched for the spot
    :return:
    """
    # assume pre and post are the same size
    if expected_position == DEFAULT or half_roi_size == DEFAULT:
        pre_roi = pre_pixels
        post_roi = post_pixels
    else:
        if (expected_position[0] - half_roi_size[0] < 0) or \
                (expected_position[1] - half_roi_size[1] < 0) or \
                (expected_position[0] + half_roi_size[0] > post_pixels.shape[0]) or \
                (expected_position[1] + half_roi_size[1] > post_pixels.shape[1]):
            pre_roi = pre_pixels
            post_roi = post_pixels
        else:
            cc = post_pixels.shape[1] - expected_position[1]
            ep_rc = [expected_position[0], cc]
            pre_roi = pre_pixels[ep_rc[0] - half_roi_size[0]:ep_rc[0] + half_roi_size[0],
                                 ep_rc[1] - half_roi_size[1]:ep_rc[1] + half_roi_size[1]]
            post_roi = post_pixels[ep_rc[0] - half_roi_size[0]:ep_rc[0] + half_roi_size[0],
                                   ep_rc[1] - half_roi_size[1]:ep_rc[1] + half_roi_size[1]]
    subtracted = post_roi + 100 - pre_roi
    selem = disk(2)
    subtracted_mean = rank.mean(subtracted, selem=selem)
    peaks_rc_roi = peak_local_max(subtracted_mean, min_distance=20,
                                  threshold_rel=0.6, num_peaks=1, indices=True)
    peaks_rc = peaks_rc_roi + [ep_rc[0] - half_roi_size[0],
                               ep_rc[1] - half_roi_size[1]]
    peaks = [peaks_rc[0][0], post_pixels.shape[1] - peaks_rc[0][1]]
    return peaks
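# Illustrative call of bleach_location (not part of the original module).
# Synthetic pre/post frames with a single bright spot; the expected position
# and ROI half-size are hypothetical values chosen to cover the spot. Note
# that the column of expected_position is given in the flipped coordinate the
# function uses internally, and that the function relies on a skimage version
# in which peak_local_max still accepts indices=True.
import numpy as np

pre = np.full((256, 256), 10, dtype=np.uint16)
post = pre.copy()
post[140:144, 60:64] = 4000                     # simulated bleach spot at row ~142, col ~62
loc = bleach_location(pre, post, (142, 256 - 62), (40, 40))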
def smooth(image_in, smooth_type, smooth_size, contrast=False):
    # smoothed = rank.median(np.copy(image_in), disk(smooth_size))
    if smooth_type == 'mean':
        print('running a mean filter')
        return rank.mean(np.copy(image_in), disk(smooth_size))
    if smooth_type == 'median':
        print('running a median filter')
        return rank.median(np.copy(image_in), disk(smooth_size))
    if smooth_type == 'gaussian':
        print('running a Gaussian filter')
        return gaussian(image_in, smooth_size)
    if smooth_type == 'none':
        print('not running a filter')
        return np.copy(image_in)
def test_16bit():
    image = np.zeros((21, 21), dtype=np.uint16)
    selem = np.ones((3, 3), dtype=np.uint8)

    for bitdepth in range(17):
        value = 2 ** bitdepth - 1
        image[10, 10] = value
        if bitdepth > 11:
            expected = ['Bitdepth of %s' % (bitdepth - 1)]
        else:
            expected = []
        with expected_warnings(expected):
            assert rank.minimum(image, selem)[10, 10] == 0
            assert rank.maximum(image, selem)[10, 10] == value
            assert rank.mean(image, selem)[10, 10] == int(value / selem.size)
def _segment_edge_areas(self, edges, disk_size, mean_threshold, min_object_size):
    """
    Takes a greyscale image (with brighter colors corresponding to edges) and
    returns a binary image where white indicates an area with high edge
    density and black indicates low density.
    """
    # Convert the greyscale edge information into a black and white (i.e. binary) image
    threshold = threshold_otsu(edges)
    # Filter out the edge data below the threshold, effectively removing some noise
    raw_channel_areas = edges <= threshold
    # Smooth out the data
    channel_areas = rank.mean(raw_channel_areas, disk(disk_size)) < mean_threshold
    # Remove specks and blobs that are the result of artifacts
    clean_channel_areas = remove_small_objects(channel_areas,
                                               min_size=min_object_size)
    # Fill in any areas that are completely surrounded by the areas
    # (hopefully) covering the channels
    return ndimage.binary_fill_holes(clean_channel_areas)
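# Hedged usage sketch (added; the enclosing class is not shown in this
# excerpt, so the actual call is left commented out). It assumes the edge map
# comes from an edge detector such as sobel, scaled to uint8 so that the
# rank.mean density estimate behaves as in the method above; the parameter
# values are hypothetical.
import numpy as np
from skimage.filters import sobel

edges = (sobel(np.random.rand(128, 128)) * 255).clip(0, 255).astype(np.uint8)
# areas = self._segment_edge_areas(edges, disk_size=9, mean_threshold=100,
#                                  min_object_size=500)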
def ski_uniform_filter(img, myfilter, kernel):
    # Applies a filter to an image. skimage defines the kernel differently
    # for each filter; this function compensates for that.
    if myfilter == "Median":
        kernel2d = np.ones((kernel, kernel), dtype=bool)
        result = skimage.filters.median(img, selem=kernel2d)
    elif myfilter == "Gaussian":
        result = skimage.filters.gaussian(img, kernel)
    elif myfilter == "Mean":
        selem = disk(kernel)
        result = rank.mean(img, selem=selem)
    elif myfilter == "None":
        result = img
    return result
def test_16bit(self):
    image = np.zeros((21, 21), dtype=np.uint16)
    footprint = np.ones((3, 3), dtype=np.uint8)

    for bitdepth in range(17):
        value = 2**bitdepth - 1
        image[10, 10] = value
        if bitdepth >= 11:
            expected = ['Bad rank filter performance']
        else:
            expected = []
        with expected_warnings(expected):
            assert rank.minimum(image, footprint)[10, 10] == 0
            assert rank.maximum(image, footprint)[10, 10] == value
            mean_val = rank.mean(image, footprint)[10, 10]
            assert mean_val == int(value / footprint.size)
def test_random_sizes(self):
    # make sure the size is not a problem
    elem = np.array([[1, 1, 1], [1, 1, 1], [1, 1, 1]], dtype=np.uint8)
    for m, n in np.random.randint(1, 101, size=(10, 2)):
        mask = np.ones((m, n), dtype=np.uint8)

        image8 = np.ones((m, n), dtype=np.uint8)
        out8 = np.empty_like(image8)
        rank.mean(image=image8, selem=elem, mask=mask, out=out8,
                  shift_x=0, shift_y=0)
        assert_equal(image8.shape, out8.shape)
        rank.mean(image=image8, selem=elem, mask=mask, out=out8,
                  shift_x=+1, shift_y=+1)
        assert_equal(image8.shape, out8.shape)

        rank.geometric_mean(image=image8, selem=elem, mask=mask, out=out8,
                            shift_x=0, shift_y=0)
        assert_equal(image8.shape, out8.shape)
        rank.geometric_mean(image=image8, selem=elem, mask=mask, out=out8,
                            shift_x=+1, shift_y=+1)
        assert_equal(image8.shape, out8.shape)

        image16 = np.ones((m, n), dtype=np.uint16)
        out16 = np.empty_like(image16)
        rank.mean(image=image16, selem=elem, mask=mask, out=out16,
                  shift_x=0, shift_y=0)
        assert_equal(image16.shape, out16.shape)
        rank.mean(image=image16, selem=elem, mask=mask, out=out16,
                  shift_x=+1, shift_y=+1)
        assert_equal(image16.shape, out16.shape)

        rank.geometric_mean(image=image16, selem=elem, mask=mask, out=out16,
                            shift_x=0, shift_y=0)
        assert_equal(image16.shape, out16.shape)
        rank.geometric_mean(image=image16, selem=elem, mask=mask, out=out16,
                            shift_x=+1, shift_y=+1)
        assert_equal(image16.shape, out16.shape)

        rank.mean_percentile(image=image16, mask=mask, out=out16,
                             selem=elem, shift_x=0, shift_y=0, p0=.1, p1=.9)
        assert_equal(image16.shape, out16.shape)
        rank.mean_percentile(image=image16, mask=mask, out=out16,
                             selem=elem, shift_x=+1, shift_y=+1, p0=.1, p1=.9)
        assert_equal(image16.shape, out16.shape)
def color_adjustment(self, img, mask=None, gaussian_std=.0, gamma=1.0,
                     contrast=1.0, brightness=0,
                     mult_rgb=np.array([1.0, 1.0, 1.0]), blur_radius=0):
    img **= gamma
    img *= contrast
    img += np.random.randn(*img.shape).astype('float32') * gaussian_std
    img += brightness
    img *= mult_rgb
    np.clip(img, 0.0, 1.0, img)
    blur_mask = None
    if mask is not None:
        blur_mask = random.choice([mask, 1 - mask, np.ones_like(mask)])
    if blur_radius > 0:
        selem = disk(blur_radius)
        tmp_img = img.copy()
        for i in range(img.shape[2]):
            img[:, :, i] = rank.mean(img[:, :, i], selem=selem,
                                     mask=blur_mask) / 255.0
        img[np.where(blur_mask == 0)] = tmp_img[np.where(blur_mask == 0)]
    return img
def mask_gen(img_filepath):
    # Open image
    img = io.imread(img_filepath)
    converted_img = img_as_float(img)
    # local mean smoothing before thresholding
    img_smooth = mean(converted_img, morphology.disk(10))
    # Equalize histogram of input image
    img_histeq = exposure.equalize_adapthist(img_smooth)
    # Highpass filter for image
    img_otsu = img_histeq >= filters.threshold_otsu(img_histeq)
    # generate mask
    # edges = feature.canny(filters.gaussian(img_histeq),
    #                       sigma=1,
    #                       # low_threshold=0.01*((2**16)-1),
    #                       # high_threshold=0.1*((2**16)-1)
    #                       )
    # mask = ndi.binary_fill_holes(edges)
    # mask = morphology.binary_dilation(mask)
    # mask = ndi.binary_fill_holes(mask)
    # mask = morphology.binary_opening(mask)
    # mask = ndi.binary_fill_holes(mask)
    final_mask = ndi.binary_fill_holes(img_otsu)
    # remove blobs touching border
    cleared_mask = segmentation.clear_border(final_mask)
    label_mask = img_labeler(cleared_mask)
    mask_centroids = centroids(label_mask)
    # TODO: test blob removal
    distances = []
    for centroid in mask_centroids:
        distances.append(ruler(*centroid, len(img) - 1, len(img) - 1))
    # Minimum distance centroid from bottom right
    try:
        min_idx = distances.index(min(distances))
        # print("southeast-most centroid index: " + str(min_idx))
        # remove labeled regions in for loop
        for idx, region in enumerate(measure.regionprops(label_mask)):
            if idx != min_idx:
                for region_coord in region.coords:
                    x = region_coord[0]
                    y = region_coord[1]
                    cleared_mask[x, y] = 0
    except ValueError:
        raise ValueError("Couldn't segment", img_filepath)
    return (img, img_smooth, img_otsu, final_mask, cleared_mask)
def check_all():
    np.random.seed(0)
    image = np.random.rand(25, 25)
    selem = morphology.disk(1)
    refs = np.load(os.path.join(skimage.data_dir, "rank_filter_tests.npz"))

    assert_equal(refs["autolevel"], rank.autolevel(image, selem))
    assert_equal(refs["autolevel_percentile"],
                 rank.autolevel_percentile(image, selem))
    assert_equal(refs["bottomhat"], rank.bottomhat(image, selem))
    assert_equal(refs["equalize"], rank.equalize(image, selem))
    assert_equal(refs["gradient"], rank.gradient(image, selem))
    assert_equal(refs["gradient_percentile"],
                 rank.gradient_percentile(image, selem))
    assert_equal(refs["maximum"], rank.maximum(image, selem))
    assert_equal(refs["mean"], rank.mean(image, selem))
    assert_equal(refs["geometric_mean"], rank.geometric_mean(image, selem))
    assert_equal(refs["mean_percentile"], rank.mean_percentile(image, selem))
    assert_equal(refs["mean_bilateral"], rank.mean_bilateral(image, selem))
    assert_equal(refs["subtract_mean"], rank.subtract_mean(image, selem))
    assert_equal(refs["subtract_mean_percentile"],
                 rank.subtract_mean_percentile(image, selem))
    assert_equal(refs["median"], rank.median(image, selem))
    assert_equal(refs["minimum"], rank.minimum(image, selem))
    assert_equal(refs["modal"], rank.modal(image, selem))
    assert_equal(refs["enhance_contrast"], rank.enhance_contrast(image, selem))
    assert_equal(refs["enhance_contrast_percentile"],
                 rank.enhance_contrast_percentile(image, selem))
    assert_equal(refs["pop"], rank.pop(image, selem))
    assert_equal(refs["pop_percentile"], rank.pop_percentile(image, selem))
    assert_equal(refs["pop_bilateral"], rank.pop_bilateral(image, selem))
    assert_equal(refs["sum"], rank.sum(image, selem))
    assert_equal(refs["sum_bilateral"], rank.sum_bilateral(image, selem))
    assert_equal(refs["sum_percentile"], rank.sum_percentile(image, selem))
    assert_equal(refs["threshold"], rank.threshold(image, selem))
    assert_equal(refs["threshold_percentile"],
                 rank.threshold_percentile(image, selem))
    assert_equal(refs["tophat"], rank.tophat(image, selem))
    assert_equal(refs["noise_filter"], rank.noise_filter(image, selem))
    assert_equal(refs["entropy"], rank.entropy(image, selem))
    assert_equal(refs["otsu"], rank.otsu(image, selem))
    assert_equal(refs["percentile"], rank.percentile(image, selem))
    assert_equal(refs["windowed_histogram"],
                 rank.windowed_histogram(image, selem))
def pyramid_decomposition(image):
    """
    Decomposition starts at the scale of the original image, which is split
    into non-overlapping 2x2-pixel squares; in each square we take the
    minimum, maximum and mean of its 4 pixels. From these values we build
    three images (minima, maxima and means), each reduced by a factor of 2
    horizontally and vertically relative to the original. The procedure is
    repeated, decomposing the resulting images into pyramids down to the
    level where the size is still at least 2 pixels in each dimension.
    :param image:
    :return:
    """
    width, height = image.shape
    factor = 2
    max_list, min_list, mean_list = [], [], []

    image_2_map = image.copy()
    map_max = maximum_filter(image_2_map, factor)
    map_min = minimum_filter(image_2_map, factor)
    map_mean = mean_percentile(image_2_map, np.ones((2, 2))) / 255.

    while min(width, height) > 3:
        width, height = int(width / factor), int(height / factor)
        map_max = resize(map_max, (width, height))
        map_min = resize(map_min, (width, height))
        map_mean = resize(map_mean, (width, height))
        width, height = map_max.shape

        max_list.append(map_max)
        min_list.append(map_min)
        mean_list.append(map_mean)

        map_max = maximum_filter(map_max, factor)
        map_min = minimum_filter(map_min, factor)
        map_mean = mean(map_mean, np.ones((2, 2)))

    return max_list, min_list, mean_list
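# A minimal pure-numpy sketch of one decomposition step described in the
# docstring above (added for illustration; not part of the original source):
# each 2x2 block of the input collapses into one pixel of the min, max and
# mean maps of the next pyramid level.
import numpy as np

img = np.arange(16, dtype=float).reshape(4, 4)    # hypothetical 4x4 input
blocks = img.reshape(2, 2, 2, 2).swapaxes(1, 2)   # group into 2x2 blocks
level_min = blocks.min(axis=(2, 3))               # 2x2 map of block minima
level_max = blocks.max(axis=(2, 3))               # 2x2 map of block maxima
level_mean = blocks.mean(axis=(2, 3))             # 2x2 map of block means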
def merge(self, ori_img, mask_img, fil_light=True):
    img = ori_img.copy()
    img_s = cv2.split(img)
    if not self.if_set_border:
        print("merge base circle")
        # Hough-circle region matrix
        border = np.fromfunction(self.func2, mask_img.shape)
    else:
        print("merge base border")
        self.border_mask = cv2.fillConvexPoly(self.border_mask, self.border_cnt, 0)
        cv2.imwrite("out.jpg", self.border_mask)
        mask = cv2.resize(self.border_mask, (ori_img.shape[1], ori_img.shape[0]),
                          cv2.INTER_NEAREST)
        # manually selected region matrix
        border = np.where(mask == 0, True, False)
    # compute the total area
    self.all_area = np.sum(border == True)
    # overlay the detection result matrix
    border = np.where(mask_img > 0, border, False)
    # overlay the local fine-texture merge matrix (if local fine texture
    # exceeds 30%, the whole region is treated as an impurity region)
    bad = sfr.mean(border, disk(15))
    border = np.where(bad > 255 * 0.30, True, border)   # originally 0.25
    border = np.where(bad < 255 * 0.08, False, border)  # originally 0.08
    if fil_light:
        # overlay the light-spot filtering matrix
        light = sfr.maximum(self.orig_gray, disk(15))
        # light_mean = sfr.mean(self.orig_gray, disk(15))
        # light = np.where(light_mean > 255 * 0.9, light, self.orig_gray)
        border = np.where(light < 230, border, False)
    mask = np.where(border, mask_img, 0)
    img_s[0] = np.where(border, 255, img_s[0])
    img_s[1] = np.where(border, 0, img_s[1])
    img_s[2] = np.where(border, 0, img_s[2])
    img = cv2.merge(img_s)
    # compute the impurity area
    self.dirt_area = np.sum(border == True)
    return img, mask
def meanFilter(self, m=3, array=numpy.empty(0)):
    """
    Mean filtering replaces the intensity value by the average intensity of a
    pixel's neighbours, including itself. m is the size of the filter,
    default is 3x3.

    @method meanFilter
    @param m {int} The width and height of the m x m filtering matrix,
        default is 3.
    @param array {numpy array} the array which the operation is carried out on.
    """
    self.__printStatus("Mean filtering " + str(m) + "x" + str(m) + "...")
    if not array.any():
        array = self.image_array
    if array.dtype not in ["uint8", "uint16"]:
        array = numpy.uint8(array)
    mean3x3filter = rank.mean(array, square(m), mask=self.mask)
    self.image_array = mean3x3filter * self.mask
    self.__printStatus("[done]", True)
    return self
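# Standalone equivalent of the masked mean filtering above (illustrative; the
# enclosing class and its mask attribute are not shown in this excerpt).
# Assumes a synthetic uint8 image; `mask` marks the valid pixels, and the
# final multiplication zeroes everything outside it, as the method does.
import numpy as np
from skimage.filters import rank
from skimage.morphology import square

arr = (np.random.rand(40, 40) * 255).astype(np.uint8)  # hypothetical image
mask = np.ones_like(arr, dtype=np.uint8)               # hypothetical valid-pixel mask
filtered = rank.mean(arr, square(3), mask=mask) * mask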
def create_markers(im, min_size=3, connectivity=2):
    im = rank.mean(im, np.ones((3, 3)))
    im = im > threshold_li(im)
    im = erosion(im, np.ones((3, 3)))
    labels = measure.label(im, connectivity=connectivity)

    labels_flat = labels.ravel()
    labels_count = np.bincount(labels_flat)
    index = np.argsort(labels_flat)[labels_count[0]:]
    coordinates = np.column_stack(np.unravel_index(index, im.shape))
    lab = np.cumsum(labels_count[1:])

    im = np.zeros(im.shape, np.uint8)
    it = dict(enumerate(np.split(coordinates, lab), start=1))
    for _, indexes in it.items():
        if len(indexes) < min_size:
            continue
        center = np.mean(indexes, axis=0)
        y, x = int(center[0]), int(center[1])
        im[y, x] = 255
    return im
def background_subtraction(phase_contrast, diameter=19):
    """Local mean subtraction with clipping.

    Parameters
    ----------
    phase_contrast : numpy.ndarray

    Returns
    -------
    Tuple of numpy.ndarray
    """
    # compute local average image
    selem = disk(diameter)
    background = mean((phase_contrast * 255).astype('uint8'), selem) / 255.

    # build difference and clip
    segmentation = np.maximum(background - phase_contrast, 0)
    return background, segmentation
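# Example call (illustrative, not from the original source). Assumes a float
# phase-contrast image normalised to [0, 1], which the uint8 conversion
# inside the function requires, and that `mean` and `disk` are imported as in
# the function above.
import numpy as np

phase = np.random.rand(100, 100)  # hypothetical normalised phase-contrast image
background, segmentation = background_subtraction(phase, diameter=19)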
def apply_filter(img, v=1):
    if v == 0:
        return img[:, :, 0]
    elif v == 1:
        return remove_small_blobs(img[:, :, 0], background=255)
    elif v == 2:
        selem = disk(1.4)
        dilatado = dilation(
            remove_small_blobs(img[:, :, 0], background=255, min_area=10), selem)
        unblobbed2 = remove_small_blobs(erosion(dilatado, selem),
                                        background=255, min_area=15)
        return rank.mean(unblobbed2, selem=selem)
    elif v == 3:
        unblobbed = remove_small_blobs(img[:, :, 0], background=255)
        selem = disk(1.4)
        return remove_small_blobs(dilation(unblobbed, selem),
                                  background=255, min_area=10)
    else:
        pass
def background_subtraction(self, img, method='avg'):
    #width, height = img.shape
    if method == 'avg':
        # vigra
        #kernel = vigra.filters.averagingKernel(radius)
        #bgsub = img - vigra.filters.convolve(self.ut.to_float(img), kernel)

        # with skimage
        se = disk(self.settings.background_subtraction['radius'])
        bgsub = img.astype(np.dtype('float')) - rank.mean(img, se)
        bgsub[bgsub < 0] = 0
        bgsub = bgsub.astype(img.dtype)
    elif method == 'med':
        # vigra
        #kernel = vigra.filters.averagingKernel(radius)
        #bgsub = img - vigra.filters.convolve(self.ut.to_float(img), kernel)

        # with skimage
        se = disk(self.settings.background_subtraction['radius'])
        bgsub = img.astype(np.dtype('float')) - rank.median(img, se)
        bgsub[bgsub < 0] = 0
        bgsub = bgsub.astype(img.dtype)
    elif method == 'constant_median':
        # vigra
        #bgsub = img - np.median(np.array(img))

        # with skimage
        bgsub = img - np.median(img)
        bgsub[bgsub < 0] = 0
        bgsub = bgsub.astype(img.dtype)
    return bgsub
def mean_filter(image, kernel_shape, kernel_size):
    """Apply a mean filter to a 2-d image.

    Parameters
    ----------
    image : np.ndarray, np.uint
        Image with shape (y, x).
    kernel_shape : str
        Shape of the kernel used to compute the filter ('diamond', 'disk',
        'rectangle' or 'square').
    kernel_size : int or Tuple(int)
        The size of the kernel. For the rectangle we expect two integers
        (height, width).

    Returns
    -------
    image_filtered : np.ndarray, np.uint
        Filtered 2-d image with shape (y, x).

    """
    # check parameters
    check_array(image, ndim=2, dtype=[np.uint8, np.uint16])
    check_parameter(kernel_shape=str, kernel_size=(int, tuple, list))

    # get kernel
    kernel = _define_kernel(shape=kernel_shape,
                            size=kernel_size,
                            dtype=image.dtype)

    # apply filter
    image_filtered = rank.mean(image, kernel)

    return image_filtered
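# Example call (illustrative; the helpers check_array, check_parameter and
# _define_kernel are assumed to be importable from the surrounding module, as
# the function above uses them).
import numpy as np

dummy = (np.random.rand(64, 64) * 65535).astype(np.uint16)  # hypothetical image
smoothed = mean_filter(dummy, kernel_shape="disk", kernel_size=3)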
unique = np.unique(R)
centroid = ndimage.measurements.center_of_mass(gel, R, unique)

# finding the intensity and printing results:
for i in range(len(centroid)):
    print('Spot %d x: %f y: %f intensity: %d' % (i + 1, centroid[i][0],
          centroid[i][1], gel[int(centroid[i][0]), int(centroid[i][1])]))

colored = label2rgb(R, gel, bg_label=0)
for i in range(len(centroid)):
    rr, cc = circle(centroid[i][0], centroid[i][1], 2)
    colored[rr, cc] = (1, 0, 0)
io.imsave(argv[2], colored)

print("Smoothing the image before running watershed...")
loc_mean = mean(gel, disk(1))
smooth_M = watershed(loc_mean, ' ')
print("Number of spots found on the smoothed image without any post process is:")
print(len(np.unique(smooth_M)))

R = morphology.remove_small_objects(smooth_M.astype(int), 6)
colored = label2rgb(R, gel, bg_label=0)
io.imsave('smooth_cleaned.png', colored)
print("Number of spots found on the smoothed image after image processing is:")
print(len(np.unique(R)))

#############################
# Answering question 1, using different edge operator methods to detect markers:
print('The number of spots found using the roberts gradient method is:')
M_roberts = watershed(gel, 'roberts')
R_roberts = morphology.remove_small_objects(M_roberts.astype(int), 64)
print(len(np.unique(R_roberts)))
def pipeline(filename):

    # Report that the pipeline is being executed
    print("  Starting pipeline for", filename)

    # Import tif file
    import skimage.io as io    # Image file manipulation module
    img = io.imread(filename)  # Importing multi-color tif file

    # Slicing: We only work on one channel for segmentation
    green = img[0, :, :]

    #------------------------------------------------------------------------------
    # PREPROCESSING AND SIMPLE CELL SEGMENTATION:
    # (I) SMOOTHING AND (II) ADAPTIVE THRESHOLDING

    # -------
    # Part I
    # -------

    # Gaussian smoothing
    sigma = 3                                                 # Smoothing factor for Gaussian
    green_smooth = ndi.filters.gaussian_filter(green, sigma)  # Perform smoothing

    # -------
    # Part II
    # -------

    # Create an adaptive background
    struct = ((np.mgrid[:31, :31][0] - 15)**2 +
              (np.mgrid[:31, :31][1] - 15)**2) <= 15**2  # Create a disk-shaped structural element
    from skimage.filters import rank             # Import module containing mean filter function
    bg = rank.mean(green_smooth, selem=struct)   # Run a mean filter over the image using the disc

    # Threshold using created background
    green_mem = green_smooth >= bg

    # Clean by morphological hole filling
    green_mem = ndi.binary_fill_holes(np.logical_not(green_mem))

    #------------------------------------------------------------------------------
    # IMPROVED CELL SEGMENTATION BY SEEDING AND EXPANSION:
    # (I) SEEDING BY DISTANCE TRANSFORM
    # (II) EXPANSION BY WATERSHED

    # -------
    # Part I
    # -------

    # Distance transform on thresholded membranes
    # Advantage of distance transform for seeding: It is quite robust to local
    # "holes" in the membranes.
    green_dt = ndi.distance_transform_edt(green_mem)

    # Dilating (maximum filter) of distance transform improves results
    green_dt = ndi.filters.maximum_filter(green_dt, size=10)

    # Retrieve and label the local maxima
    from skimage.feature import peak_local_max
    green_max = peak_local_max(green_dt, indices=False, min_distance=10)  # Local maximum detection
    green_max = ndi.label(green_max)[0]                                   # Labeling

    # -------
    # Part II
    # -------

    # Get the watershed function and run it
    from skimage.morphology import watershed
    green_ws = watershed(green_smooth, green_max)

    #------------------------------------------------------------------------------
    # IDENTIFICATION OF CELL EDGES

    # Define the edge detection function
    def edge_finder(footprint_values):
        if (footprint_values == footprint_values[0]).all():
            return 0
        else:
            return 1

    # Iterate the edge finder over the segmentation
    green_edges = ndi.filters.generic_filter(green_ws, edge_finder, size=3)

    #------------------------------------------------------------------------------
    # POSTPROCESSING: REMOVING CELLS AT THE IMAGE BORDER

    # Create a mask for the image boundary pixels
    boundary_mask = np.ones_like(green_ws)  # Initialize with all ones
    boundary_mask[1:-1, 1:-1] = 0           # Set middle square to 0

    # Iterate over all cells in the segmentation
    current_label = 1
    for cell_id in np.unique(green_ws):
        # If the current cell touches the boundary, remove it
        if np.sum((green_ws == cell_id) * boundary_mask) != 0:
            green_ws[green_ws == cell_id] = 0
        # This is to keep the labeling continuous, which is cleaner
        else:
            green_ws[green_ws == cell_id] = current_label
            current_label += 1

    #------------------------------------------------------------------------------
    # MEASUREMENTS: SINGLE-CELL AND MEMBRANE READOUTS

    # Initialize a dict for results of choice
    results = {"cell_id": [], "green_mean": [], "red_mean": [],
               "green_membrane_mean": [], "red_membrane_mean": [],
               "cell_size": [], "cell_outline": []}

    # Iterate over segmented cells
    for cell_id in np.unique(green_ws)[1:]:

        # Mask the pixels of the current cell
        cell_mask = green_ws == cell_id
        edge_mask = np.logical_and(cell_mask, green_edges)

        # Get the current cell's values
        # Note that the original raw data is used for quantification!
        results["cell_id"].append(cell_id)
        results["green_mean"].append(np.mean(img[0, :, :][cell_mask]))
        results["red_mean"].append(np.mean(img[1, :, :][cell_mask]))
        results["green_membrane_mean"].append(np.mean(img[0, :, :][edge_mask]))
        results["red_membrane_mean"].append(np.mean(img[1, :, :][edge_mask]))
        results["cell_size"].append(np.sum(cell_mask))
        results["cell_outline"].append(np.sum(edge_mask))

    #------------------------------------------------------------------------------
    # REPORT PROGRESS AND RETURN RESULTS
    print("  Completed pipeline for", filename)
    return green_ws, results
# increasing, objects with bigger sizes are filtered as well, such as the
# camera tripod. The median filter is often used for noise removal because
# borders are preserved and e.g. salt and pepper noise typically does not
# distort the gray-level.
#
# Image smoothing
# ===============
#
# The example hereunder shows how a local **mean** filter smooths the camera
# man image.

from skimage.filters.rank import mean

fig, (ax1, ax2) = plt.subplots(1, 2, figsize=[10, 7], sharex=True, sharey=True)

loc_mean = mean(noisy_image, disk(10))

ax1.imshow(noisy_image, vmin=0, vmax=255, cmap=plt.cm.gray)
ax1.set_title('Original')
ax1.axis('off')
ax1.set_adjustable('box-forced')

ax2.imshow(loc_mean, vmin=0, vmax=255, cmap=plt.cm.gray)
ax2.set_title('Local mean $r=10$')
ax2.axis('off')
ax2.set_adjustable('box-forced')

######################################################################
#
# One may be interested in smoothing an image while preserving important
# borders (median filters already achieved this), here we use the
noise = (df.at[0, 'noise'])[0]
adv_im = (df.at[0, 'adv_im'])[0]
adv_recon = (df.at[0, 'adv_recon'])[0]

if sys.argv[2] == 'bin':
    adv_bin = adv_im.copy()
    adv_bin = (adv_bin.reshape((-1, 784))).astype(np.float32)
    adv_bin[adv_bin > 0.5] = 1
    adv_bin[adv_bin <= 0.5] = 0
    l_noise.b.set_value(np.zeros((784, )).astype(np.float32))
    adv_bin_recon = adv_plot(mnist_input(adv_bin))[0]
    adv_bin_recon = (adv_bin_recon / 255.0).astype(np.float32)

if sys.argv[2] == 'mean_filter':
    radius = 1
    adv_mean = mean(adv_im, disk(radius))
    # adv_mean = (adv_mean.reshape((-1, 784))).astype(np.float32)
    l_noise.b.set_value(np.zeros((784, )).astype(np.float32))
    adv_mean_recon = adv_plot(mnist_input(adv_mean))[0]
    adv_mean_recon = (adv_mean_recon / 255.0).astype(np.float32)
    orig_mean = mean(orig.copy().reshape(28, 28), disk(radius))

#print("adv_bin shape: ", adv_bin.shape)
#adv_bin_recon = lasagne.layers.get_output(l_dec_x, adv_bin, deterministic = True)
#print("type of adv_bin_recon: ", type(adv_bin_recon))

fig = plt.figure(figsize=(10, 10))
img = orig
i = 1
title = "Original Image"
def TestSample(self):  # Run pump, take samples and analyse
    global currentPhoto
    self.im1Count["text"] = "-"  # Reset text fields
    self.im2Count["text"] = "-"
    self.im3Count["text"] = "-"
    self.im1Average["text"] = "-"
    self.im2Average["text"] = "-"
    self.im3Average["text"] = "-"
    self.imFinalCount["text"] = "-"
    self.imFinalAverage["text"] = "-"
    self.sizeAct["text"] = "-"
    self.Confidence["text"] = "-"
    self.ConfDisp["bg"] = "grey"
    ##'''
    global camera
    camera.stop_preview()  # Quit preview if open

    ########################### Run pump and take Pictures ###############################
    self.pump_On()           # Turn on pump
    self.update_idletasks()  # Refresh Gui
    for x in range(0, 25):   # Wait 25 seconds
        self.labelCurrentAction["text"] = "Pumping Liquid - %d" % (25 - x)
        self.update_idletasks()
        time.sleep(1)
    self.pump_Off()  # Turn off pump

    for x in range(1, 4):  # Take 3 images
        self.pump_Off()
        self.labelCurrentAction["text"] = "Powder Settle Time"
        self.update_idletasks()
        time.sleep(2)
        self.labelCurrentAction["text"] = "Capturing Image %d" % x
        camera.hflip = True  # Flip camera orientation appropriately
        camera.vflip = True
        camera.capture("/home/pi/PythonTempFolder/OrigPic" + str(x) + ".jpg")  # Save image to default directory
        self.update_idletasks()
        time.sleep(2)
        if x < 3:
            self.pump_On()          # Turn on pump
            for y in range(0, 6):   # Wait 6 seconds
                self.labelCurrentAction["text"] = "Pumping Liquid - %d" % (6 - y)
                self.update_idletasks()
                time.sleep(1)
            self.pump_Off()  # Turn off pump
    ##'''
    ################################################################################################

    ########################### Analyse Pictures ###############################
    for x in range(1, 4):
        self.labelCurrentAction["text"] = "Loading image as greyscale - im %d" % x
        self.update_idletasks()
        image1 = io.imread("/home/pi/PythonTempFolder/OrigPic" + str(x) + ".jpg",
                           as_grey=True)  # Load image as greyscale
        ##
        ##image1 = io.imread('/home/pi/SDP Project/PowderTests/PPIM169041/169041Pic' + str(x) + '.jpg', as_grey=True)  ##Comment Out
        ##
        self.labelCurrentAction["text"] = "Cropping"  # Crop image
        self.update_idletasks()
        fromFile = np.asarray(image1, dtype=np.float32)
        orig = fromFile[0:1080, 420:1500]
        currentPhoto = orig
        self.showCurrent()
        self.update_idletasks()
        time.sleep(2)

        self.labelCurrentAction["text"] = "Applying minimum filter"  # Apply minimum filter
        self.update_idletasks()
        image2 = minimum(orig, disk(6))
        currentPhoto = image2
        self.t.destroy()
        self.update_idletasks()
        self.showCurrent()
        self.update_idletasks()

        self.labelCurrentAction["text"] = "Applying mean filter"  # Apply mean filter
        self.update_idletasks()
        image3 = mean(image2, disk(22))
        currentPhoto = image3
        self.t.destroy()
        self.update_idletasks()
        self.showCurrent()
        self.update_idletasks()

        self.labelCurrentAction["text"] = "Applying maximum filter"  # Apply maximum filter
        self.update_idletasks()
        image4 = maximum(image3, disk(6))
        currentPhoto = image4
        self.t.destroy()
        self.update_idletasks()
        self.showCurrent()
        self.update_idletasks()
        time.sleep(2)

        self.labelCurrentAction["text"] = "Normalising"  # Subtract filtered image from original
        self.update_idletasks()
        new = np.asarray(image4, dtype=np.float32)
        new[0:, 0:] = new[0:, 0:] / 255
        sub = np.subtract(orig, new)
        sub[0:, 0:] += 128 / 255  # Scale appropriately
        imFinal = sub
        currentPhoto = sub
        self.t.destroy()
        self.update_idletasks()
        self.showCurrent()
        self.update_idletasks()
        time.sleep(1)

        self.labelCurrentAction["text"] = "Thresholding (Otsu)"  # Get Otsu threshold value from image
        self.update_idletasks()
        thresh = threshold_otsu(imFinal)  ##Threshold
        print("T - " + str(thresh))
        intensity = float(self.entryIntensity.get())  # Get manual threshold value from text field

        self.labelCurrentAction["text"] = "Creating Binary Image"  # Create binary image from threshold value (changed to manual - ignore otsu)
        self.update_idletasks()
        binary = sub <= intensity  # 0.095 #(thresh+0.2)
        scipy.misc.imsave("/home/pi/PythonTempFolder/binary" + str(x) + ".jpg",
                          binary)  # Save binary image to default directory
        currentPhoto = binary
        self.t.destroy()
        self.update_idletasks()
        self.showCurrent()
        self.update_idletasks()

        labels = label(binary)
        self.labelCurrentAction["text"] = "Analysing Particles"
        self.update_idletasks()
        counter = 0
        areaCount = 0
        Tmin = int(self.entryTmin.get())  # Get size thresholds from text input
        Tmax = int(self.entryTmax.get())
        ################################################################################################
        # Tmin = 10
        # Tmax = 300
        for region in regionprops(labels):  # Iterate through particles in the binary image
            if region.area <= Tmax and region.area >= Tmin:
                counter = counter + 1                # Count number of particles found
                areaCount = areaCount + region.area  # Sum area of all particles
        average = areaCount / counter  # Calculate average area
        if x == 1:
            self.im1Count["text"] = counter
            self.im1Average["text"] = round(average, 5)  # Display average image 1
            counter1 = counter
            average1 = average
        if x == 2:
            self.im2Count["text"] = counter
            self.im2Average["text"] = round(average, 5)  # Display average image 2
            counter2 = counter
            average2 = average
        if x == 3:
            self.im3Count["text"] = counter
            self.im3Average["text"] = round(average, 5)  # Display average image 3
            counter3 = counter
            average3 = average
        print(counter)
        average = areaCount / counter
        # print(average)
        self.t.destroy()
        self.update_idletasks()

    finalCount = (counter1 + counter2 + counter3) / 3       # Calculate final count all images
    finalAverage = (average1 + average2 + average3) / 3     # Calculate final average all images
    self.imFinalCount["text"] = finalCount
    self.imFinalAverage["text"] = round(finalAverage, 3)
    microns = (math.sqrt((finalAverage * 113.0989232) / 3.14159265359)) * 2  # Size approximation
    self.sizeAct["text"] = "~ " + str(round(microns, 3)) + " microns"
    maxCount = max(counter1, counter2, counter3)
    Conf = float(finalCount) / float(maxCount)
    self.Confidence["text"] = str(round(Conf, 3)) + " %"
    print(finalCount)
    # print(maxCount)
    print(Conf)
    self.ConfDisp["bg"] = "red"  # Change confidence colours
    if Conf >= 0.84:
        self.ConfDisp["bg"] = "yellow"
    if Conf >= 0.93:
        self.ConfDisp["bg"] = "green"
    self.labelCurrentAction["text"] = "Complete!"
    self.update_idletasks()
    time.sleep(2)
    self.labelCurrentAction["text"] = "idle"
    self.update_idletasks()
""" import numpy as np import matplotlib.pyplot as plt from skimage import data from skimage.morphology import disk from skimage.filters import rank image = (data.coins()).astype(np.uint16) * 16 selem = disk(20) percentile_result = rank.mean_percentile(image, selem=selem, p0=0.1, p1=0.9) bilateral_result = rank.mean_bilateral(image, selem=selem, s0=500, s1=500) normal_result = rank.mean(image, selem=selem) fig, axes = plt.subplots(nrows=3, figsize=(8, 10)) ax0, ax1, ax2 = axes ax0.imshow(np.hstack((image, percentile_result))) ax0.set_title("Percentile mean") ax0.axis("off") ax1.imshow(np.hstack((image, bilateral_result))) ax1.set_title("Bilateral mean") ax1.axis("off") ax2.imshow(np.hstack((image, normal_result))) ax2.set_title("Local mean")
sigma = 3                                                 # Smoothing factor for Gaussian
green_smooth = ndi.filters.gaussian_filter(green, sigma)  # Perform smoothing

# visualise
plt.imshow(green_smooth, interpolation='none', cmap='gray')
plt.show()

# -------
# Part II
# -------

# Create an adaptive background
struct = ((np.mgrid[:31, :31][0] - 15)**2 +
          (np.mgrid[:31, :31][1] - 15)**2) <= 15**2  # Create a disk-shaped structural element
from skimage.filters import rank             # Import module containing mean filter function
bg = rank.mean(green_smooth, selem=struct)   # Run a mean filter over the image using the disc

# Threshold using created background
green_mem = green_smooth >= bg

# Clean by morphological hole filling
green_mem = ndi.binary_fill_holes(np.logical_not(green_mem))

# Show the result
plt.imshow(green_mem, interpolation='none', cmap='gray')
plt.show()

#%%
#------------------------------------------------------------------------------
def prefilter(self, img, method='median'):
    ps = self.settings.prefilter_settings[method]

    print()
    print('prefiltering :', method)

    if method == 'median':
        radius = ps['median_size']
        # with vigra
        #filtered = vigra.filters.discMedian(img, radius)
        # with skimage
        pref = rank.median(img, disk(radius))
    elif method == 'avg':
        # with skimage
        se = disk(ps['avg_size'])
        pref = rank.mean(img, se)
    elif method == 'bilateral':
        # with skimage
        se = disk(ps['bil_radius'])
        pref = rank.mean_bilateral(img, se,
                                   s0=ps['bil_lower'], s1=ps['bil_upper'])
    elif method == 'denoise_bilateral':
        #skimage.filters.denoise_bilateral(image, win_size=5, sigma_range=None, sigma_spatial=1,
        pref = restoration.denoise_bilateral(img, ps['win_size'],
                                             ps['sigma_signal'],
                                             ps['sigma_space'], ps['bins'],
                                             mode='constant', cval=0,
                                             multichannel=False)
    elif method == 'close_rec':
        se = disk(ps['close_size'])
        dil = morphology.dilation(img, se)
        rec = morphology.reconstruction(dil, img, method='erosion')
        # reconstruction gives back a float image (for whatever reason).
        pref = rec.astype(dil.dtype)
    elif method == 'denbi_clorec':
        temp = restoration.denoise_bilateral(img, ps['win_size'],
                                             ps['sigma_signal'],
                                             ps['sigma_space'], ps['bins'],
                                             mode='constant', cval=0,
                                             multichannel=False)
        temp = 255 * temp
        temp = temp.astype(img.dtype)
        se = disk(ps['close_size'])
        dil = morphology.dilation(temp, se)
        rec = morphology.reconstruction(dil, temp, method='erosion')
        # reconstruction gives back a float image (for whatever reason).
        pref = rec.astype(img.dtype)
    elif method == 'denbi_asfrec':
        temp = restoration.denoise_bilateral(img, ps['win_size'],
                                             ps['sigma_signal'],
                                             ps['sigma_space'], ps['bins'],
                                             mode='constant', cval=0,
                                             multichannel=False)
        temp = 255 * temp
        temp = temp.astype(img.dtype)
        se = disk(ps['close_size'])
        dil = morphology.dilation(temp, se)
        rec = morphology.reconstruction(dil, temp, method='erosion')
        se = disk(ps['open_size'])
        ero = morphology.erosion(rec, se)
        rec2 = morphology.reconstruction(ero, rec, method='dilation')
        # reconstruction gives back a float image (for whatever reason).
        pref = rec2.astype(img.dtype)
    elif method == 'med_denbi_asfrec':
        if ps['median_size'] > 1:
            radius = ps['median_size']
            pref = rank.median(img, disk(radius))
        else:
            pref = img
        temp = restoration.denoise_bilateral(pref, ps['win_size'],
                                             ps['sigma_signal'],
                                             ps['sigma_space'], ps['bins'],
                                             mode='constant', cval=0,
                                             multichannel=False)
        temp = 255 * temp
        temp = temp.astype(img.dtype)
        if ps['close_size'] > 0:
            se = disk(ps['close_size'])
            dil = morphology.dilation(temp, se)
            rec = morphology.reconstruction(dil, temp, method='erosion')
        else:
            rec = temp
        if ps['open_size'] > 0:
            se = disk(ps['open_size'])
            ero = morphology.erosion(rec, se)
            rec2 = morphology.reconstruction(ero, rec, method='dilation')
        else:
            rec2 = rec
        # reconstruction gives back a float image (for whatever reason).
        pref = rec2.astype(img.dtype)

    return pref
camera1 = exposure.equalize_adapthist(camera1)
camera1 = exposure.rescale_intensity(camera1)
#camera1 = exposure.adjust_gamma(camera1)
camera1 = exposure.adjust_sigmoid(camera1)
#camera1 = exposure.adjust_log(camera1)

#noise = np.random.random(camera1.shape)
#noisy_image = camera1
#noisy_image[noise > 0.98] = 1
#noisy_image[noise < 0.02] = 0
#camera1 = median(noisy_image, disk(1))

#selem = disk(30.0)
#camera1 = rank.equalize(camera1, selem=selem)
#plt.imshow(camera1, cmap='gray', interpolation='nearest')

camera = rgb2gray(camera1)
camera = mean(camera, disk(1))
#camera = mean_bilateral(camera, disk(10))
camera = median(camera, disk(1))

from scipy.misc import imsave

print(camera)
val = filters.threshold_otsu(camera)
camera = closing(camera > val, square(3))
camera = opening(camera, square(2))
#camera = closing(camera, square(3))
#camera = opening(camera, square(2))
#camera = opening(camera, square(2))
#camera = mean(camera, disk(1))
#plt.imshow(camera, cmap='gray', interpolation='nearest')
import skimage
import matplotlib.pyplot as plt
from skimage import io
from skimage.exposure import histogram, equalize_hist
from skimage.util import random_noise
import numpy as np
from AMF import AMF
from skimage.filters import rank, median
from skimage.morphology import disk

noise_org = io.imread("../pic/NOISE.bmp", as_gray=True)
noise_roi = noise_org[100:200, 100:200]

selem = disk(3)
noise_mean = rank.mean(noise_roi, selem=selem)
hist, hist_centers = histogram(noise_org, nbins=256, normalize=True)
noise_median = median(noise_roi, selem=selem)

fig, ax = plt.subplots(ncols=3, nrows=2, figsize=(10, 5))
ax[0, 0].imshow(noise_org, cmap=plt.cm.gray)
ax[0, 0].axis('off')
ax[0, 0].set_title("original")
ax[0, 1].imshow(noise_roi, cmap=plt.cm.gray)
ax[0, 1].axis('off')
ax[0, 1].set_title("ROI")
ax[0, 2].plot(hist_centers, hist, lw=2)
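# Why compare mean against median on a noisy ROI? A quick sketch with
# synthetic salt-and-pepper noise (the test image and noise level are
# illustrative): the mean smears impulses into their neighbours, while the
# median rejects isolated outliers outright.
import numpy as np
from skimage import data
from skimage.util import random_noise, img_as_ubyte
from skimage.filters import rank, median
from skimage.morphology import disk

clean = data.camera()
noisy = img_as_ubyte(random_noise(clean, mode='s&p', amount=0.05))

selem = disk(3)
by_mean = rank.mean(noisy, selem=selem)
by_median = median(noisy, selem=selem)

for name, out in [('mean', by_mean), ('median', by_median)]:
    # mean absolute error against the clean image
    print(name, np.abs(out.astype(float) - clean).mean())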
from skimage.io import imread, imshow, imsave, show
from skimage.filters import rank
from skimage.morphology import disk
from skimage.color import rgb2gray

if __name__ == '__main__':
    selem = disk(20)
    mon_image = imread("./test_ng.png")
    mon_image_nv = rgb2gray(mon_image)
    im_mean = rank.mean(mon_image_nv, selem=selem)
    imshow(im_mean)
    show()
    imsave("./mean_sk.png", im_mean)
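# One caveat worth flagging for the script above: rgb2gray returns a float
# image in [0, 1], while the rank filters work on integer histograms, so
# skimage will warn (and recent versions refuse) on float input. A minimal
# sketch of the conversion step, reusing the illustrative file names:
from skimage.io import imread, imsave
from skimage.color import rgb2gray
from skimage.filters import rank
from skimage.morphology import disk
from skimage.util import img_as_ubyte

img = imread("./test_ng.png")
gray = img_as_ubyte(rgb2gray(img))    # floats in [0, 1] -> uint8 in [0, 255]
smoothed = rank.mean(gray, selem=disk(20))
imsave("./mean_sk.png", smoothed)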
frequencies remain untouched.
"""
import numpy as np
import matplotlib.pyplot as plt

from skimage import data
from skimage.morphology import disk
from skimage.filters import rank

image = data.coins().astype(np.uint16) * 16
selem = disk(20)

percentile_result = rank.mean_percentile(image, selem=selem, p0=.1, p1=.9)
bilateral_result = rank.mean_bilateral(image, selem=selem, s0=500, s1=500)
normal_result = rank.mean(image, selem=selem)

fig, axes = plt.subplots(nrows=2, ncols=2, figsize=(8, 10),
                         sharex=True, sharey=True)
ax = axes.ravel()

titles = ['Original', 'Percentile mean', 'Bilateral mean', 'Local mean']
imgs = [image, percentile_result, bilateral_result, normal_result]
for n in range(len(imgs)):
    ax[n].imshow(imgs[n])
    ax[n].set_title(titles[n])
    ax[n].set_adjustable('box-forced')
    ax[n].axis('off')
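# To make the two variants concrete: mean_percentile averages only pixels
# whose rank in the local histogram falls between p0 and p1, and
# mean_bilateral averages only neighbours whose grey value lies in
# [g - s0, g + s1] around the centre value g. A toy sketch (values chosen
# for illustration): one bright outlier on a flat background.
import numpy as np
from skimage.filters import rank
from skimage.morphology import disk

img = np.full((7, 7), 100, dtype=np.uint16)
img[3, 3] = 10000

selem = disk(2)
plain = rank.mean(img, selem=selem)
trimmed = rank.mean_percentile(img, selem=selem, p0=.1, p1=.9)  # drops rank extremes
edges = rank.mean_bilateral(img, selem=selem, s0=50, s1=50)     # drops far grey values

# The outlier drags the plain mean up near (3, 3); the trimmed and
# bilateral means there stay close to the background value.
print(plain[3, 2], trimmed[3, 2], edges[3, 2])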
# Imports assumed by this function (not shown in the original fragment).
import numpy as np
from scipy import ndimage
from skimage import color
from skimage.filters.rank import mean, minimum, maximum
from skimage.morphology import disk


def compute_local_features(retinal_image):
    """Local mean/min/max/std of six colour channels around each vessel
    segment, at two scales derived from the local vessel diameter."""
    rgb = retinal_image.preprocessed_image
    hsv = color.rgb2hsv(rgb)
    channels = {
        'red': rgb[:, :, 0],
        'green': rgb[:, :, 1],
        'blue': rgb[:, :, 2],
        'hue': hsv[:, :, 0],
        'saturation': hsv[:, :, 1],
        'value': hsv[:, :, 2],
    }
    names = list(channels)
    shape = retinal_image.labels.shape
    scales = ('large', 'small')

    # Accumulators, indexed as acc[stat][scale][channel]. 'sq_mean' holds the
    # local mean of the squared channel (E[x^2]) and 'mean_sq' the squared
    # local mean ((E[x])^2); both feed the local standard deviation below.
    stats = ('mean', 'min', 'max', 'sq_mean', 'mean_sq')
    acc = {s: {sc: {n: np.zeros(shape) for n in names} for sc in scales}
           for s in stats}

    # Structuring-element radius per segment: the mean vessel diameter along
    # the segment's skeleton, estimated from the distance transform.
    max_labels = np.amax(retinal_image.labels)
    distance = ndimage.distance_transform_edt(retinal_image.vessels)
    diameter = distance * retinal_image.skeletonWithoutCrossings

    for i in range(1, max_labels + 1):
        region = retinal_image.labels == i
        rows, cols = np.nonzero(region)
        disk_diameter = np.mean(diameter[region])
        selems = {'small': disk(disk_diameter),
                  'large': disk(2 * disk_diameter)}

        # Filter each channel over the whole image, then keep only the
        # responses at this segment's pixels.
        for scale, selem in selems.items():
            for name, ch in channels.items():
                acc['mean'][scale][name][rows, cols] = mean(ch, selem)[rows, cols]
                acc['min'][scale][name][rows, cols] = minimum(ch, selem)[rows, cols]
                acc['max'][scale][name][rows, cols] = maximum(ch, selem)[rows, cols]
                acc['sq_mean'][scale][name][rows, cols] = \
                    mean(ch ** 2, selem)[rows, cols]
                acc['mean_sq'][scale][name][rows, cols] = \
                    mean(ch, selem)[rows, cols] ** 2

        print(i, ':', disk_diameter)

    # Local standard deviation: sqrt(|(E[x])^2 - E[x^2]|), whose magnitude
    # equals the usual variance E[x^2] - (E[x])^2.
    std = {sc: {n: np.sqrt(np.abs(acc['mean_sq'][sc][n] - acc['sq_mean'][sc][n]))
                for n in names} for sc in scales}

    def per_channel(stat, scale):
        return [acc[stat][scale][n] for n in names]

    # Return order: mean, minimum, maximum (large scale then small for each),
    # followed by the large- and small-scale standard deviations; channels are
    # ordered red, green, blue, hue, saturation, value throughout.
    return tuple(per_channel('mean', 'large') + per_channel('mean', 'small')
                 + per_channel('min', 'large') + per_channel('min', 'small')
                 + per_channel('max', 'large') + per_channel('max', 'small')
                 + [std['large'][n] for n in names]
                 + [std['small'][n] for n in names])
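# For reference, the standard-deviation trick used above is the one-pass
# identity Var(x) = E[x^2] - (E[x])^2; the code wraps the difference in an
# absolute value because it writes the subtraction in the opposite order and
# because the quantised rank means can make it marginally negative. A quick
# numerical sanity check of the identity itself:
import numpy as np

rng = np.random.default_rng(0)
x = rng.integers(0, 256, size=1000).astype(float)

var_direct = np.var(x)                             # E[(x - E[x])^2]
var_onepass = np.mean(x ** 2) - np.mean(x) ** 2    # E[x^2] - (E[x])^2
print(np.isclose(var_direct, var_onepass))         # True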