def count_droplets_doh(img):
    """Count droplets in *img* via Determinant-of-Hessian blob detection.

    The histogram is contrast-stretched before detection so faint droplets
    survive thresholding, mirroring ``count_droplets_log``.

    Args:
        img: grayscale image array.

    Returns:
        int: number of detected blobs.
    """
    stats = ImageStats(img)
    s = stretch_composite_histogram(img, stats)
    # Detect on the stretched image: the original called doh() on the raw
    # input, which made the stats/stretch above dead work and disagreed with
    # both the debug figure (drawn over `s`) and count_droplets_log.
    blobs = doh(s)
    if DEBUG:
        # Title follows count_droplets_log's "<detector> <count>" convention.
        make_circles_fig(s, blobs, title=f"doh {len(blobs)}").show()
    return len(blobs)
def count_droplets_log(img):
    """Count droplets in *img* using Laplacian-of-Gaussian blob detection
    on a contrast-stretched copy of the image.

    Args:
        img: grayscale image array.

    Returns:
        int: number of detected blobs.
    """
    image_stats = ImageStats(img)
    stretched = stretch_composite_histogram(img, image_stats)
    detected = log(stretched)
    if DEBUG:
        debug_fig = make_circles_fig(stretched, detected, title=f"log {len(detected)}")
        debug_fig.show()
    return len(detected)
def main():
    """Load the simulation screenshot, preprocess it, and time DoG blob
    detection, printing the elapsed time and the blob count."""
    t0 = time.monotonic()
    screenshot = Path(os.path.dirname(os.path.realpath(__file__))) / Path(
        "../simulation/screenshot.png"
    )
    raw = imread(screenshot, as_gray=True)
    smoothed = gaussian(raw, sigma=1)
    stretched = stretch_composite_histogram(smoothed)
    blobs = blob_dog(
        stretched,
        max_sigma=10,
        min_sigma=1,
        threshold=0.001,
        overlap=0.5,
        sigma_ratio=1.01,
    )
    print("dog time ", time.monotonic() - t0)
    # blob_dog returns sigma in column 2; scale by sqrt(2) to get the radius.
    blobs[:, 2] *= sqrt(2)
    print(len(blobs))
def preprocess_times():
    """Benchmark the preprocessing stages (float conversion, contrast
    stretching, Gaussian blur, torch tensor conversion) over ``REPEATS``
    runs, in two pipeline orders, printing each stage's mean wall time.

    The original implementation duplicated the whole timing loop for the
    second ordering; the shared machinery is factored into ``_run_pipeline``.
    """
    image_pth = Path(os.path.dirname(os.path.realpath(__file__))) / Path(
        "../simulation/screenshot.png"
    )
    # image_pth = Path(os.path.dirname(os.path.realpath(__file__))) / Path(
    #     "../data/RawData/R109_60deg_6-8-25_OHP-LS000252.T000.D000.P000.H000.PLIF1.TIF"
    # )
    img_cpu = io.imread(image_pth)

    # Named stages; each takes an image and returns the transformed image.
    stages = {
        "img_as_float": img_as_float,
        "stretch": stretch_composite_histogram,
        "gaussian": lambda a: gaussian(a, sigma=1),
        "torch": lambda a: torch.from_numpy(a).float(),
    }

    def _run_pipeline(order):
        # Chain the stages in `order`, starting from the raw image on every
        # repeat, and collect per-stage wall-clock times; return mean times.
        times = {name: [] for name in order}
        for _ in range(REPEATS):
            img = img_cpu
            for name in order:
                start = time.monotonic()
                img = stages[name](img)
                times[name].append(time.monotonic() - start)
        return {name: np.mean(samples) for name, samples in times.items()}

    # Pass 1: convert to float before stretching.
    means = _run_pipeline(["img_as_float", "stretch", "gaussian", "torch"])
    print(f"img_as_float times {means['img_as_float']}")
    print(f"stretch times {means['stretch']}")
    print(f"gaussian times {means['gaussian']}")
    print(f"torch times {means['torch']}")
    print()

    # Pass 2: stretch the raw (integer) image first, then convert to float.
    # The print order below intentionally matches the original output.
    means = _run_pipeline(["stretch", "img_as_float", "gaussian", "torch"])
    print(f"stretch times {means['stretch']}")
    print(f"gaussian times {means['gaussian']}")
    print(f"img_as_float times {means['img_as_float']}")
    print(f"torch times {means['torch']}")
def transform(self, img):
    """Preprocess a raw frame: convert to float, Gaussian-smooth (sigma=1),
    contrast-stretch, and return it as a float32 torch tensor."""
    as_float = img_as_float(img)
    smoothed = gaussian(as_float, sigma=1)
    stretched = stretch_composite_histogram(smoothed)
    return torch.from_numpy(stretched).float()
def test_dog(img_orig, show_fig=True):
    """Exploratory diagnostic figure for DoG blob detection on ``img_orig``.

    Builds a 5x2 grid: log-power spectrum (with PCA axes of the thresholded
    spectrum), power histogram, log-scaled image, DoG blobs on the log
    image, and the contrast-stretched image. Further panels (stretched
    blobs, denoised variants) are currently commented out.

    Args:
        img_orig: grayscale input image (2-D array).
        show_fig: if True display the figure, else save it to
            ``processed_images/``.
    """
    fig, ((ax1, ax2), (ax3, ax4), (ax5, ax6), (ax7, ax8), (ax9, ax10)) = plt.subplots(nrows=5, ncols=2, figsize=(24, 48))
    fig.tight_layout()
    ax1.axis("off")
    # ax2.axis("off")
    ax3.axis("off")
    ax4.axis("off")
    ax5.axis("off")
    ax6.axis("off")
    ax7.axis("off")
    ax8.axis("off")
    # ax9.axis("off")
    # ax10.axis("off")

    # Centered 2-D FFT; |F| is normalized by the mean spectral power so the
    # log-power plot is independent of overall image scale.
    im_fft = np.fft.fftshift(fftpack.fft2(img_orig))
    power = np.abs(im_fft) / (np.sum(np.abs(im_fft)**2) / im_fft.size)
    ax1.imshow(power, norm=LogNorm())
    ax1.set_title("log power")

    # PCA over (row, col, power) samples of the significant spectrum to find
    # its principal orientation; the scaled principal axes are drawn on ax1.
    threshed_power = power > 1e-5
    power_vals = np.hstack(
        [np.argwhere(threshed_power), power[threshed_power, np.newaxis]])
    pca = decomposition.PCA(n_components=2)
    pca.fit_transform(power_vals)
    # [::-1] swaps (row, col) -> (x, y) for matplotlib drawing.
    center = list(pca.mean_[:2])[::-1]
    for length, vector in zip(pca.explained_variance_, pca.components_):
        length = np.sqrt(length) * 10  # std-dev, scaled up for visibility
        print(length)
        v = vector[:2][::-1] * length
        draw_vector(center, center + v, ax1)
    ax2.hist(np.log(power.ravel()), bins=256, density=True)
    ax2.set_title("power distribution")

    # Log-scaled, Gaussian-smoothed image and the DoG blobs detected on it.
    filtered_img = gaussian(img_orig, sigma=1)
    log_img = np.log(filtered_img)
    print(log_img.min(), log_img.max())
    ax3.imshow(log_img, cmap="gray")
    ax3.set_title("log scaled")
    blobs = blob_dog(log_img, max_sigma=10, min_sigma=5, threshold=1, overlap=0.8)
    # blob_dog returns sigma in column 2; multiply by sqrt(2) to get radius.
    blobs[:, 2] = blobs[:, 2] * sqrt(2)
    ax4.imshow(log_img, cmap="gray")
    ax4.set_title(f"log blobs {len(blobs)}")
    for y, x, r in blobs:
        c = plt.Circle((x, y), r, color="red", linewidth=0.5, fill=False)
        ax4.add_patch(c)

    # Contrast-stretched view. NOTE(review): "constrast" typo is part of the
    # rendered title string, so it is left untouched here.
    s2 = stretch_composite_histogram(filtered_img)
    ax5.imshow(s2, cmap="gray")
    ax5.set_title("constrast stretched")
    # #
    # blobs = blob_dog(s2, max_sigma=10, min_sigma=1, threshold=0.02, overlap=0.8)
    # blobs[:, 2] = blobs[:, 2] * sqrt(2)
    # ax6.imshow(s2, cmap="gray")
    # ax6.set_title(f"contrast stretched blobs {len(blobs)}")
    # for y, x, r in blobs:
    #     c = plt.Circle((x, y), r, color="red", linewidth=0.5, fill=False)
    #     ax6.add_patch(c)
    # ax10.hist([r for _, _, r in blobs], bins=256, density=True)
    # ax10.set_title("constrast stretched bubble distribution")
    # denoised = denoise(s2)
    # ax7.imshow(denoised, cmap="gray")
    # ax7.set_title("n2n denoised")
    # blobs = blob_dog(denoised, max_sigma=10, min_sigma=5, threshold=0.02, overlap=0.5)
    # blobs[:, 2] = blobs[:, 2] * sqrt(2)
    # ax8.imshow(denoised, cmap="gray")
    # ax8.set_title(f"denoised blobs {len(blobs)}")
    # for y, x, r in blobs:
    #     c = plt.Circle((x, y), r, color="red", linewidth=0.5, fill=False)
    #     ax8.add_patch(c)
    # # print([r for _, _, r in blobs])
    # ax9.hist([r for _, _, r in blobs], bins=256, density=True)
    # ax9.set_title("denoised bubble distribution")

    if show_fig:
        fig.show()
    else:
        # NOTE(review): `img_fp` is not defined anywhere in this scope, so
        # show_fig=False raises NameError. Presumably it was meant to be a
        # filename derived from the input image — confirm against callers.
        fig.savefig(f"processed_images/{img_fp}.png", dpi=96)