Example #1
def rescale_main(args):
    nb_workers = _check_workers(args)

    # Find all TIFFs
    paths, filenames = tifs_in_dir(args.input)
    verbose_print(args, f"Found {len(paths)} TIFFs")

    # Load histogram and compute percentile from CDF
    df = pd.read_csv(args.histogram)
    bins = df['intensity'].to_numpy()
    counts = df['count'].to_numpy()
    total = counts.sum()
    cdf = np.cumsum(counts)
    target = total * (args.p / 100)
    # Index of the CDF value closest to the target percentile
    idx = int(np.argmin(np.abs(cdf - target)))
    max_val = bins[idx]
    # min_val, max_val = bins[0], bins[-1]

    # Make the output folder
    os.makedirs(args.output, exist_ok=True)

    # Rescale images in parallel
    verbose_print(args, f"Rescaling images with {nb_workers} workers:")
    args_list = []
    for path, filename in zip(paths, filenames):
        args_list.append((path, args.t, max_val, args.output, filename, args.c))
    with multiprocessing.Pool(nb_workers) as pool:
        list(tqdm.tqdm(pool.imap(_rescale_image, args_list), total=len(paths)))

    verbose_print(args, f"Rescaling done!")
Example #2
def old_preprocessing_main(args):
    if args.t is None and args.s is None and args.k is None:
        raise ValueError('No preprocessing tasks were specified')

    verbose_print(args, f"Preprocessing {args.input}")

    if os.path.isdir(args.input):
        # Load series of 2D TIFFs and process in parallel
        paths, filenames = tifs_in_dir(args.input)

        img = io.imread(paths[0])
        shape = (len(paths), *img.shape)
        if args.float:
            dtype = 'float32'
        else:
            dtype = img.dtype

        arr = io.new_zarr(args.zarr, shape=shape, dtype=dtype, chunks=tuple(args.c))

        args_list = []
        for i, (path, _) in enumerate(zip(paths, filenames)):
            args_list.append((args, path, arr, i))

        with multiprocessing.Pool(multiprocessing.cpu_count()) as pool:
            list(tqdm.tqdm(pool.imap_unordered(_preprocess_image2d, args_list), total=len(args_list)))

        if args.p is not None:
            before = io.imread(paths[args.p])
            after = arr[args.p]

    elif os.path.isfile(args.input):
        # Load 3D TIFF and process in memory
        img = io.imread(args.input)
        # Keep reference to before image if plotting
        if args.p is not None:
            before = np.copy(img[args.p])
        verbose_print(args, f"Loaded image: {img.shape} {img.dtype}")
        img = preprocess_image3d(args, img)
        if args.p is not None:
            after = np.copy(img[args.p])

    else:
        raise ValueError('Input is not a valid directory or file')

    # Show A/B plot
    if args.p is not None:
        plt.subplot(121)
        plt.imshow(before)
        plt.title('Before')
        plt.subplot(122)
        plt.imshow(after)
        plt.title('After')
        plt.show()

    verbose_print(args, f"Preprocessing done!")
Example #3
def stack_main(args):
    verbose_print(args, f'Stacking images in {args.input}')

    paths, filenames = utils.tifs_in_dir(args.input)
    verbose_print(args, f'Found {len(paths)} images')

    # Allocate the output volume from the first image's shape and dtype,
    # then fill it slice by slice
    img0 = io.imread(paths[0])
    shape2d, dtype = img0.shape, img0.dtype
    img = np.empty((len(paths), *shape2d), dtype)
    for z, path in tqdm(enumerate(paths), total=len(paths)):
        img[z] = io.imread(path)

    io.imsave(args.output, img, compress=1)

    verbose_print(args, f'Stacking done!')
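A hypothetical way to invoke stack_main, assuming the surrounding CLI builds an argparse namespace with input, output, and a verbose flag (the flag name is a guess):

import argparse

parser = argparse.ArgumentParser(description='Stack a 2D TIFF series into a single 3D TIFF')
parser.add_argument('input', help='folder containing the 2D TIFF series')
parser.add_argument('output', help='path for the stacked 3D TIFF')
parser.add_argument('-v', '--verbose', action='store_true', help='print progress messages')
stack_main(parser.parse_args())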
Example #4
def denoise_main(args):
    # Initial setup
    nb_workers = _check_workers(args)
    os.makedirs(args.output, exist_ok=True)

    # Find all TIFFs
    paths, _ = tifs_in_dir(args.input)
    verbose_print(args, f"Found {len(paths)} TIFFs")

    # Curry denoising function for pmap
    f = partial(denoise2d, sigma=args.s, wavelet=args.w)
    g = partial(read_process_write, f=f, output=args.output, compress=args.c)

    # Parallel read, denoise, write
    verbose_print(args, f"Denoising with {nb_workers} workers:")
    with multiprocessing.Pool(nb_workers) as pool:
        list(tqdm.tqdm(pool.imap(g, paths), total=len(paths)))

    verbose_print(args, f"Denoising done!")
Example #5
def downsample_main(args):
    if args.n is None:
        nb_workers = multiprocessing.cpu_count()
    else:
        nb_workers = args.n

    verbose_print(args,
                  f'Downsampling {args.input} with factors {args.factor}')

    if args.tiff:
        os.makedirs(args.output, exist_ok=True)
        paths, filenames = utils.tifs_in_dir(args.input)

        args_list = []
        for path, filename in zip(paths, filenames):
            args_list.append((path, args.factor, args.output, filename))
        with multiprocessing.Pool(nb_workers) as pool:
            pool.starmap(read_downsample_write, args_list)

        # for i, (path, filename) in enumerate(zip(paths, filenames)):
        #     verbose_print(args, f'Downsampling {filename}')
        #     arr = io.imread(path)
        #     if isinstance(args.factor, int):
        #         factors = tuple(args.factor for _ in range(arr.ndim))
        #     else:
        #         factors = tuple(args.factor)
        #     data = downsample(arr, factors)
        #     output = os.path.join(args.output, filename)
        #     io.imsave(output, data, compress=3)

    else:
        arr = io.open(args.input, mode='r')
        if isinstance(args.factor, int):
            factors = tuple(args.factor for _ in range(arr.ndim))
        else:
            factors = tuple(args.factor)
        data = downsample(arr, factors)
        verbose_print(args, f'Writing result to {args.output}')
        io.imsave(args.output, data, compress=3)

    verbose_print(args, f'Downsampling done!')
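The worker passed to starmap is not shown, but the commented-out serial loop suggests its body. A sketch under that assumption:

def read_downsample_write(path, factor, output, filename):
    # Mirror of the serial loop: broadcast a scalar factor across all axes,
    # downsample, and save under the same filename
    arr = io.imread(path)
    if isinstance(factor, int):
        factors = tuple(factor for _ in range(arr.ndim))
    else:
        factors = tuple(factor)
    data = downsample(arr, factors)
    io.imsave(os.path.join(output, filename), data, compress=3)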
Example #6
def imread_folder(path, nb_workers):
    """
    Finds all TIFF images in a folder and loads them into a single array.

    **Note:** all images must be the same shape to be able to stack them.

    Parameters
    ----------
    path : str
        Path to directory with TIFF images in alphabetical order
    nb_workers : int
        Number of parallel processes to use in reading images

    Returns
    -------
    data : ndarray
        Image array
    """
    paths, _ = tifs_in_dir(path)
    data = imread_parallel(paths, nb_workers)
    return data
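A minimal sketch of imread_parallel, assuming every TIFF has the same shape as the docstring requires:

def imread_parallel(paths, nb_workers):
    # pool.map preserves input order, so slices stack in alphabetical order
    with multiprocessing.Pool(nb_workers) as pool:
        images = pool.map(io.imread, paths)
    return np.stack(images)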
Example #7
def histogram_main(args):
    # Find all TIFFs
    paths, _ = tifs_in_dir(args.input)
    verbose_print(args, f"Found {len(paths)} TIFFs")

    # Estimate histogram
    sample_paths = downsample_paths(paths, step=args.s)
    verbose_print(args, f"Calculating histogram from {len(sample_paths)} images:")
    hist, bin_centers = estimate_histogram(sample_paths)

    # Show plot
    if args.plot:
        plt.plot(bin_centers, hist)
        plt.show()

    # Build CSV
    df = pd.DataFrame({'intensity': bin_centers, 'count': hist})
    df.to_csv(args.output, index=False)
    verbose_print(args, f"Histogram saved to {args.output}")

    verbose_print(args, f"Histogram done!")
Example #8
def convert_main(args):
    nb_workers = _check_workers(args)

    verbose_print(args, f"Converting {args.input} to Zarr")

    # Find all TIFFs
    paths, filenames = tifs_in_dir(args.input)
    verbose_print(args, f"Found {len(paths)} TIFFs")
    paths_chunked = [paths[pos:pos + args.c[0]] for pos in range(0, len(paths), args.c[0])]

    img = io.imread(paths[0])
    shape = (len(paths), *img.shape)
    dtype = img.dtype
    chunks = tuple(args.c)
    arr = io.new_zarr(args.output, shape=shape, dtype=dtype, chunks=chunks)

    verbose_print(args, f"Writiing to {args.output}")
    args_list = []
    for i, paths_batch in enumerate(paths_chunked):
        args_list.append((paths_batch, i, chunks[0], arr))
    with multiprocessing.Pool(nb_workers) as pool:
        list(tqdm.tqdm(pool.imap(_convert_batch, args_list), total=len(args_list)))

    verbose_print(args, f"Conversion done!")
Example #9
def contrast_main(args):
    # Initial setup
    nb_workers = _check_workers(args)

    if args.k is None:
        verbose_print(args, f"Performing histogram equalization with default kernel size")
        kernel_size = None
    else:
        verbose_print(args, f"Performing histogram equalization with kernel size {args.k}")
        kernel_size = args.k

    # Find all TIFFs
    paths, filenames = tifs_in_dir(args.input)
    verbose_print(args, f"Found {len(paths)} TIFFs")

    # Make output folder
    os.makedirs(args.output, exist_ok=True)

    # Apply adaptive histogram equalization (CLAHE) to each image serially
    for path, filename in tqdm.tqdm(zip(paths, filenames), total=len(paths)):
        img = io.imread(path)
        adjusted = equalize_adapthist(img, kernel_size=kernel_size).astype(np.float32)
        io.imsave(os.path.join(args.output, filename), adjusted, compress=args.c)

    verbose_print(args, f"Contrast done!")