Example #1
import random

import pytest

# `tools` is the project module under test; it provides is_even()


def test_is_even():
    even_number = random.randrange(2, 1000, 2)
    odd_number = random.randrange(1, 1001, 2)
    with pytest.raises(NotImplementedError):
        assert tools.is_even(0)
    assert tools.is_even(even_number)
    assert not tools.is_even(odd_number)
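
For reference, here is a minimal sketch of what is_even could look like, inferred only from the test above (the NotImplementedError on 0 comes from the test; this is an assumption, not necessarily the project's actual implementation):

def is_even(number):
    # The test above expects 0 to be rejected rather than classified
    if number == 0:
        raise NotImplementedError("is_even is not defined for 0 here")
    return number % 2 == 0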
Example #2
    def __get_stacks(self, index):
        centre_z = self.batches[index][0].z

        half_cube_depth = self.num_planes_needed_for_cube // 2
        min_plane = centre_z - half_cube_depth

        if is_even(self.num_planes_needed_for_cube):
            # WARNING: not centered because even
            max_plane = centre_z + half_cube_depth
        else:
            # centered
            max_plane = centre_z + half_cube_depth + 1

        signal_stack = np.empty((
            self.num_planes_needed_for_cube,
            self.image_height,
            self.image_width,
        ))
        background_stack = np.empty_like(signal_stack)
        for plane, plane_path in enumerate(
                self.signal_planes[min_plane:max_plane]):
            signal_stack[plane] = tifffile.imread(plane_path)

        for plane, plane_path in enumerate(
                self.background_planes[min_plane:max_plane]):
            background_stack[plane] = tifffile.imread(plane_path)

        return signal_stack, background_stack
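
To make the even/odd window arithmetic above concrete, here is a small standalone sketch with made-up values (centre plane 50, cube depths 20 and 21; the numbers are illustrative, not taken from the source):

centre_z = 50
for num_planes in (20, 21):
    half = num_planes // 2
    min_plane = centre_z - half
    if num_planes % 2 == 0:
        # planes 40..59: one more plane below the centre than above it
        max_plane = centre_z + half
    else:
        # planes 40..60: the centre plane sits exactly in the middle
        max_plane = centre_z + half + 1
    assert max_plane - min_plane == num_planes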
Example #3
def pad_center_2d(img, x_size=None, y_size=None, pad_mode="edge"):
    """
    Pads the edges of a 2D image and returns a larger image. If a requested
    dimension is not larger than the original dimension, that axis is left
    unchanged.
    :param img: 2D input image
    :param x_size: New length in x (default: None, which does nothing)
    :param y_size: New length in y (default: None, which does nothing)
    :param pad_mode: Padding mode passed to np.pad (default: "edge")
    :return: New, larger array
    """

    y, x = img.shape

    #  TODO: simplify

    if x_size is None:
        x_pad = 0
    elif x_size <= x:
        x_pad = 0
    else:
        x_pad = x_size - x

    if y_size is None:
        y_pad = 0
    elif y_size <= y:
        y_pad = 0
    else:
        y_pad = y_size - y

    if x_pad > 0:
        if tools.is_even(x_pad):
            x_front = x_back = int(x_pad / 2)
        else:
            x_front = int(x_pad // 2)
            x_back = int(x_front + 1)
    else:
        x_front = x_back = 0

    if y_pad > 0:
        if tools.is_even(y_pad):
            y_front = y_back = int(y_pad / 2)
        else:
            y_front = int(y_pad // 2)
            y_back = int(y_front + 1)
    else:
        y_front = y_back = 0

    return np.pad(img, ((y_front, y_back), (x_front, x_back)), pad_mode)
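
As a quick usage sketch (the array and target sizes are made up, and this assumes pad_center_2d and the tools module it relies on are importable), padding a 3x4 image up to 6x7 splits each odd padding amount so that the extra row/column goes to the back edge:

import numpy as np

img = np.arange(12).reshape(3, 4)               # y = 3, x = 4
padded = pad_center_2d(img, x_size=7, y_size=6)
print(padded.shape)                             # (6, 7)
# y_pad = 3 -> 1 row before, 2 rows after
# x_pad = 3 -> 1 column before, 2 columns after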
Example #4
def main(args):

    start_time = datetime.now()

    cells = get_cells(args.paths.cells_file_path)
    if not cells:
        logging.error("No cells found, exiting. Please check your "
                      "cell xml file path: {}"
                      " or verify your cell types "
                      "(maybe use cells-only option to disable)".format(
                          args.paths.cells_file_path))
        raise ValueError("No cells found, exiting. Please check your "
                         "cell xml file path: {}"
                         " or verify your cell types "
                         "(maybe use cells-only option to disable)".format(
                             args.paths.cells_file_path))

    if args.z_pixel_um != args.z_pixel_um_network:
        plane_scaling_factor = args.z_pixel_um_network / args.z_pixel_um
        num_planes_needed_for_cube = round(args.cube_depth *
                                           plane_scaling_factor)
    else:
        num_planes_needed_for_cube = args.cube_depth

    planes_paths = {}
    # Use args.paths for this
    all_channel_ids = args.signal_ch_ids + [args.background_ch_id]
    for idx, planes_paths_file_path in enumerate(args.all_planes_paths):
        channel = all_channel_ids[idx]

        if args.cube_extract_cli:
            channel_list = all_channel_ids
            args.signal_channel = all_channel_ids[0]
        else:
            # only extract those channels that are necessary for classification
            channel_list = [args.signal_channel, args.background_ch_id]
        if channel in channel_list:
            planes_paths[channel] = system.get_sorted_file_paths(
                planes_paths_file_path, file_extension="tif")

    if num_planes_needed_for_cube > len(planes_paths[0]):
        raise StackSizeError("The number of planes provided is not sufficient "
                             "for any cubes to be extracted. Please check the "
                             "input data")

    first_plane = tifffile.imread(list(planes_paths.values())[0][0])

    planes_shape = first_plane.shape
    brain_depth = len(list(planes_paths.values())[0])

    # TODO: use to assert all centre planes processed
    center_planes = sorted(list(set([cell.z for cell in cells])))

    # REFACTOR: rename (clashes with different meaning of planes_to_read below)
    planes_to_read = np.zeros(brain_depth, dtype=bool)  # np.bool is removed in recent NumPy

    if tools.is_even(num_planes_needed_for_cube):
        half_nz = num_planes_needed_for_cube // 2
        # WARNING: not centered because even
        for p in center_planes:
            planes_to_read[p - half_nz:p + half_nz] = 1
    else:
        half_nz = num_planes_needed_for_cube // 2
        # centered
        for p in center_planes:
            planes_to_read[p - half_nz:p + half_nz + 1] = 1

    planes_to_read = np.where(planes_to_read)[0]

    if not planes_to_read.size:
        logging.error(
            f"No planes found. You need at least "
            f"{num_planes_needed_for_cube} planes to proceed "
            f"(i.e. the cube z size). "
            f"Brain z dimension is {brain_depth}.",
            stack_info=True,
        )
        raise ValueError(f"No planes found. You need at least "
                         f"{num_planes_needed_for_cube} planes to proceed "
                         f"(i.e. the cube z size). "
                         f"Brain z dimension is {brain_depth}.")
    # TODO: check if needs to flip args.cube_width and args.cube_height
    cells_groups = cell_tools.group_cells_by_z(cells)

    # copies=2 is set because at all times there is a plane queue (deque)
    # and an array passed to `Cube`
    ram_per_process = get_ram_requirement_per_process(
        planes_paths[args.signal_channel][0],
        num_planes_needed_for_cube,
        copies=2,
    )
    n_processes = system.get_num_processes(
        min_free_cpu_cores=args.n_free_cpus,
        ram_needed_per_process=ram_per_process,
        n_max_processes=len(planes_to_read),
        fraction_free_ram=0.2,
        max_ram_usage=system.memory_in_bytes(args.max_ram, "GB"),
    )
    # TODO: don't need to extract cubes from all channels if
    #  n_signal_channels>1
    with ProcessPoolExecutor(max_workers=n_processes) as executor:
        n_planes_per_chunk = len(planes_to_read) // n_processes
        for i in range(n_processes):
            start_idx = i * n_planes_per_chunk
            end_idx = (start_idx + n_planes_per_chunk +
                       num_planes_needed_for_cube - 1)
            if end_idx > planes_to_read[-1]:
                end_idx = None
            sub_planes_to_read = planes_to_read[start_idx:end_idx]

            executor.submit(
                save_cubes,
                cells_groups,
                planes_paths,
                sub_planes_to_read,
                planes_shape,
                args.x_pixel_um,
                args.y_pixel_um,
                args.x_pixel_um_network,
                args.y_pixel_um_network,
                num_planes_for_cube=num_planes_needed_for_cube,
                cube_width=args.cube_width,
                cube_height=args.cube_height,
                cube_depth=args.cube_depth,
                thread_id=i,
                output_dir=args.paths.tmp__cubes_output_dir,
                save_empty_cubes=args.save_empty_cubes,
            )

    total_cubes = system.get_number_of_files_in_dir(
        args.paths.tmp__cubes_output_dir)
    time_taken = datetime.now() - start_time
    logging.info("All cubes ({}) extracted in: {}".format(
        total_cubes, time_taken))
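
The number of planes loaded per cube scales with the ratio between the network's z voxel size and the data's z voxel size; for illustration (made-up voxel sizes, not taken from the source):

cube_depth = 20            # cube depth the network expects, in planes
z_pixel_um = 2.5           # z voxel size of the data, in um
z_pixel_um_network = 5.0   # z voxel size the network was trained on, in um

plane_scaling_factor = z_pixel_um_network / z_pixel_um                  # 2.0
num_planes_needed_for_cube = round(cube_depth * plane_scaling_factor)   # 40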
Example #5
def save_cubes(
    cells,
    planes_paths,
    planes_to_read,
    planes_shape,
    x_pix_um,
    y_pix_um,
    x_pix_um_network,
    y_pix_um_network,
    num_planes_for_cube=20,
    cube_width=50,
    cube_height=50,
    cube_depth=20,
    thread_id=0,
    output_dir="",
    save_empty_cubes=False,
):
    """

    :param cells:
    :param planes_paths:
    :param planes_to_read:
    :param planes_shape:
    :param x_pix_um:
    :param y_pix_um:
    :param x_pix_um_network:
    :param y_pix_um_network:
    :param num_planes_for_cube:
    :param cube_width:
    :param cube_height:
    :param cube_depth:
    :param thread_id:
    :param output_dir:
    :param save_empty_cubes:
    :return:
    """
    channels = list(planes_paths.keys())
    stack_shape = planes_shape + (num_planes_for_cube, )
    stacks = {}
    planes_queues = {}
    for ch in channels:
        stacks[ch] = np.zeros(stack_shape, dtype=np.uint16)
        planes_queues[ch] = deque(maxlen=num_planes_for_cube)
    for plane_idx in tqdm(planes_to_read, desc="Thread: {}".format(thread_id)):
        for ch in channels:
            plane_path = planes_paths[ch][plane_idx]
            planes_queues[ch].append(tifffile.imread(plane_path))
            if len(planes_queues[ch]) == num_planes_for_cube:
                if tools.is_even(num_planes_for_cube):
                    cell_z = int(plane_idx - num_planes_for_cube / 2 + 1)
                else:
                    cell_z = int(plane_idx - floor(num_planes_for_cube) / 2 +
                                 1)

                for j, plane in enumerate(planes_queues[ch]):
                    stacks[ch][:, :, j] = plane

                # ensures no cube_depth planes at the end
                planes_queues[ch].popleft()
                # required since we provide all cells
                # TODO: if len(planes_queues[ch])
                #  < num_planes_for_cube -1: break
                for cell in cells[cell_z]:
                    cube = Cube(
                        cell,
                        ch,
                        stacks,
                        x_pix_um=x_pix_um,
                        y_pix_um=y_pix_um,
                        x_pix_um_network=x_pix_um_network,
                        y_pix_um_network=y_pix_um_network,
                        final_depth=cube_depth,
                        width=cube_width,
                        height=cube_height,
                        depth=num_planes_for_cube,
                    )
                    if not cube.empty or (cube.empty and save_empty_cubes):
                        tifffile.imsave(os.path.join(output_dir, str(cube)),
                                        cube.data)
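
The cell_z arithmetic above can be checked with a small worked example (the depths and plane indices below are illustrative, and a plain modulo check stands in for tools.is_even to keep the sketch self-contained):

from math import floor

# Once the deque holds a full window of planes ending at plane_idx,
# cell_z is the plane that window is (approximately) centred on.
for num_planes_for_cube, plane_idx in ((20, 19), (21, 20)):
    if num_planes_for_cube % 2 == 0:
        cell_z = int(plane_idx - num_planes_for_cube / 2 + 1)
    else:
        cell_z = int(plane_idx - floor(num_planes_for_cube) / 2 + 1)
    print(num_planes_for_cube, plane_idx, cell_z)
# 20 19 10 -> the window covers planes 0..19
# 21 20 10 -> the window covers planes 0..20, centred on plane 10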
Example #6
def main(
    cells,
    cubes_output_dir,
    planes_paths,
    cube_depth,
    cube_width,
    cube_height,
    x_pixel_um,
    y_pixel_um,
    z_pixel_um,
    x_pixel_um_network,
    y_pixel_um_network,
    z_pixel_um_network,
    max_ram,
    n_free_cpus=4,
    save_empty_cubes=False,
):

    start_time = datetime.now()

    if z_pixel_um != z_pixel_um_network:
        plane_scaling_factor = z_pixel_um_network / z_pixel_um
        num_planes_needed_for_cube = round(cube_depth * plane_scaling_factor)
    else:
        num_planes_needed_for_cube = cube_depth

    if num_planes_needed_for_cube > len(planes_paths[0]):
        raise StackSizeError("The number of planes provided is not sufficient "
                             "for any cubes to be extracted. Please check the "
                             "input data")

    first_plane = tifffile.imread(list(planes_paths.values())[0][0])

    planes_shape = first_plane.shape
    brain_depth = len(list(planes_paths.values())[0])

    # TODO: use to assert all centre planes processed
    center_planes = sorted(list(set([cell.z for cell in cells])))

    # REFACTOR: rename (clashes with different meaning of planes_to_read below)
    planes_to_read = np.zeros(brain_depth, dtype=bool)  # np.bool is removed in recent NumPy

    if tools.is_even(num_planes_needed_for_cube):
        half_nz = num_planes_needed_for_cube // 2
        # WARNING: not centered because even
        for p in center_planes:
            planes_to_read[p - half_nz:p + half_nz] = 1
    else:
        half_nz = num_planes_needed_for_cube // 2
        # centered
        for p in center_planes:
            planes_to_read[p - half_nz:p + half_nz + 1] = 1

    planes_to_read = np.where(planes_to_read)[0]

    if not planes_to_read.size:
        logging.error(
            f"No planes found. You need at least "
            f"{num_planes_needed_for_cube} planes to proceed "
            f"(i.e. the cube z size). "
            f"Brain z dimension is {brain_depth}.",
            stack_info=True,
        )
        raise ValueError(f"No planes found. You need at least "
                         f"{num_planes_needed_for_cube} planes to proceed "
                         f"(i.e. the cube z size). "
                         f"Brain z dimension is {brain_depth}.")
    # TODO: check if needs to flip args.cube_width and args.cube_height
    cells_groups = group_cells_by_z(cells)

    # copies=2 is set because at all times there is a plane queue (deque)
    # and an array passed to `Cube`
    ram_per_process = get_ram_requirement_per_process(
        planes_paths[0][0],
        num_planes_needed_for_cube,
        copies=2,
    )
    n_processes = get_num_processes(
        min_free_cpu_cores=n_free_cpus,
        ram_needed_per_process=ram_per_process,
        n_max_processes=len(planes_to_read),
        fraction_free_ram=0.2,
        max_ram_usage=system.memory_in_bytes(max_ram, "GB"),
    )
    # TODO: don't need to extract cubes from all channels if
    #  n_signal_channels>1
    with ProcessPoolExecutor(max_workers=n_processes) as executor:
        n_planes_per_chunk = len(planes_to_read) // n_processes
        for i in range(n_processes):
            start_idx = i * n_planes_per_chunk
            end_idx = (start_idx + n_planes_per_chunk +
                       num_planes_needed_for_cube - 1)
            if end_idx > planes_to_read[-1]:
                end_idx = None
            sub_planes_to_read = planes_to_read[start_idx:end_idx]

            executor.submit(
                save_cubes,
                cells_groups,
                planes_paths,
                sub_planes_to_read,
                planes_shape,
                x_pixel_um,
                y_pixel_um,
                x_pixel_um_network,
                y_pixel_um_network,
                num_planes_for_cube=num_planes_needed_for_cube,
                cube_width=cube_width,
                cube_height=cube_height,
                cube_depth=cube_depth,
                thread_id=i,
                output_dir=cubes_output_dir,
                save_empty_cubes=save_empty_cubes,
            )

    total_cubes = system.get_number_of_files_in_dir(cubes_output_dir)
    time_taken = datetime.now() - start_time
    logging.info("All cubes ({}) extracted in: {}".format(
        total_cubes, time_taken))