def as_cell(self, force_typed=True):
    """Return this object as a cell instance.

    If a label is present it always wins and a typed ``Cell`` is built
    from it. Without a label, ``force_typed`` decides between a typed
    ``Cell`` with the placeholder type ``-1`` and an ``UntypedCell``.
    """
    # A known label always produces a typed cell, regardless of the flag.
    if self.label is not None:
        return Cell(self.path, self.label)
    # No label: fall back to the placeholder type or an untyped cell.
    if force_typed:
        return Cell(self.path, -1)
    return UntypedCell(self.path)
def get_results(self):
    """Split detected cell clusters and write all results to disk.

    Each connected structure from the cell detector is classified by
    volume:

    * smaller than one maximal soma -> a single cell at the structure
      centre,
    * between that and ``self.max_cluster_size`` -> split into several
      cells via ``split_cells``,
    * larger still -> recorded as a single ``Cell.ARTIFACT``.

    The resulting cells are saved as XML (and optionally CSV) in
    ``self.output_folder``.

    Raises:
        StructureSplitException: if ``split_cells`` fails on a cluster.
    """
    logging.info("Splitting cell clusters and writing results")
    # Upper bound for a single soma, from the configured diameter and
    # spread factor (radius = spread_factor * diameter / 2).
    max_cell_volume = sphere_volume(
        self.soma_size_spread_factor * self.soma_diameter / 2)
    cells = []
    for (
            cell_id,
            cell_points,
    ) in self.cell_detector.get_coords_list().items():
        cell_volume = len(cell_points)
        if cell_volume < max_cell_volume:
            # Small enough to be one cell: take the structure centre.
            cell_centre = get_structure_centre_wrapper(cell_points)
            cells.append(
                Cell(
                    (cell_centre["x"], cell_centre["y"], cell_centre["z"]),
                    Cell.UNKNOWN,
                ))
        elif cell_volume < self.max_cluster_size:
            # A cluster of several cells: attempt to split it.
            try:
                cell_centres = split_cells(
                    cell_points, outlier_keep=self.outlier_keep)
            except (ValueError, AssertionError) as err:
                # Chain the original exception so the root cause is
                # preserved in the traceback.
                raise StructureSplitException(
                    f"Cell {cell_id}, error; {err}") from err
            for cell_centre in cell_centres:
                cells.append(
                    Cell(
                        (
                            cell_centre["x"],
                            cell_centre["y"],
                            cell_centre["z"],
                        ),
                        Cell.UNKNOWN,
                    ))
        else:
            # Too large to plausibly be cells: keep a single marker so
            # it can be reviewed, flagged as an artifact.
            cell_centre = get_structure_centre_wrapper(cell_points)
            cells.append(
                Cell(
                    (
                        cell_centre["x"],
                        cell_centre["y"],
                        cell_centre["z"],
                    ),
                    Cell.ARTIFACT,
                ))
    xml_file_path = os.path.join(self.output_folder,
                                 self.output_file + ".xml")
    save_cells(
        cells,
        xml_file_path,
        save_csv=self.save_csv,
        artifact_keep=self.artifact_keep,
    )
def get_cells_dir(cells_file_path, cell_type=None):
    """Build a cell per (non-hidden) file in a directory.

    Each filename in ``cells_file_path`` becomes one ``Cell`` of the
    given ``cell_type``; hidden files (dot-prefixed) are skipped.
    """
    return [
        Cell(filename, cell_type)
        for filename in os.listdir(cells_file_path)
        if not filename.startswith(".")  # ignore hidden files
    ]
def test_get_cells():
    """Exercise cell_io.get_cells over every supported input format."""
    # XML marker file
    xml_cells = cell_io.get_cells(xml_path)
    assert 65 == len(xml_cells)
    assert xml_cells[64] == Cell([2536, 523, 1286], 1)

    # YAML marker file
    yml_cells = cell_io.get_cells(yml_path)
    assert 250 == len(yml_cells)
    assert yml_cells[194] == Cell([9170, 2537, 311], 1)

    # Directory of cube files
    dir_cells = cell_io.get_cells(cubes_dir)
    assert 4 == len(dir_cells)
    assert natsorted(dir_cells) == natsorted(cubes_cells)

    # ROI sorter output directory
    roi_cells = cell_io.get_cells(roi_sorter_output_dir)
    assert 4 == len(roi_cells)
    assert natsorted(roi_cells) == natsorted(roi_sorter_cells)

    # Unknown extensions are rejected
    with pytest.raises(NotImplementedError):
        cell_io.get_cells("misc_format.abc")
def convert_layer_to_cells(layer_data, cells=True):
    """Convert napari layer points into a list of cells.

    Args:
        layer_data: iterable of points; each point is indexable and its
            coordinates appear in (z, y, x) order, as napari stores them.
        cells: if True the points are saved as ``Cell.CELL``, otherwise
            as ``Cell.UNKNOWN``.

    Returns:
        list of ``Cell`` objects with coordinates flipped to (x, y, z).
    """
    # Renamed from `type` to avoid shadowing the builtin; the loop index
    # from the original enumerate was never used.
    cell_type = Cell.CELL if cells else Cell.UNKNOWN
    return [
        Cell([point[2], point[1], point[0]], cell_type)
        for point in layer_data
    ]
def get_cells_xml(xml_file_path, cells_only=False):
    """Read cells from a (CellCounter-style) XML marker file.

    Args:
        xml_file_path: path of the XML file to parse.
        cells_only: if True, keep only markers for which ``is_cell()``
            holds.

    Returns:
        list of ``Cell`` objects, one per ``Marker`` element.

    Raises:
        MissingCellsError: if the file contains no markers at all.
    """
    with open(xml_file_path, "r") as xml_file:
        root = ElementTree.parse(xml_file).getroot()

    cells = []
    for type_marker in root.find("Marker_Data").findall("Marker_Type"):
        marker_type = int(type_marker.find("Type").text)
        cells.extend(
            Cell(marker, marker_type)
            for marker in type_marker.findall("Marker"))

    if not cells:
        raise MissingCellsError(
            "No cells found in file {}".format(xml_file_path))

    if cells_only:
        cells = [c for c in cells if c.is_cell()]
    return cells
def save_cell_count(self):
    """Save the current cell layer to ``cells.xml`` in the output directory.

    Layer points are stored (z, y, x) by napari and are flipped to
    (x, y, z) when building each ``Cell``; all points are saved with
    type ``Cell.CELL``. Progress is mirrored to the status label and
    stdout.
    """
    self.status_label.setText("Saving cells")
    print("Saving cells")
    self.get_output_directory()
    filename = self.output_directory / "cells.xml"
    # Flip napari's (z, y, x) ordering into (x, y, z) cells.
    cells_to_save = [
        Cell([point[2], point[1], point[0]], Cell.CELL)
        for point in self.cell_layer.data
    ]
    save_cells(cells_to_save, str(filename))
    self.status_label.setText("Ready")
    print("Done!")
def get_cells_yml(cells_file_path, ignore_type=False, marker="markers"):
    """Read cells from a YAML marker file.

    Args:
        cells_file_path: path of the YAML file to load.
        ignore_type: must be True; per-type parsing is not implemented,
            so all cells are returned merged as ``Cell.UNKNOWN``.
        marker: key under each type entry that holds the marker list.

    Returns:
        list of ``Cell`` objects (all typed ``Cell.UNKNOWN``).

    Raises:
        NotImplementedError: if ``ignore_type`` is False.
    """
    # Guard clause: fail fast, no `else` indentation needed after raise.
    if not ignore_type:
        raise NotImplementedError(
            "Parsing cell types is not yet implemented for YAML files. "
            "Currently the only option is to merge them. Please try again with"
            " 'ignore_type=True'.")
    with open(cells_file_path, "r") as yml_file:
        data = yaml.safe_load(yml_file)
    cells = []
    # Iterate values directly instead of list(data.keys()) + lookup.
    for type_dict in data.values():
        if marker in type_dict:
            for cell in type_dict[marker]:
                cells.append(Cell(cell, Cell.UNKNOWN))
    return cells
def save_curation(viewer):
    """Save the curated points of the viewer's second layer to file.

    Does nothing (beyond a notice) when no points have been confirmed
    or toggled. Point coordinates are flipped from napari's (z, y, x)
    to (x, y, z), and the stored 0-based labels are shifted to 1-based
    cell types before saving.
    """
    if not CURATED_POINTS:
        print("No cells have been confirmed or toggled, not saving")
        return

    unique_cells = unique_elements_lists(CURATED_POINTS)
    points = viewer.layers[1].data[unique_cells]
    labels = viewer.layers[1].properties["cell"][unique_cells]
    # Stored labels are 0-based; cell types are 1-based integers.
    labels = labels.astype("int") + 1

    cells_to_save = [
        Cell([point[2], point[1], point[0]], labels[idx])
        for idx, point in enumerate(points)
    ]
    print(f"Saving results to: {output_filename}")
    save_cells(cells_to_save, output_filename)
1275, 1278, 1286, 1278, 1288, 1295, 1279, 1282, 1275, 1276, 1275, 1286, ] cubes_cells = [ Cell([340, 1004, 15], 1), Cell([340, 1004, 15], 1), Cell([392, 522, 10], 1), Cell([392, 522, 10], 1), ] roi_sorter_cells = [ Cell([4056, 564, 358], 1), Cell([3989, 267, 570], 1), Cell([4351, 735, 439], 1), Cell([4395, 677, 367], 1), ] def test_get_cells(): cells = cell_io.get_cells(xml_path)
def test_cube_extraction(tmpdir, depth=20):
    """End-to-end test of extract_cubes.main and the Cube edge cases.

    Runs extraction twice against validation data (unscaled, then with
    voxel-size scaling), then checks that out-of-bounds or z-starved
    cubes come back zero-filled, and that a stack too thin for any cube
    raises StackSizeError.
    """
    tmpdir = str(tmpdir)
    args = CubeExtractArgs(tmpdir)
    planes_paths = {}
    # Channel 0: signal, channel 1: background.
    planes_paths[0] = get_sorted_file_paths(signal_data_dir,
                                            file_extension="tif")
    planes_paths[1] = get_sorted_file_paths(background_data_dir,
                                            file_extension="tif")
    extract_cubes.main(
        get_cells(args.paths.cells_file_path),
        args.paths.tmp__cubes_output_dir,
        planes_paths,
        args.cube_depth,
        args.cube_width,
        args.cube_height,
        args.voxel_sizes,
        args.network_voxel_sizes,
        args.max_ram,
        args.n_free_cpus,
        args.save_empty_cubes,
    )
    # Extracted cubes must match the stored validation cubes exactly.
    validation_cubes = load_cubes_in_dir(validate_cubes_dir)
    test_cubes = load_cubes_in_dir(tmpdir)
    for idx, test_cube in enumerate(test_cubes):
        assert (validation_cubes[idx] == test_cube).all()
    delete_directory_contents(tmpdir)
    # test cube scaling
    args.voxel_sizes = [7.25, 2, 2]
    args.x_pixel_um = 2
    args.y_pixel_um = 2
    args.z_pixel_um = 7.25
    extract_cubes.main(
        get_cells(args.paths.cells_file_path),
        args.paths.tmp__cubes_output_dir,
        planes_paths,
        args.cube_depth,
        args.cube_width,
        args.cube_height,
        args.voxel_sizes,
        args.network_voxel_sizes,
        args.max_ram,
        args.n_free_cpus,
        args.save_empty_cubes,
    )
    validation_cubes_scale = load_cubes_in_dir(validate_cubes_scale_dir)
    test_cubes = load_cubes_in_dir(tmpdir)
    for idx, test_cube in enumerate(test_cubes):
        assert (validation_cubes_scale[idx] == test_cube).all()
    # test edge of data errors
    # NOTE(review): Cell here is built from an "x..y..z.." position
    # string; presumably the cell at the volume origin — confirm.
    cell = Cell("x0y0z10", 2)
    plane_paths = os.listdir(signal_data_dir)
    first_plane = tifffile.imread(os.path.join(signal_data_dir,
                                               plane_paths[0]))
    # Build an in-memory signal stack of `depth` planes, z-last.
    stack_shape = first_plane.shape + (depth, )
    stacks = {}
    stacks[0] = np.zeros(stack_shape, dtype=np.uint16)
    stacks[0][:, :, 0] = first_plane
    for plane in range(1, depth):
        im_path = os.path.join(signal_data_dir, plane_paths[plane])
        stacks[0][:, :, plane] = tifffile.imread(im_path)
    # A cell on the volume edge yields an all-zero cube.
    cube = extract_cubes.Cube(cell, 0, stacks)
    assert (cube.data == 0).all()
    # A cell outside the data also yields an all-zero cube.
    cell = Cell("x2500y2500z10", 2)
    cube = extract_cubes.Cube(cell, 0, stacks)
    assert (cube.data == 0).all()
    # test insufficient z-planes for a specific cube
    stacks[0] = stacks[0][:, :, 1:]
    cube = extract_cubes.Cube(cell, 0, stacks)
    assert (cube.data == 0).all()
    # test insufficient z-planes for any cube to be extracted at all.
    delete_directory_contents(tmpdir)
    # args.z_pixel_um = 0.1
    # Shrinking the z voxel size makes the required cube depth exceed
    # the available planes, so extraction must fail.
    args.voxel_sizes[0] = 0.1
    with pytest.raises(extract_cubes.StackSizeError):
        extract_cubes.main(
            get_cells(args.paths.cells_file_path),
            args.paths.tmp__cubes_output_dir,
            planes_paths,
            args.cube_depth,
            args.cube_width,
            args.cube_height,
            args.voxel_sizes,
            args.network_voxel_sizes,
            args.max_ram,
            args.n_free_cpus,
            args.save_empty_cubes,
        )
def napari_array_to_cell_list(cell_array, type=-1):
    """Convert a napari point array into a list of cells.

    Args:
        cell_array: array of points whose per-row coordinate order is
            reversed (via ``np.flip``) before building each ``Cell``.
        type: cell type assigned to every cell (default -1). The name
            shadows the builtin but is kept for backward compatibility
            with keyword callers.

    Returns:
        list of ``Cell`` objects.
    """
    # Iterate rows directly instead of indexing via range(len(...)).
    return [Cell(np.flip(row), type) for row in cell_array]