def as_cell(self, force_typed=True):
    """Convert this object to a cell.

    A known label always yields a typed ``Cell``. With no label, the
    result depends on ``force_typed``: a ``Cell`` with type ``-1`` when
    True, otherwise an ``UntypedCell``.
    """
    # Labelled: always a typed Cell, regardless of force_typed.
    if self.label is not None:
        return Cell(self.path, self.label)
    # Unlabelled: force_typed decides between a -1 Cell and an UntypedCell.
    if force_typed:
        return Cell(self.path, -1)
    return UntypedCell(self.path)
def get_results(self):
    """Split detected cell clusters and save the results to file.

    Each detected structure is classified by volume:
      * below ``max_cell_volume`` — kept as a single ``Cell.UNKNOWN`` cell
        at the structure centre;
      * between that and ``self.max_cluster_size`` — split into multiple
        cells via ``split_cells``;
      * above ``self.max_cluster_size`` — recorded as a ``Cell.ARTIFACT``.

    Results are written as XML (and optionally CSV) via ``save_cells``.

    Raises:
        StructureSplitException: if ``split_cells`` fails on a cluster
            (the original error is chained as the cause).
    """
    logging.info("Splitting cell clusters and writing results")

    def centre_as_tuple(centre):
        # Convert a {"x": ..., "y": ..., "z": ...} centre dict into an
        # (x, y, z) tuple suitable for the Cell constructor.
        return (centre["x"], centre["y"], centre["z"])

    max_cell_volume = sphere_volume(
        self.soma_size_spread_factor * self.soma_diameter / 2
    )
    cells = []
    for cell_id, cell_points in self.cell_detector.get_coords_list().items():
        cell_volume = len(cell_points)
        if cell_volume < max_cell_volume:
            # Small enough to plausibly be a single cell.
            cell_centre = get_structure_centre_wrapper(cell_points)
            cells.append(Cell(centre_as_tuple(cell_centre), Cell.UNKNOWN))
        elif cell_volume < self.max_cluster_size:
            # Likely several touching cells — split the cluster.
            try:
                cell_centres = split_cells(
                    cell_points, outlier_keep=self.outlier_keep
                )
            except (ValueError, AssertionError) as err:
                # Chain the original error so the root cause is preserved.
                raise StructureSplitException(
                    f"Cell {cell_id}, error; {err}"
                ) from err
            for cell_centre in cell_centres:
                cells.append(Cell(centre_as_tuple(cell_centre), Cell.UNKNOWN))
        else:
            # Too large to be cells at all — record as an artifact.
            cell_centre = get_structure_centre_wrapper(cell_points)
            cells.append(Cell(centre_as_tuple(cell_centre), Cell.ARTIFACT))

    xml_file_path = os.path.join(self.output_folder, self.output_file + ".xml")
    save_cells(
        cells,
        xml_file_path,
        save_csv=self.save_csv,
        artifact_keep=self.artifact_keep,
    )
def test_cube_extraction(tmpdir, depth=20):
    """End-to-end cube extraction checks.

    Verifies that extracted cubes match the validation data (unscaled and
    scaled), that cells at the data edge produce all-zero cubes, and that
    insufficient z-planes either zero a cube or raise ``StackSizeError``.
    """
    tmpdir = str(tmpdir)

    def check_output_matches(validation_cubes):
        # Compare every cube written to tmpdir against its validation twin.
        for idx, extracted in enumerate(load_cubes_in_dir(tmpdir)):
            assert (validation_cubes[idx] == extracted).all()

    args = CubeExtractArgs(tmpdir)
    extract_cubes.main(args)
    check_output_matches(load_cubes_in_dir(validate_cubes_dir))
    system.delete_directory_contents(tmpdir)

    # test cube scaling
    args.x_pixel_um = 2
    args.y_pixel_um = 2
    args.z_pixel_um = 7.25
    extract_cubes.main(args)
    check_output_matches(load_cubes_in_dir(validate_cubes_scale_dir))

    # test edge of data errors
    cell = Cell("x0y0z10", 2)
    plane_paths = os.listdir(signal_data_dir[0])
    first_plane = tifffile.imread(
        os.path.join(signal_data_dir[0], plane_paths[0])
    )
    stack_shape = first_plane.shape + (depth,)
    stacks = {0: np.zeros(stack_shape, dtype=np.uint16)}
    stacks[0][:, :, 0] = first_plane
    for plane in range(1, depth):
        im_path = os.path.join(signal_data_dir[0], plane_paths[plane])
        stacks[0][:, :, plane] = tifffile.imread(im_path)

    cube = extract_cubes.Cube(cell, 0, stacks)
    assert (cube.data == 0).all()

    cell = Cell("x2500y2500z10", 2)
    cube = extract_cubes.Cube(cell, 0, stacks)
    assert (cube.data == 0).all()

    # test insufficient z-planes for a specific cube
    stacks[0] = stacks[0][:, :, 1:]
    cube = extract_cubes.Cube(cell, 0, stacks)
    assert (cube.data == 0).all()

    # test insufficient z-planes for any cube to be extracted at all.
    system.delete_directory_contents(tmpdir)
    args.z_pixel_um = 0.1
    with pytest.raises(extract_cubes.StackSizeError):
        extract_cubes.main(args)
def get_cells_dir(cells_file_path, cell_type=None):
    """Build a list of cells from the filenames in a directory.

    Each non-hidden filename is turned into a ``Cell`` of ``cell_type``.
    """
    return [
        Cell(fname, cell_type)
        for fname in os.listdir(cells_file_path)
        # ignore hidden files
        if not fname.startswith(".")
    ]
def get_cells_xml(xml_file_path, cells_only=False):
    """Load cells from a marker XML file.

    Parses every ``Marker`` under each ``Marker_Type`` element, tagging
    it with that type's integer ``Type`` value. If ``cells_only`` is
    True, non-cells are filtered out of the result.

    Raises:
        MissingCellsError: if the file contains no markers at all.
    """
    cells = []
    with open(xml_file_path, "r") as xml_file:
        root = ElementTree.parse(xml_file).getroot()
        for type_marker in root.find("Marker_Data").findall("Marker_Type"):
            cell_type = int(type_marker.find("Type").text)
            cells.extend(
                Cell(cell_element, cell_type)
                for cell_element in type_marker.findall("Marker")
            )
    if not cells:
        raise MissingCellsError(
            "No cells found in file {}".format(xml_file_path))
    if cells_only:
        cells = [c for c in cells if c.is_cell()]
    return cells
def test_get_cells():
    """get_cells reads XML files, cube dirs and ROI-sorter dirs, and
    rejects unrecognised formats."""
    # XML marker file
    xml_cells = cell_io.get_cells(xml_path)
    assert len(xml_cells) == 65
    assert xml_cells[64] == Cell([2536, 523, 1286], 1)

    # directory of cube files
    dir_cells = cell_io.get_cells(cubes_dir)
    assert len(dir_cells) == 4
    assert natsorted(dir_cells) == natsorted(cubes_cells)

    # ROI sorter output directory
    roi_cells = cell_io.get_cells(roi_sorter_output_dir)
    assert len(roi_cells) == 4
    assert natsorted(roi_cells) == natsorted(roi_sorter_cells)

    # unsupported format
    with pytest.raises(NotImplementedError):
        assert cell_io.get_cells("misc_format.abc")
def get_cells_yml(cells_file_path, ignore_type=False, marker="markers"):
    """Load cells from a YAML file, merging all cell types.

    Every entry under each top-level key's ``marker`` list becomes a
    ``Cell`` of type ``Cell.UNKNOWN``. An empty YAML file yields an
    empty list (``yaml.safe_load`` returns ``None`` for it, which the
    original code did not handle).

    Args:
        cells_file_path: path to the YAML file.
        ignore_type: must be True; per-type parsing is not implemented.
        marker: key under each type whose value is the list of cells.

    Raises:
        NotImplementedError: if ``ignore_type`` is False.
    """
    if not ignore_type:
        raise NotImplementedError(
            "Parsing cell types is not yet implemented for YAML files. "
            "Currently the only option is to merge them. Please try again with"
            " 'ignore_type=True'.")
    with open(cells_file_path, "r") as yml_file:
        data = yaml.safe_load(yml_file)
    cells = []
    # safe_load returns None for an empty file; treat that as "no cells".
    for type_dict in (data or {}).values():
        # Skip types that have no marker list rather than KeyError-ing.
        for cell in type_dict.get(marker, []):
            cells.append(Cell(cell, Cell.UNKNOWN))
    return cells
1275, 1278, 1286, 1278, 1288, 1295, 1279, 1282, 1275, 1276, 1275, 1286, ] cubes_cells = [ Cell([340, 1004, 15], 1), Cell([340, 1004, 15], 1), Cell([392, 522, 10], 1), Cell([392, 522, 10], 1), ] roi_sorter_cells = [ Cell([4056, 564, 358], 1), Cell([3989, 267, 570], 1), Cell([4351, 735, 439], 1), Cell([4395, 677, 367], 1), ] def test_get_cells(): cells = cell_io.get_cells(xml_path)
import shutil import pytest from cellfinder.classify import classify import cellfinder.IO.cells as cell_io from cellfinder.cells.cells import Cell from cellfinder.tools.prep import prep_classification data_dir = os.path.join(os.getcwd(), "tests", "data") cubes_dir = os.path.join(data_dir, "classify", "cubes") signal_channel = 1 background_channel = 2 cells_validation = [ Cell("z222y2805x9962", 2), Cell("z258y3892x10559", 2), Cell("z413y2308x9391", 2), Cell("z416y2503x5997", 1), Cell("z418y5457x9489", 1), Cell("z433y4425x7552", 1), ] class ClassifyArgs: def __init__( self, tmpdir, cubes_output_dir, cell_classification_file="cell_classification.xml", debug=False,