def get_downsampled_space(atlas, downsampled_image_path):
    """Build an anatomical space describing the downsampled image.

    The space carries the atlas orientation and resolution, with its
    shape read from the image file on disk.

    :param atlas: BrainGlobe atlas providing orientation metadata and
        resolution.
    :param downsampled_image_path: Path to the downsampled image (TIFF).
    :return: ``bgs.AnatomicalSpace`` matching the downsampled image.
    """
    # Only the array shape is needed from the image on disk.
    image_shape = tifffile.imread(downsampled_image_path).shape
    return bgs.AnatomicalSpace(
        atlas.metadata["orientation"],
        shape=image_shape,
        resolution=atlas.resolution,
    )
def run_analysis(
    cells,
    signal_planes,
    orientation,
    voxel_sizes,
    atlas,
    deformation_field_paths,
    downsampled_space,
    downsampled_points_path,
    atlas_points_path,
    brainrender_points_path,
    abc4d_points_path,
    volume_csv_path,
    all_points_csv_path,
    summary_csv_path,
):
    """Transform detected cells into atlas space, summarise and export them.

    Builds the source anatomical space from the signal-plane image size and
    voxel sizes, maps the cell positions into atlas space via the
    deformation fields, writes a per-region summary, and finally exports
    the points for downstream visualisation.

    :param cells: Detected cell positions in the raw data space.
    :param signal_planes: Paths to the signal-channel image planes.
    :param orientation: BG-space orientation string of the raw data.
    :param voxel_sizes: Per-axis voxel sizes of the raw data.
    :param atlas: BrainGlobe atlas instance.
    :param deformation_field_paths: Paths to the registration deformation
        fields.
    :param downsampled_space: Anatomical space of the downsampled data.
    :param downsampled_points_path: Output path for downsampled points.
    :param atlas_points_path: Output path for atlas-space points.
    :param brainrender_points_path: Output path for brainrender export.
    :param abc4d_points_path: Output path for abc4d export.
    :param volume_csv_path: Path to the per-region volume CSV.
    :param all_points_csv_path: Output CSV listing every point.
    :param summary_csv_path: Output CSV summarising points per region.
    """
    raw_shape = tuple(
        imio.get_size_image_from_file_paths(signal_planes).values()
    )
    # Reverse the reported axis order before building the space
    # (assumes imio reports axes in the opposite order to what
    # AnatomicalSpace expects — TODO confirm axis semantics).
    source_shape = (raw_shape[2], raw_shape[1], raw_shape[0])
    source_space = bgs.AnatomicalSpace(
        orientation,
        shape=source_shape,
        resolution=[float(size) for size in voxel_sizes],
    )

    transformed_cells = transform_points_to_atlas_space(
        cells,
        source_space,
        atlas,
        deformation_field_paths,
        downsampled_space,
        downsampled_points_path=downsampled_points_path,
        atlas_points_path=atlas_points_path,
    )

    logging.info("Summarising cell positions")
    points = summarise_points(
        cells,
        transformed_cells,
        atlas,
        volume_csv_path,
        all_points_csv_path,
        summary_csv_path,
    )

    logging.info("Exporting data")
    export_points(
        points,
        transformed_cells,
        atlas.resolution[0],
        brainrender_points_path,
        abc4d_points_path,
    )
def get_scale(atlas, metadata, scaling_rounding_decimals=5):
    """Compute per-axis scaling factors from the sample to the atlas.

    For each atlas axis, divides the atlas resolution of the matching
    source axis by the sample voxel size on that axis.

    :param atlas: BrainGlobe atlas providing ``space`` and ``resolution``.
    :param metadata: Mapping with ``"orientation"`` (BG-space string) and
        ``"voxel_sizes"`` (per-axis sizes of the sample data).
    :param scaling_rounding_decimals: Decimal places to round each factor
        to (default 5).
    :return: Tuple of rounded scaling factors, one per atlas axis.
    """
    source_space = bgs.AnatomicalSpace(metadata["orientation"])
    atlas_axes = atlas.space.axes_order
    return tuple(
        round(
            # Look up the atlas resolution of the axis that corresponds
            # to this source axis, then scale by the sample voxel size.
            atlas.resolution[atlas_axes.index(source_space.axes_order[idx])]
            / float(metadata["voxel_sizes"][idx]),
            scaling_rounding_decimals,
        )
        for idx, _ in enumerate(atlas_axes)
    )
def get_dims_from_origins(origins):
    """
    From a list of BG space abbreviations (e.g. ["asl","sla","lsa"]) get
    correct axes for display in Napari.

    :param origins: List of BG-space orientation strings.
    :return: List of axis-order lists, one per entry in ``origins``.
    :raises AssertionError: If any mapping would require an axis flip.
    """
    # NOTE(review): the mapping source is always ``origins[0]`` while the
    # target cycles through the list, so the loop index is unused on the
    # source side. If successive pairs were intended, this should be
    # ``origins[index]`` — confirm against callers.
    all_dims = []
    n_origins = len(origins)
    for index in range(n_origins):
        reference_space = bg.AnatomicalSpace(origins[0])
        target_orientation = origins[(index + 1) % n_origins]
        dims, flips, _, _ = reference_space.map_to(target_orientation)
        assert not any(
            flips
        ), f"\nReceived orientations: {origins}\nThese require (orientation) flips. This is not currently supported"
        all_dims.append(list(dims))
    return all_dims
def run(args, atlas, downsampled_space):
    """Map classified cell positions into atlas space, export and summarise.

    Loads the classified cells, builds the source anatomical space from
    the first signal channel, transforms the points via the registration
    deformation fields, exports them for brainrender, and writes
    per-region summary CSVs.

    :param args: Parsed arguments carrying paths, orientation and voxel
        sizes.
    :param atlas: BrainGlobe atlas instance.
    :param downsampled_space: Anatomical space of the downsampled data.
    """
    deformation_field_paths = [
        args.brainreg_paths.deformation_field_0,
        args.brainreg_paths.deformation_field_1,
        args.brainreg_paths.deformation_field_2,
    ]

    detected = get_cells(args.paths.classified_points, cells_only=True)
    # Collect coordinates as (z, y, x) rows.
    cells = np.array([[cell.z, cell.y, cell.x] for cell in detected])

    raw_shape = tuple(
        imio.get_size_image_from_file_paths(
            args.signal_planes_paths[0]
        ).values()
    )
    # Reverse the reported axis order before building the space
    # (assumes imio reports axes in the opposite order to what
    # AnatomicalSpace expects — TODO confirm axis semantics).
    source_shape = (raw_shape[2], raw_shape[1], raw_shape[0])
    source_space = bgs.AnatomicalSpace(
        args.orientation,
        shape=source_shape,
        resolution=[float(size) for size in args.voxel_sizes],
    )

    transformed_cells = transform_points_to_atlas_space(
        cells,
        source_space,
        atlas,
        deformation_field_paths,
        downsampled_space,
        downsampled_points_path=args.paths.downsampled_points,
        atlas_points_path=args.paths.atlas_points,
    )

    logging.info("Exporting cells to brainrender")
    export_points(
        transformed_cells,
        atlas.resolution[0],
        args.paths.brainrender_points,
    )

    logging.info("Summarising cell positions")
    summarise_points(
        cells,
        transformed_cells,
        atlas,
        args.brainreg_paths.volume_csv_path,
        args.paths.all_points_csv,
        args.paths.summary_csv,
    )
def main(
    atlas,
    data_orientation,
    target_brain_path,
    paths,
    voxel_sizes,
    niftyreg_args,
    n_free_cpus=2,
    sort_input_file=False,
    additional_images_downsample=None,
    backend="niftyreg",
    scaling_rounding_decimals=5,
    debug=False,
):
    """Register a sample brain to an atlas and derive region volumes/boundaries.

    Loads and rescales the raw data into the atlas space, runs the
    registration backend, then computes per-region volumes and a boundary
    image from the registered outputs.

    :param atlas: Atlas name, resolved to a ``BrainGlobeAtlas``.
    :param data_orientation: BG-space orientation string of the raw data.
    :param target_brain_path: Path to the raw image data.
    :param paths: Object holding all output paths.
    :param voxel_sizes: Per-axis voxel sizes of the raw data.
    :param niftyreg_args: Arguments forwarded to the niftyreg backend.
    :param n_free_cpus: CPU cores to leave free (default 2).
    :param sort_input_file: Whether to sort input file paths.
    :param additional_images_downsample: Extra channels to downsample.
    :param backend: Registration backend; only ``"niftyreg"`` is handled.
    :param scaling_rounding_decimals: Decimal places for scaling factors.
    :param debug: Passed through to the registration backend.
    """
    atlas = BrainGlobeAtlas(atlas)
    source_space = bg.AnatomicalSpace(data_orientation)
    atlas_axes = atlas.space.axes_order

    # Per-axis ratio of sample voxel size to atlas resolution, matching
    # each source axis to its counterpart in the atlas axis order.
    scaling = [
        round(
            float(voxel_size)
            / atlas.resolution[atlas_axes.index(source_axis)],
            scaling_rounding_decimals,
        )
        for source_axis, voxel_size in zip(
            source_space.axes_order, voxel_sizes
        )
    ]

    n_processes = get_num_processes(min_free_cpu_cores=n_free_cpus)
    load_parallel = n_processes > 1

    logging.info("Loading raw image data")
    target_brain = imio.load_any(
        target_brain_path,
        scaling[1],
        scaling[2],
        scaling[0],
        load_parallel=load_parallel,
        sort_input_file=sort_input_file,
        n_free_cpus=n_free_cpus,
    )
    target_brain = bg.map_stack_to(
        data_orientation, atlas.metadata["orientation"], target_brain
    )

    # NOTE(review): with any backend other than "niftyreg" the
    # registration step is skipped entirely, yet the volume and boundary
    # steps below still expect registration outputs on disk — confirm
    # this is intended.
    if backend == "niftyreg":
        run_niftyreg(
            paths.registration_output_folder,
            paths,
            atlas,
            target_brain,
            n_processes,
            additional_images_downsample,
            data_orientation,
            atlas.metadata["orientation"],
            niftyreg_args,
            scaling,
            load_parallel,
            sort_input_file,
            n_free_cpus,
            debug=debug,
        )

    logging.info("Calculating volumes of each brain area")
    calculate_volumes(
        atlas,
        paths.registered_atlas,
        paths.registered_hemispheres,
        paths.volume_csv_path,
        # for all brainglobe atlases
        left_hemisphere_value=1,
        right_hemisphere_value=2,
    )

    logging.info("Generating boundary image")
    boundaries(
        paths.registered_atlas,
        paths.boundaries_file_path,
    )

    logging.info(f"brainreg completed. Results can be found here: "
                 f"{paths.registration_output_folder}")