def save_brain(image, source_image_path, output_path):
    """Save ``image`` to ``output_path`` as a NIfTI file in atlas space.

    The scale and affine transform are derived from the registration
    config and the original source image.

    :param image: numpy array to save (cast to int16 on write)
    :param source_image_path: path of the original image the transform
        parameters are computed from
    :param output_path: destination path for the NIfTI file
    """
    reg_config = source_custom_config_amap()
    scale, affine = get_transform_space_params(reg_config, source_image_path)
    brainio.to_nii(
        image.astype(np.int16),
        str(output_path),
        scale=scale,
        affine_transform=affine,
    )
def config_parse(parser):
    """Add the "Config options" argument group to ``parser``.

    :param parser: argparse.ArgumentParser to extend
    :return: the same parser, for chaining
    """
    group = parser.add_argument_group("Config options")
    group.add_argument(
        "--registration-config",
        dest="registration_config",
        type=str,
        default=source_files.source_custom_config_amap(),
        help="To supply your own, custom registration configuration file.",
    )
    return parser
def amend_cfg(new_atlas_folder=None, atlas=None):
    """Update the custom registration config to point at the correct atlas.

    :param new_atlas_folder: path to the atlas directory; if None, the
        config is left untouched
    :param atlas: atlas identifier passed through to ``write_atlas_to_cfg``
    """
    print("Ensuring custom config file is correct")
    source_cfg = source_files.source_config_amap()
    custom_cfg = source_files.source_custom_config_amap()
    if new_atlas_folder is None:
        return
    write_atlas_to_cfg(new_atlas_folder, atlas, source_cfg, custom_cfg)
def summarise_brain_regions(label_layers, filename):
    """Summarise segmented brain regions and write the result to CSV.

    Concatenates per-layer summaries, then converts voxel counts to mm^3
    and voxel coordinates to um using the atlas pixel sizes from the
    custom registration config.

    :param label_layers: iterable of napari labels layers to summarise
    :param filename: output CSV path
    """
    summaries = [
        summarise_single_brain_region(label_layer)
        for label_layer in label_layers
    ]
    result = pd.concat(summaries)

    volume_header = "volume_mm3"
    length_columns = [
        "x_min_um",
        "y_min_um",
        "z_min_um",
        "x_max_um",
        "y_max_um",
        "z_max_um",
        "x_center_um",
        "y_center_um",
        "z_center_um",
    ]
    result.columns = ["region"] + [volume_header] + length_columns

    # Load the registration config path once instead of once per lookup
    config_path = source_custom_config_amap()
    atlas_pixel_sizes = get_atlas_pixel_sizes(config_path)
    # um^3 -> mm^3
    voxel_volume = get_voxel_volume(config_path) / (1000**3)

    result[volume_header] = result[volume_header] * voxel_volume

    # Scale each length column by the pixel size of its axis
    # (matched on the x/y/z prefix of the column name)
    for header in length_columns:
        for dim in atlas_pixel_sizes.keys():
            if header.startswith(dim):
                scale = float(atlas_pixel_sizes[dim])
                assert scale > 0
                result[header] = result[header] * scale

    result.to_csv(filename, index=False)
def check_atlas_install(cfg_file_path=None):
    """
    Checks whether the atlas directory exists, and whether it's empty or not.

    :param cfg_file_path: Path to a registration config file. If None, the
        default custom amap config is used.
    :return: Whether the directory exists, and whether the files also exist
    """
    dir_exists = False
    files_exist = False
    if cfg_file_path is None:
        cfg_file_path = source_files.source_custom_config_amap()
    if os.path.exists(cfg_file_path):
        config_obj = get_config_obj(cfg_file_path)
        atlas_conf = config_obj["atlas"]
        atlas_directory = atlas_conf["base_folder"]
        if os.path.exists(atlas_directory):
            dir_exists = True
            # Any entry at all counts as "files exist"
            if os.listdir(atlas_directory):
                files_exist = True
    return dir_exists, files_exist
def analyse_region_brain_areas(
    label_layer,
    destination_directory,
    annotations,
    hemispheres,
    structures_reference_df,
    extension=".csv",
    ignore_empty=True,
):
    """
    Analyse the brain structures covered by a single segmented region and
    save per-structure left/right/total volumes to a CSV file named after
    the layer in ``destination_directory``.

    :param label_layer: napari labels layer (with segmented regions)
    :param destination_directory: directory (Path) the CSV is written into
    :param np.array annotations: numpy array of the brain area annotations
    :param np.array hemispheres: numpy array of hemisphere annotations
    :param structures_reference_df: Pandas dataframe with "id" column
        (matching the values in "annotations" and a "name column"
    :param extension: file extension for the output file
    :param ignore_empty: If True, don't analyse empty regions
    """
    data = label_layer.data
    if ignore_empty and data.sum() == 0:
        return

    # swap data back to original orientation from napari orientation
    data = np.swapaxes(data, 2, 0)
    name = label_layer.name

    masked_annotations = data.astype(bool) * annotations

    # TODO: don't hardcode hemisphere value. Get from atlas config
    annotations_left, annotations_right = lateralise_atlas(
        masked_annotations,
        hemispheres,
        left_hemisphere_value=2,
        right_hemisphere_value=1,
    )

    unique_vals_left, counts_left = np.unique(
        annotations_left, return_counts=True
    )
    unique_vals_right, counts_right = np.unique(
        annotations_right, return_counts=True
    )
    voxel_volume = get_voxel_volume(source_custom_config_amap())
    # um^3 -> mm^3
    voxel_volume_in_mm = voxel_volume / (1000**3)

    df = initialise_df(
        "structure_name",
        "left_volume_mm3",
        "left_percentage_of_total",
        "right_volume_mm3",
        "right_percentage_of_total",
        "total_volume_mm3",
        "percentage_of_total",
    )
    sampled_structures = unique_elements_lists(
        list(unique_vals_left) + list(unique_vals_right)
    )
    total_volume_region = get_total_volume_regions(
        unique_vals_left, unique_vals_right, counts_left, counts_right
    )

    for atlas_value in sampled_structures:
        if atlas_value != 0:
            try:
                df = add_structure_volume_to_df(
                    df,
                    atlas_value,
                    structures_reference_df,
                    unique_vals_left,
                    unique_vals_right,
                    counts_left,
                    counts_right,
                    voxel_volume_in_mm,
                    total_volume_voxels=total_volume_region,
                )
            except UnknownAtlasValue:
                # Fixed: the message literal was broken by a stray newline,
                # which made the source syntactically invalid
                print(
                    "Value: {} is not in the atlas structure reference "
                    "file. Not calculating the volume".format(atlas_value)
                )
    filename = destination_directory / (name + extension)
    df.to_csv(filename, index=False)
amap_output_dir = Path(amap_output_dir) annotations_image = load_any(amap_output_dir / reg_paths.ANNOTATIONS) midpoint = int(annotations_image.shape[0] // 2) hemispheres_image = load_any(amap_output_dir / reg_paths.HEMISPHERES) sub_region_values = list(sub_regions["id"]) region_mask = np.isin(annotations_image, sub_region_values) left_region_mask = region_mask * (hemispheres_image == left_hemisphere_value) right_region_mask = region_mask * (hemispheres_image == right_hemisphere_value) left_region_summary = regionprops(left_region_mask.astype(np.int8))[0] right_region_summary = regionprops(right_region_mask.astype(np.int8))[0] atlas_pixel_sizes = get_atlas_pixel_sizes(source_custom_config_amap()) results_dict = { "x_min_um_left": left_region_summary.bbox[0] * int(atlas_pixel_sizes["x"]), "y_min_um_left": left_region_summary.bbox[1] * int(atlas_pixel_sizes["y"]), "z_min_um_left": left_region_summary.bbox[2] * int(atlas_pixel_sizes["z"]), "x_max_um_left": left_region_summary.bbox[3] * int(atlas_pixel_sizes["x"]), "y_max_um_left": left_region_summary.bbox[4] * int(atlas_pixel_sizes["y"]), "z_max_um_left": left_region_summary.bbox[5] * int(atlas_pixel_sizes["z"]), "x_center_um_left":
def registration_parse(parser):
    """Add the "Registration options" argument group to ``parser``.

    :param parser: argparse.ArgumentParser to extend
    :return: the same parser, for chaining
    """
    group = parser.add_argument_group("Registration options")
    group.add_argument(
        "--registration-config",
        dest="registration_config",
        type=str,
        default=source_files.source_custom_config_amap(),
        help="To supply your own, custom registration configuration file.",
    )
    group.add_argument(
        "--sort-input-file",
        dest="sort_input_file",
        action="store_true",
        help="If set to true, the input text file will be sorted using "
        "natural sorting. This means that the file paths will be "
        "sorted as would be expected by a human and "
        "not purely alphabetically",
    )
    group.add_argument(
        "--no-save-downsampled",
        dest="no_save_downsampled",
        action="store_true",
        help="Dont save the downsampled brain before filtering.",
    )
    # Numeric tuning options, declared as (flag, dest, type, default)
    numeric_options = (
        ("--affine-n-steps", "affine_n_steps", check_positive_int, 6),
        ("--affine-use-n-steps", "affine_use_n_steps", check_positive_int, 5),
        ("--freeform-n-steps", "freeform_n_steps", check_positive_int, 6),
        (
            "--freeform-use-n-steps",
            "freeform_use_n_steps",
            check_positive_int,
            4,
        ),
        (
            "--bending-energy-weight",
            "bending_energy_weight",
            check_positive_float,
            0.95,
        ),
        ("--grid-spacing", "grid_spacing", int, -10),
        (
            "--smoothing-sigma-reference",
            "smoothing_sigma_reference",
            float,
            -1.0,
        ),
        (
            "--smoothing-sigma-floating",
            "smoothing_sigma_floating",
            float,
            -1.0,
        ),
        (
            "--histogram-n-bins-floating",
            "histogram_n_bins_floating",
            check_positive_int,
            128,
        ),
        (
            "--histogram-n-bins-reference",
            "histogram_n_bins_reference",
            check_positive_int,
            128,
        ),
    )
    for flag, dest, arg_type, default in numeric_options:
        group.add_argument(flag, dest=dest, type=arg_type, default=default)
    return parser
def get_atlas_config(self): if self._atlas_config is None: self._atlas_config = source_custom_config_amap()