def combine(self):
    super().combine()

    # Compute the posterior mean and standard deviation per parameter from
    # the memory-mapped sample files, without loading the chains into memory.
    statistic_maps = {}
    for name in self._sample_storage:
        samples_path = os.path.join(self._output_dir, name + '.samples.npy')
        samples = open_memmap(samples_path, mode='r')
        statistic_maps[name] = np.mean(samples, axis=1)
        statistic_maps[name + '.std'] = np.std(samples, axis=1)

    write_all_as_nifti(restore_volumes(statistic_maps, self._mask),
                       os.path.join(self._output_dir, 'univariate_normal'),
                       nifti_header=self._nifti_header,
                       gzip=self._write_volumes_gzipped)

    write_all_as_nifti({'UsedMask': self._mask}, self._output_dir,
                       nifti_header=self._nifti_header,
                       gzip=self._write_volumes_gzipped)

    if not self._keep_samples:
        for name in self._model.get_free_param_names():
            os.remove(os.path.join(self._output_dir, name + '.samples.npy'))
    else:
        return load_samples(self._output_dir)
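
# A minimal standalone sketch (not a method of the class above) of the
# statistics computation used by combine(): per-voxel mean and std from a
# memory-mapped samples array of shape (n_voxels, n_samples). The file name
# 'Theta.samples.npy' is a hypothetical example.
def _example_samples_statistics(samples_path='Theta.samples.npy'):
    import numpy as np
    from numpy.lib.format import open_memmap

    # Opening the file as a memmap avoids loading the full chain into memory.
    samples = open_memmap(samples_path, mode='r')

    # Axis 1 is the sample axis, so these reduce to one value per voxel.
    mean_map = np.mean(samples, axis=1)
    std_map = np.std(samples, axis=1)
    return mean_map, std_map
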
def _combine_volumes(self, output_dir, tmp_storage_dir, nifti_header, maps_subdir=''):
    """Combine the volumes found in the temporary storage into final nifti volumes.

    Args:
        output_dir (str): the location for the output files
        tmp_storage_dir (str): the directory with the temporary results
        nifti_header: the nibabel image header to use when writing the output volumes
        maps_subdir (str): the subdirectory for both the output directory and the tmp storage
            directory. If set, we load the results from this subdirectory of tmp_storage_dir
            and write the results to this subdirectory of the output dir.
    """
    full_output_dir = os.path.join(output_dir, maps_subdir)
    if not os.path.exists(full_output_dir):
        os.makedirs(full_output_dir)

    # Remove stale volumes from a previous run before writing the new ones.
    for fname in os.listdir(full_output_dir):
        if fname.endswith('.nii.gz'):
            os.remove(os.path.join(full_output_dir, fname))

    map_names = [os.path.splitext(os.path.basename(p))[0]
                 for p in glob.glob(os.path.join(tmp_storage_dir, maps_subdir, '*.npy'))]

    chunks_dir = os.path.join(tmp_storage_dir, maps_subdir)
    for map_name in map_names:
        data = np.load(os.path.join(chunks_dir, map_name + '.npy'), mmap_mode='r')
        write_all_as_nifti({map_name: data}, full_output_dir,
                           nifti_header=nifti_header, gzip=self._write_volumes_gzipped)
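
# A hedged standalone sketch of the same npy-to-nifti combine step, using
# nibabel directly instead of MDT's write_all_as_nifti. The directory names
# are hypothetical, and it assumes each '<map_name>.npy' holds a full volume.
def _example_npy_to_nifti(tmp_dir='tmp_results', out_dir='output'):
    import glob
    import os
    import numpy as np
    import nibabel as nib

    os.makedirs(out_dir, exist_ok=True)
    for npy_path in glob.glob(os.path.join(tmp_dir, '*.npy')):
        map_name = os.path.splitext(os.path.basename(npy_path))[0]
        data = np.load(npy_path, mmap_mode='r')

        # The identity affine is a placeholder; real code should carry over
        # the affine/header of the input data instead.
        img = nib.Nifti1Image(np.asarray(data), np.eye(4))
        nib.save(img, os.path.join(out_dir, map_name + '.nii.gz'))
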
def _combine_volumes_write_out(info_pair):
    """Write the given map to a nifti volume.

    Meant to be used by ModelProcessor._combine_volumes.

    Args:
        info_pair (tuple): a tuple (map_name, (chunks_dir, output_dir, nifti_header, write_gzipped))
    """
    map_name, info_list = info_pair
    chunks_dir, output_dir, nifti_header, write_gzipped = info_list

    data = np.load(os.path.join(chunks_dir, map_name + '.npy'), mmap_mode='r')
    write_all_as_nifti({map_name: data}, output_dir, nifti_header=nifti_header, gzip=write_gzipped)
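
# A hedged sketch of how a worker with this (map_name, info_list) signature
# could be fanned out over a process pool. That _combine_volumes actually
# parallelizes this way is an assumption, not shown in the code above.
def _example_parallel_write(chunks_dir, output_dir, nifti_header, map_names):
    import multiprocessing

    # One shared info tuple, paired with each map name.
    info = (chunks_dir, output_dir, nifti_header, True)
    with multiprocessing.Pool() as pool:
        pool.map(_combine_volumes_write_out, [(name, info) for name in map_names])
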
def write_volume_maps(maps, directory, header=None, overwrite_volumes=True, gzip=True):
    """Write a dictionary of maps to the given directory using the given header.

    Args:
        maps (dict): the maps, with the map names as keys and the 3d or 4d volumes as values
        directory (str): the directory to write to
        header: the nibabel image header to use for the output volumes
        overwrite_volumes (boolean): if we want to overwrite volumes that are already present
        gzip (boolean): if we want to write the results gzipped
    """
    from mdt.lib.nifti import write_all_as_nifti
    write_all_as_nifti(maps, directory, nifti_header=header,
                       overwrite_volumes=overwrite_volumes, gzip=gzip)
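
# A minimal usage example for write_volume_maps, with synthetic data. Deriving
# the header from a throwaway nibabel image is just a way to get a well-formed
# header for the sketch; real code would reuse the header of the input data.
def _example_write_volume_maps():
    import numpy as np
    import nibabel as nib

    header = nib.Nifti1Image(np.zeros((10, 10, 10)), np.eye(4)).header

    maps = {
        'FA': np.random.rand(10, 10, 10),          # a 3d map
        'Signal': np.random.rand(10, 10, 10, 5),   # a 4d map
    }
    write_volume_maps(maps, './example_output', header=header)
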