def check_registration():
    map_configs = ''
    maps = {}

    particularly_bad_subjects = ['mgh_1002', 'mgh_1004', 'mgh_1008', 'mgh_1009', 'mgh_1012', 'mgh_1013',
                                 'mgh_1015', 'mgh_1017', 'mgh_1021', 'mgh_1022', 'mgh_1032']

    for subject in os.listdir(output_pjoin()):
        # for subject in particularly_bad_subjects:
        point_map = mdt.load_nifti(output_pjoin(subject, 'warped_BinghamNODDI_r1_w_in0.w')).get_data()
        std_map = mdt.load_nifti(output_pjoin(subject, 'warped_BinghamNODDI_r1_w_in0.w.std')).get_data()

        maps[subject + '.std'] = std_map
        maps[subject] = point_map

        map_configs += '''
            {0}:
                scale: {{use_max: true, use_min: true, vmax: 0.8, vmin: 0.0}}
            {0}.std:
                scale: {{use_max: true, use_min: true, vmax: 0.1, vmin: 0.0}}
        '''.format(subject)

    config = '''
        colorbar_settings:
            location: right
            nmr_ticks: 4
            power_limits: [-3, 4]
            round_precision: 3
            visible: false
        grid_layout:
        - Rectangular
        - cols: null
          rows: 4
          spacings: {bottom: 0.03, hspace: 0.15, left: 0.1, right: 0.86, top: 0.97, wspace: 0.4}
        slice_index: 90
        zoom:
            p0: {x: 16, y: 14}
            p1: {x: 161, y: 200}
        colormap_masked_color: 'k'
    '''

    if map_configs:
        config += '''
        map_plot_options:
        ''' + map_configs + '''
        '''

    config += '''
        maps_to_show: [''' + ', '.join(sorted(maps)) + ''']
    '''

    mdt.view_maps(maps, config=config)
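# For one subject, say 'mgh_1002' (illustrative name only), each loop iteration above appends a
# fragment of this form to map_configs, which later ends up under 'map_plot_options' in the
# view_maps configuration:
#
#     mgh_1002:
#         scale: {use_max: true, use_min: true, vmax: 0.8, vmin: 0.0}
#     mgh_1002.std:
#         scale: {use_max: true, use_min: true, vmax: 0.1, vmin: 0.0}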
def correct_mgh_image_position(input_fname, output_fname=None):
    """The HCP MGH data is ill-positioned for the registration algorithm; this function corrects that."""
    header = mdt.load_nifti(input_fname).get_header()
    data = mdt.load_nifti(input_fname).get_data()

    if output_fname is None:
        output_fname = input_fname

    mdt.write_nifti(data[:, ::-1], output_fname, header)
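# Usage sketch for correct_mgh_image_position(); the paths below are hypothetical examples.
# Leaving out output_fname overwrites the input file in place.
correct_mgh_image_position('/data/mgh_1001/data.nii.gz',
                           output_fname='/data/mgh_1001/data_repositioned.nii.gz')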
def _get_subject_maps(model_name, map_name):
    data_name = '{}_{}'.format(model_name, map_name)

    map_list = []
    for subject in os.listdir(registration_pjoin()):
        if subject in subjects_to_filter:
            continue

        data = mdt.load_nifti(registration_pjoin(subject, 'warped_' + data_name)).get_data()
        map_list.append(data)
    return map_list
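# A minimal usage sketch for _get_subject_maps(); the model and map names match the
# 'warped_BinghamNODDI_r1_w_in0.w' files loaded elsewhere in these scripts, while the stacking
# and averaging over subjects is an added illustration, not part of the helper itself.
subject_maps = np.stack(_get_subject_maps('BinghamNODDI_r1', 'w_in0.w'), axis=-1)
group_mean = np.mean(subject_maps, axis=-1)
group_std = np.std(subject_maps, axis=-1)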
def __init__(self, channels, x0=None, cl_device_ind=None, **kwargs):
    """Reconstruct the input using the STARC method.

    Args:
        channels (list): the list of input nifti files, one for each channel element.
            Every nifti file should be a 4d matrix with all the time series on the 4th dimension.
            The length of this list should equal the number of input channels.
        x0 (ndarray or str): optional, the set of weights to use as a starting point
            for the fitting routine.
        cl_device_ind (int or list of int): the list of indices into
            :func:`mct.utils.get_cl_devices` that you want to use for the OpenCL based optimization.
    """
    super().__init__(channels, **kwargs)

    cl_environments = None
    if cl_device_ind is not None:
        if not isinstance(cl_device_ind, (tuple, list)):
            cl_device_ind = [cl_device_ind]
        cl_environments = [get_cl_devices()[ind] for ind in cl_device_ind]

    self.cl_runtime_info = CLRuntimeInfo(cl_environments=cl_environments)

    self._x0 = x0
    if isinstance(self._x0, str):
        self._x0 = mdt.load_nifti(x0).get_data()
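# Construction sketch; it assumes the enclosing class is the STARC reconstruction method named in
# the docstring above, and the channel/starting-point file names are purely illustrative.
channels = ['channel_{}.nii.gz'.format(ind) for ind in range(8)]
starc = STARC(channels, x0='starting_weights.nii.gz', cl_device_ind=[0, 1])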
def check_registration():
    map_configs = ''
    maps = {}
    for subject in os.listdir(output_pjoin()):
        fa_map = mdt.load_nifti(output_pjoin(subject, 'warped_Tensor_Tensor.FA')).get_data()
        maps[subject] = fa_map

        map_configs += '''
            {}:
                scale: {{use_max: true, use_min: true, vmax: 0.5, vmin: 0.0}}
        '''.format(subject)

    config = '''
        colorbar_settings:
            location: right
            nmr_ticks: 4
            power_limits: [-3, 4]
            round_precision: 3
            visible: false
        grid_layout:
        - Rectangular
        - cols: null
          rows: 4
          spacings: {bottom: 0.03, hspace: 0.15, left: 0.1, right: 0.86, top: 0.97, wspace: 0.4}
        slice_index: 90
        zoom:
            p0: {x: 16, y: 14}
            p1: {x: 161, y: 200}
        colormap_masked_color: 'k'
    '''

    config += '''
        map_plot_options:
        ''' + map_configs + '''
        maps_to_show: [''' + ', '.join(sorted(maps)) + ''']
    '''

    mdt.view_maps(maps, config=config)
def run(self, args, extra_args):
    file_names = []
    images = []
    for file in args.input_files:
        globbed = glob.glob(file)
        if globbed:
            for fname in globbed:
                file_names.append(fname)
                images.append(mdt.load_nifti(os.path.realpath(fname)).get_data())
        else:
            file_names.append(file)
            images.append(mdt.load_nifti(os.path.realpath(file)).get_data())

    if args.verbose:
        print('')

    if args.input_4d:
        images = self._images_3d_to_4d(images)

    context_dict = {'input': images, 'i': images, 'np': np, 'mdt': mdt}

    alpha_chars = list('abcdefghjklmnopqrstuvwxyz')
    for ind, image in enumerate(images):
        context_dict.update({alpha_chars[ind]: image})

        if args.verbose:
            print('Input {ind} ({alpha}):'.format(ind=ind, alpha=alpha_chars[ind]))
            print(' name: {}'.format(split_image_path(file_names[ind])[1]))
            print(' shape: {}'.format(str(image.shape)))

    if args.verbose:
        print('')
        print("Evaluating: '{expr}'".format(expr=args.expr))

    if args.as_expression:
        output = eval(args.expr, context_dict)
    else:
        expr = textwrap.dedent('''
            def mdt_image_math():
                {}
            output = mdt_image_math()
        ''').format(args.expr)
        exec(expr, context_dict)
        output = context_dict['output']

    if args.verbose:
        print('')
        if isinstance(output, np.ndarray):
            print('Output shape: {shape}'.format(shape=str(output.shape)))
        else:
            print('Output is single value')
        print('Output: ')
        print('')
        print(output)
    else:
        if not args.write_output:
            print(output)

    if args.verbose:
        print('')

    if args.write_output:
        if isinstance(output, Sequence):
            if args.output_file:
                output_file = os.path.realpath(args.output_file)
                dirname, basename, ext = split_image_path(output_file)
                for ind, element in enumerate(output):
                    mdt.write_nifti(element, dirname + basename + '_' + str(ind) + ext,
                                    mdt.load_nifti(file_names[0]).header)
            else:
                for ind, element in enumerate(output):
                    output_file = os.path.realpath(file_names[ind])
                    mdt.write_nifti(element, output_file, mdt.load_nifti(file_names[ind]).header)
        else:
            if args.output_file:
                output_file = os.path.realpath(args.output_file)
            else:
                output_file = os.path.realpath(file_names[0])
            mdt.write_nifti(output, output_file, mdt.load_nifti(file_names[0]).header)
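# Note on the evaluation context built in run() above: every loaded image is available both
# through the 'input'/'i' lists and under a single letter ('a' for the first input file, 'b' for
# the second, and so on), so a hypothetical expression passed via args.expr could look like:
#
#     (a - b) / (a + b)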
import os

import mdt
from scipy.ndimage import binary_erosion

__maintainer__ = 'Robbert Harms'
__email__ = '*****@*****.**'
__licence__ = 'LGPL v3'


output_pjoin = mdt.make_path_joiner('/home/robbert/phd-data/papers/uncertainty_paper/registration/')

mask = mdt.load_brain_mask('/usr/share/data/fsl-mni152-templates/FMRIB58_FA_1mm.nii.gz')
mask = binary_erosion(mask, iterations=1)

maps = {}
subjects_to_load = ['mgh_1005', 'mgh_1016', 'mgh_1017']
for subject in subjects_to_load:
    point_map = mdt.load_nifti(output_pjoin(subject, 'warped_BinghamNODDI_r1_w_in0.w')).get_data()
    std_map = mdt.load_nifti(output_pjoin(subject, 'warped_BinghamNODDI_r1_w_in0.w.std')).get_data()

    maps[subject + '.std'] = std_map
    maps[subject] = point_map

mdt.apply_mask(maps, mask)

# height : 1000px
mdt.view_maps(maps, config='''
    colorbar_settings:
        location: right
def run(self, args):
    write_output = args.output_file is not None

    if write_output:
        output_file = os.path.realpath(args.output_file)
        if os.path.isfile(output_file):
            os.remove(output_file)

    file_names = []
    for file in args.input_files:
        file_names.extend(glob.glob(file))

    if args.verbose:
        print('')

    images = [mdt.load_nifti(dwi_image).get_data() for dwi_image in file_names]

    if args.input_4d:
        images = self._images_3d_to_4d(images)

    context_dict = {'input': images, 'i': images, 'np': np, 'mdt': mdt}

    alpha_chars = list('abcdefghjklmnopqrstuvwxyz')
    for ind, image in enumerate(images):
        context_dict.update({alpha_chars[ind]: image})

        if args.verbose:
            print('Input {ind} ({alpha}):'.format(ind=ind, alpha=alpha_chars[ind]))
            print(' name: {}'.format(split_image_path(file_names[ind])[1]))
            print(' shape: {}'.format(str(image.shape)))

    if args.verbose:
        print('')
        print("Evaluating: '{expr}'".format(expr=args.expr))

    if args.as_expression:
        output = eval(args.expr, context_dict)
    else:
        expr = textwrap.dedent('''
            def mdt_image_math():
                {}
            output = mdt_image_math()
        ''').format(args.expr)
        exec(expr, context_dict)
        output = context_dict['output']

    if args.verbose:
        print('')
        if isinstance(output, np.ndarray):
            print('Output shape: {shape}'.format(shape=str(output.shape)))
        else:
            print('Output is single value')
        print('Output: ')
        print('')
        print(output)
    else:
        if not write_output:
            print(output)

    if args.verbose:
        print('')

    if write_output:
        mdt.write_image(output_file, output, mdt.load_nifti(file_names[0]).get_header())