def calc_labels_center_of_mass(subject, atlas, read_from_annotation=True, surf_name='pial', labels_fol='', labels=None):
    import csv
    # if (read_from_annotation):
    #     labels = mne.read_labels_from_annot(subject, atlas, 'both', surf_name, subjects_dir=SUBJECTS_DIR)
    #     if len(labels) == 0:
    #         print('No labels were found in {} annotation file!'.format(atlas))
    # else:
    #     labels = []
    #     if labels_fol == '':
    #         labels_fol = op.join(SUBJECTS_DIR, subject, 'label', atlas)
    #     for label_file in glob.glob(op.join(labels_fol, '*.label')):
    #         label = mne.read_label(label_file)
    #         labels.append(label)
    #     if len(labels) == 0:
    #         print('No labels were found in {}!'.format(labels_fol))
    labels = lu.read_labels(subject, SUBJECTS_DIR, atlas)
    if len(labels) > 0:
        center_of_mass = lu.calc_center_of_mass(labels)
        with open(op.join(SUBJECTS_DIR, subject, 'label', '{}_center_of_mass.csv'.format(atlas)), 'w') as csvfile:
            writer = csv.writer(csvfile, delimiter=',')
            for label in labels:
                writer.writerow([label.name, *center_of_mass[label.name]])
        com_fname = op.join(SUBJECTS_DIR, subject, 'label', '{}_center_of_mass.pkl'.format(atlas))
        blend_fname = op.join(MMVT_DIR, subject, '{}_center_of_mass.pkl'.format(atlas))
        utils.save(center_of_mass, com_fname)
        shutil.copyfile(com_fname, blend_fname)
    return len(labels) > 0 and op.isfile(com_fname) and op.isfile(blend_fname)
def save_labels_coloring(subject, atlas, n_jobs=2):
    ret = False
    coloring_dir = op.join(MMVT_DIR, subject, 'coloring')
    utils.make_dir(coloring_dir)
    coloring_fname = op.join(coloring_dir, 'labels_{}_coloring.csv'.format(atlas))
    coloring_names_fname = op.join(coloring_dir, 'labels_{}_colors_names.csv'.format(atlas))
    try:
        labels = lu.read_labels(subject, SUBJECTS_DIR, atlas, n_jobs=n_jobs)
        colors_rgb_and_names = cu.get_distinct_colors_and_names()
        labels_colors_rgb, labels_colors_names = {}, {}
        for label in labels:
            label_inv_name = lu.get_label_hemi_invariant_name(label.name)
            if label_inv_name not in labels_colors_rgb:
                labels_colors_rgb[label_inv_name], labels_colors_names[label_inv_name] = next(colors_rgb_and_names)
        with open(coloring_fname, 'w') as colors_file, open(coloring_names_fname, 'w') as col_names_file:
            for label in labels:
                label_inv_name = lu.get_label_hemi_invariant_name(label.name)
                color_rgb = labels_colors_rgb[label_inv_name]
                color_name = labels_colors_names[label_inv_name]
                colors_file.write('{},{},{},{}\n'.format(label.name, *color_rgb))
                col_names_file.write('{},{}\n'.format(label.name, color_name))
        ret = op.isfile(coloring_fname)
    except:
        print('Error in save_labels_coloring!')
        print(traceback.format_exc())
    return ret
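# A minimal sketch (not part of the pipeline) of how the two CSV files written above
# could be read back. The row layouts ('label,r,g,b' and 'label,color_name') follow the
# write calls in save_labels_coloring; the function name and arguments here are illustrative.
def read_labels_coloring_example(coloring_fname, coloring_names_fname):
    import csv
    colors, color_names = {}, {}
    with open(coloring_fname) as f:
        for label_name, r, g, b in csv.reader(f):
            colors[label_name] = (float(r), float(g), float(b))
    with open(coloring_names_fname) as f:
        for label_name, color_name in csv.reader(f):
            color_names[label_name] = color_name
    return colors, color_names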
def init(subject, atlas, n_jobs):
    from src.utils import geometry_utils as gu
    if not utils.both_hemi_files_exist(
            op.join(SUBJECTS_DIR, subject, 'label', '{}.{}.annot'.format('{hemi}', atlas))):
        anat.create_annotation(subject, atlas)
        if not utils.both_hemi_files_exist(
                op.join(SUBJECTS_DIR, subject, 'label', '{}.{}.annot'.format('{hemi}', atlas))):
            raise Exception('Can\'t find the cortical atlas {} for subject {}'.format(atlas, subject))
    labels_vertices = find_rois.read_labels_vertices(SUBJECTS_DIR, subject, atlas, n_jobs)
    labels = lu.read_labels(subject, SUBJECTS_DIR, atlas)
    labels_names = [l.name for l in labels]
    aseg_atlas_fname = op.join(SUBJECTS_DIR, subject, 'mri', 'aseg.mgz')
    aseg_data = nib.load(aseg_atlas_fname).get_data()
    lut = fu.import_freesurfer_lut()
    pia_verts = {}
    for hemi in ['rh', 'lh']:
        pia_verts[hemi], _ = gu.read_surface(
            op.join(SUBJECTS_DIR, subject, 'surf', '{}.pial'.format(hemi)))
        # pia_verts[hemi], _ = nib.freesurfer.read_geometry(
        #     op.join(SUBJECTS_DIR, subject, 'surf', '{}.pial'.format(hemi)))
    subs_center_of_mass, subs_names = calc_subcorticals_pos(subject, aseg_data, lut)
    labels_center_of_mass = lu.calc_center_of_mass(labels, ret_mat=True) * 1000
    regions_center_of_mass = np.concatenate((labels_center_of_mass, subs_center_of_mass))
    regions_names = labels_names + subs_names
    # save_com_as_elecs(subject, regions_center_of_mass, regions_names, atlas)
    # save_com_as_elecs(subject, subs_center_of_mass, subs_names, atlas)
    return labels_vertices, regions_center_of_mass, regions_names, aseg_data, lut, pia_verts
def save_rois_connectivity(subject, args):
    # Old signature / call, kept for reference:
    # atlas, mat_fname, mat_field, conditions, stat=STAT_DIFF, windows=0,
    # labels_exclude=['unknown', 'corpuscallosum'], threshold=0, threshold_percentile=0,
    # color_map='jet', norm_by_percentile=True, norm_percs=(1, 99), symetric_colors=True):
    # args.atlas, args.mat_fname, args.mat_field, args.conditions, args.stat,
    # args.windows, args.labels_exclude, args.threshold, args.threshold_percentile,
    # args.color_map, args.norm_by_percentile, args.norm_percs)
    d = dict()
    data = sio.loadmat(args.mat_fname)[args.mat_field]
    d['labels'] = lu.read_labels(subject, SUBJECTS_DIR, args.atlas, exclude=args.labels_exclude,
                                 sorted_according_to_annot_file=True)
    d['locations'] = lu.calc_center_of_mass(d['labels'], ret_mat=True) * 1000
    d['hemis'] = ['rh' if l.hemi == 'rh' else 'lh' for l in d['labels']]
    d['labels'] = [l.name for l in d['labels']]
    (d['con_colors'], d['con_indices'], d['con_names'], d['con_values'], d['con_types'],
     d['data_max'], d['data_min']) = calc_connections_colors(data, d['labels'], d['hemis'], args)
    # args.stat, args.conditions, args.windows, args.threshold,
    # args.threshold_percentile, args.color_map, args.norm_by_percentile, args.norm_percs, args.symetric_colors)
    d['conditions'] = args.conditions
    np.savez(op.join(BLENDER_ROOT_DIR, subject, 'rois_con'), **d)
def calc_rois_connectivity(
        subject, clips, modality, atlas, inverse_method, min_order=1, max_order=20, crop_times=(-0.5, 1),
        onset_time=2, windows_length=0.1, windows_shift=0.05, overwrite=False, n_jobs=4):
    windows_length *= 1000
    windows_shift *= 1000
    params = []
    clusters_fol = op.join(MMVT_DIR, subject, meg.modality_fol(modality), 'clusters')
    fwd_usingMEG, fwd_usingEEG = meg.get_fwd_flags(modality)
    bands = {'all': [None, None]}
    crop_times = [t + onset_time for t in crop_times]
    use_functional_rois_atlas = False
    conds = [utils.namebase(clip_fname) for clip_fname in clips['ictal']]
    conds.extend(['{}_baseline'.format(utils.namebase(clip_fname)) for clip_fname in clips['baseline']])
    if not use_functional_rois_atlas:
        labels = lu.read_labels(subject, SUBJECTS_DIR, atlas)
        func_atlas = con_indentifer = atlas
    for clip_fname, cond in zip(clips['ictal'] + clips['baseline'], conds):
        if use_functional_rois_atlas:
            check_connectivity_labels(clips['ictal'], modality, inverse_method, n_jobs=n_jobs)
            labels_fol = op.join(
                clusters_fol, '{}-epilepsy-{}-{}-{}-amplitude-zvals'.format(
                    subject, inverse_method, modality, utils.namebase(clip_fname)))
            labels = lu.read_labels_files(subject, labels_fol, n_jobs=n_jobs)
            # for connectivity we need shorter names
            labels = epi_utils.shorten_labels_names(labels)
            func_atlas = utils.namebase(clip_fname)
            con_indentifer = 'func_rois'
        params.append((
            subject, clip_fname, utils.namebase(clip_fname), func_atlas, labels, inverse_method,
            fwd_usingMEG, fwd_usingEEG, crop_times, bands, min_order, max_order, windows_length,
            windows_shift, con_indentifer, overwrite, 1))
    utils.run_parallel(_calc_clip_rois_connectivity_parallel, params, n_jobs)
def calc_dipoles_rois(subject, atlas='laus125', overwrite=False, n_jobs=4):
    links_dir = utils.get_links_dir()
    subjects_dir = utils.get_link_dir(links_dir, 'subjects')
    mmvt_dir = utils.get_link_dir(links_dir, 'mmvt')
    diploes_rois_output_fname = op.join(mmvt_dir, subject, 'meg', 'dipoles_rois.pkl')
    if op.isfile(diploes_rois_output_fname) and not overwrite:
        diploes_rois = utils.load(diploes_rois_output_fname)
        for dip in diploes_rois.keys():
            diploes_rois[dip]['cortical_probs'] *= 1 / sum(diploes_rois[dip]['cortical_probs'])
            diploes_rois[dip]['subcortical_probs'] = []
            diploes_rois[dip]['subcortical_rois'] = []
        # coritcal_labels = set(utils.flat_list_of_lists(
        #     [diploes_rois[k]['cortical_rois'] for k in diploes_rois.keys()]))
        utils.save(diploes_rois, diploes_rois_output_fname)
        return True
    diploes_input_fname = op.join(mmvt_dir, subject, 'meg', 'dipoles.pkl')
    if not op.isfile(diploes_input_fname):
        print('No dipoles file!')
        return False
    labels = lu.read_labels(subject, subjects_dir, atlas, n_jobs=n_jobs)
    labels = list([{'name': label.name, 'hemi': label.hemi, 'vertices': label.vertices} for label in labels])
    if len(labels) == 0:
        print('Can\'t find the labels for atlas {}!'.format(atlas))
        return False
    # find the find_rois package
    mmvt_code_fol = utils.get_mmvt_code_root()
    ela_code_fol = op.join(utils.get_parent_fol(mmvt_code_fol), 'electrodes_rois')
    if not op.isdir(ela_code_fol) or not op.isfile(op.join(ela_code_fol, 'find_rois', 'main.py')):
        print("Can't find ELA folder!")
        print('git pull https://github.com/pelednoam/electrodes_rois.git')
        return False
    # load the find_rois package
    try:
        import sys
        if ela_code_fol not in sys.path:
            sys.path.append(ela_code_fol)
        from find_rois import main as ela
    except:
        print('Can\'t load find_rois package!')
        utils.print_last_error_line()
        return False
    dipoles_dict = utils.load(diploes_input_fname)
    diploles_names, dipoles_pos = [], []
    for cluster_name, dipoles in dipoles_dict.items():
        for begin_t, _, x, y, z, _, _, _, _, _ in dipoles:
            dipole_name = '{}_{}'.format(cluster_name, begin_t) if len(dipoles) > 1 else cluster_name
            diploles_names.append(dipole_name.replace(' ', ''))
            dipoles_pos.append([k * 1e3 for k in [x, y, z]])
    dipoles_rois = ela.identify_roi_from_atlas(
        atlas, labels, diploles_names, dipoles_pos, approx=3, elc_length=0, hit_only_cortex=True,
        subjects_dir=subjects_dir, subject=subject, n_jobs=n_jobs)
    # Convert the list to a dict
    dipoles_rois_dict = {dip_rois['name']: dip_rois for dip_rois in dipoles_rois}
    utils.save(dipoles_rois_dict, diploes_rois_output_fname)
def save_connectivity_to_blender(subject, atlas, data, conditions, stat, w=0, threshold=0, threshold_percentile=0):
    d = {}
    d['labels'] = lu.read_labels(subject, SUBJECTS_DIR, atlas, exclude=['unknown', 'corpuscallosum'],
                                 sorted_according_to_annot_file=True)
    d['locations'] = lu.calc_center_of_mass(d['labels'], ret_mat=True)
    d['hemis'] = ['rh' if l.hemi == 'rh' else 'lh' for l in d['labels']]
    d['labels'] = [l.name for l in d['labels']]
    d['con_colors'], d['con_indices'], d['con_names'], d['con_values'], d['con_types'] = \
        calc_connections_colors(data, d['labels'], d['hemis'], stat, w, threshold_percentile=threshold_percentile)
    d['conditions'] = conditions
    np.savez(op.join(BLENDER_ROOT_DIR, subject, 'rois_con'), **d)
def save_connectivity_to_blender(subject, atlas, data, conditions, stat, w=0, threshold=0, threshold_percentile=0):
    d = {}
    d['labels'] = lu.read_labels(subject, SUBJECTS_DIR, atlas, exclude=('unknown', 'corpuscallosum'),
                                 sorted_according_to_annot_file=True)
    d['locations'] = lu.calc_center_of_mass(d['labels'], ret_mat=True)
    d['hemis'] = ['rh' if l.hemi == 'rh' else 'lh' for l in d['labels']]
    d['labels'] = [l.name for l in d['labels']]
    d['con_colors'], d['con_indices'], d['con_names'], d['con_values'], d['con_types'] = \
        calc_connections_colors(data, d['labels'], d['hemis'], stat, w, threshold_percentile=threshold_percentile)
    d['conditions'] = conditions
    np.savez(op.join(BLENDER_ROOT_DIR, subject, 'rois_con'), **d)
def merge_fmri_connectivity(args):
    con_method = 'corr'
    threshold_perc = 90
    template_con = utils.make_dir(op.join(MMVT_DIR, args.template_brain, 'connectivity'))
    output_fname = op.join(template_con, 'rest_{}.npz'.format(con_method))
    output_mean_fname = op.join(template_con, 'rest_{}_mean.npz'.format(con_method))
    if op.isfile(output_fname) and not args.overwrite:
        print('Averaged connectivity already exists')
        return True
    tempalte_labels = lu.read_labels(args.template_brain, SUBJECTS_DIR, args.atlas)
    tempalte_labels = [l for l in tempalte_labels
                       if lu.get_label_hemi_invariant_name(l) not in args.labels_exclude]
    all_con = None
    subjects_num = 0
    good_subjects = []
    for subject in args.subject:
        fmri_con_fname = op.join(MMVT_DIR, subject, 'connectivity', 'fmri_{}.npy'.format(con_method))
        if not op.isfile(fmri_con_fname):
            print('{} is missing!'.format(fmri_con_fname))
            continue
        subject_con = np.load(fmri_con_fname)
        print('{}: con.shape {}'.format(subject, subject_con.shape))
        if subject_con.shape[0] != len(tempalte_labels):
            print('Wrong number of cortical labels!')
            continue
        print(np.min(subject_con), np.max(subject_con))
        if all_con is None:
            all_con = np.zeros(subject_con.shape)
        all_con += subject_con
        subjects_num += 1
        good_subjects.append(subject)
    all_con /= subjects_num
    for w in range(all_con.shape[2]):
        all_con[:, :, w] = sym_mat(all_con[:, :, w])
    np.savez(output_fname, con=all_con, names=[l.name for l in tempalte_labels])
    threshold = np.percentile(all_con, threshold_perc)
    np.savez(output_mean_fname, con=all_con.mean(axis=2), threshold=threshold,
             labels=[l.name for l in tempalte_labels])
    print('Good subjects: {}'.format(good_subjects))
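# sym_mat is called above (and in merge_meg_connectivity below) but is not defined in this
# snippet. A minimal sketch under the assumption that it simply symmetrizes a square
# connectivity matrix by averaging it with its transpose; this is an illustrative guess,
# not the project's implementation.
def sym_mat_example(mat):
    import numpy as np
    mat = np.asarray(mat)
    return (mat + mat.T) / 2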
def check_labels():
    from src.utils import labels_utils as lu
    labels = np.load(op.join(root_path, 'labels_names.npy'))
    labels = lu.read_labels('fsaverage', SUBJECTS_DIR, 'laus125', only_names=True,
                            sorted_according_to_annot_file=True)
    # labels = np.array(labels)
    print([(ind, l) for ind, l in enumerate(labels) if l.startswith('unk')])
    print([(ind, l) for ind, l in enumerate(labels) if l.startswith('corp')])
    remove_ids = np.array([1, 5, 114, 118]) - 1
    print('asdf')
def pre_processing(subject, modality, atlas, empty_fname, overwrite=False, n_jobs=4):
    # 0.0) calc fwd inv
    calc_fwd_inv(subject, run_num, modality, raw_fname, empty_fname, bad_channels, overwrite, n_jobs)
    # 0.1) If there is a problem with smoothing the surfaces, you should delete the morphing maps first
    # delete_morphing_maps(subject)
    # 0.2) Find the trans file
    trans_file = meg.find_trans_file(subject=subject)
    # 0.3) Make sure we have a morph map, and if not, create it here, and not in the parallel function
    mne.surface.read_morph_map(subject, subject, subjects_dir=SUBJECTS_DIR)
    # 0.4) Make sure the labels exist; if not, create them
    anat.create_annotation(subject, atlas, n_jobs=n_jobs)
    labels = lu.read_labels(subject, SUBJECTS_DIR, atlas, n_jobs=n_jobs)
    if len(labels) == 0:
        raise Exception('No {} labels!'.format(atlas))
def find_vertices(subject, run_num, modality='meg', atlas='aparc.DKTatlas'):
    from src.utils import labels_utils as lu
    root_dir = op.join(EEG_DIR if modality == 'eeg' else MEG_DIR, subject)
    inv_fname = op.join(root_dir, '{}-epilepsy{}-{}-inv.fif'.format(subject, run_num, modality))
    inv = mne.minimum_norm.read_inverse_operator(inv_fname)
    labels = lu.read_labels(subject, SUBJECTS_DIR, atlas)
    vertices = []
    for ind, label in enumerate(labels):
        _, vertno = mne.minimum_norm.inverse.label_src_vertno_sel(label, inv['src'])
        vertices.extend(vertno)
    np.save(op.join(MMVT_DIR, subject, 'labels_verts.npy'), np.array(vertices))
    return vertices
def calc_distances_from_rois(subject, dist_threshold=0.05):
    from scipy.spatial.distance import cdist
    import nibabel as nib
    dipoles_dict = utils.load(op.join(MMVT_DIR, subject, 'meg', 'dipoles.pkl'))
    labels_times_fol = op.join(MMVT_DIR, subject, 'meg', 'time_accumulate')
    labels = lu.read_labels(subject, SUBJECTS_DIR, 'laus125')
    labels_center_of_mass = lu.calc_center_of_mass(labels)
    labels_pos = np.array([labels_center_of_mass[l.name] for l in labels])
    labels_dict = {l.name: labels_center_of_mass[l.name] for l in labels}
    outer_skin_surf_fname = op.join(SUBJECTS_DIR, subject, 'surf', 'lh.seghead')
    outer_skin_surf_verts, _ = nib.freesurfer.read_geometry(outer_skin_surf_fname)
    for dipole_name, dipoles in dipoles_dict.items():
        dipole_pos = np.array([dipoles[0][2], dipoles[0][3], dipoles[0][4]])
        lables_times_fname = op.join(labels_times_fol, '{}_labels_times.txt'.format(dipole_name))
        if not op.isfile(lables_times_fname):
            print('Can\'t find {}!'.format(lables_times_fname))
            continue
        dists_from_outer_skin = np.min(cdist(outer_skin_surf_verts * 0.001, [dipole_pos]), 0)[0]
        output_fname = op.join(labels_times_fol, '{}_labels_times_dists.txt'.format(dipole_name))
        lines = utils.csv_file_reader(lables_times_fname, delimiter=':', skip_header=1)
        output, dists = [], []
        labels_dists = cdist(labels_pos, [dipole_pos])
        dists_argmin = np.argmin(labels_dists, 0)[0]
        dists_min = np.min(labels_dists, 0)[0]
        closest_label = labels[dists_argmin].name
        print('Parsing {} ({})'.format(dipole_name, closest_label))
        for line in lines:
            if len(line) == 0:
                continue
            elif len(line) != 2:
                print('{}: Problem parsing "{}"'.format(lables_times_fname, line))
                continue
            label_name, label_time = line
            label_pos = labels_dict.get(label_name, None)
            if label_pos is not None:
                dist_from_dipole = np.linalg.norm(dipole_pos - label_pos)
                dists.append(dist_from_dipole)
            else:
                dist_from_dipole = -1
                dists.append(np.nan)
            output.append('{}: {} ({:.4f})'.format(label_name, label_time, dist_from_dipole))
        for ind, dist in enumerate(dists):
            if dist < dist_threshold:
                output[ind] = '{} ***'.format(output[ind])
        title = '{}: {} {:.4f} dist from outer skin: {:.4f} '.format(
            dipole_name, closest_label, dists_min, dists_from_outer_skin)
        utils.save_arr_to_file(output, output_fname, title)
def create_coloring(x, subject, atlas, conditions, colors_map='YlOrRd',
                    exclude=['unknown', 'corpuscallosum'], colors_min_val=None, colors_max_val=None):
    labels = lu.read_labels(subject, SUBJECTS_DIR, atlas, exclude=tuple(exclude),
                            sorted_according_to_annot_file=True, only_names=True)
    for cond_id, cond_name in enumerate(conditions):
        values = x[:, cond_id]
        if colors_min_val is None:
            colors_min_val = np.min(x)
        if colors_max_val is None:
            colors_max_val = np.max(x)
        colors = utils.arr_to_colors(values, colors_min_val, colors_max_val, colors_map=colors_map)
        coloring_fname = op.join(MMVT_DIR, subject, 'coloring', 'labels_{}_coloring.csv'.format(cond_name))
        write_coloring_file(coloring_fname, labels, colors)
    values_diff = np.squeeze(np.diff(x))
    abs_max = max(map(abs, [np.max(values_diff), np.min(values_diff)]))
    colors = utils.mat_to_colors(values_diff, -abs_max, abs_max, 'RdBu', flip_cm=True)
    coloring_fname = op.join(MMVT_DIR, subject, 'coloring', 'labels_{}_{}_diff_coloring.csv'.format(*conditions))
    write_coloring_file(coloring_fname, labels, colors)
def create_coloring(x, subject, atlas, conditions, colors_map='YlOrRd',
                    exclude=['unknown', 'corpuscallosum'], colors_min_val=None, colors_max_val=None):
    labels = lu.read_labels(subject, SUBJECTS_DIR, atlas, exclude=exclude,
                            sorted_according_to_annot_file=True, only_names=True)
    for cond_id, cond_name in enumerate(conditions):
        values = x[:, cond_id]
        if colors_min_val is None:
            colors_min_val = np.min(x)
        if colors_max_val is None:
            colors_max_val = np.max(x)
        colors = utils.arr_to_colors(values, colors_min_val, colors_max_val, colors_map=colors_map)
        coloring_fname = op.join(MMVT_DIR, subject, 'coloring', 'labels_{}_coloring.csv'.format(cond_name))
        write_coloring_file(coloring_fname, labels, colors)
    values_diff = np.squeeze(np.diff(x))
    abs_max = max(map(abs, [np.max(values_diff), np.min(values_diff)]))
    colors = utils.mat_to_colors(values_diff, -abs_max, abs_max, 'RdBu', flip_cm=True)
    coloring_fname = op.join(MMVT_DIR, subject, 'coloring', 'labels_{}_{}_diff_coloring.csv'.format(*conditions))
    write_coloring_file(coloring_fname, labels, colors)
def lables_stat(subject, atlas, excluded=('corpuscallosum', 'unknown')):
    all_labels = lu.read_labels(subject, SUBJECTS_DIR, atlas)
    labels, _ = lu.remove_exclude_labels(all_labels, excludes=excluded)
    print('Removed {} labels'.format(len(all_labels) - len(labels)))
    vertices_num = [len(l.vertices) for l in labels]
    print('vertices num: {}-{}'.format(np.min(vertices_num), np.max(vertices_num)))
    labels_area = []
    for l in tqdm(labels):
        vertices, faces = get_vertices_faces(l.vertices, subject, l.hemi)
        labels_area.append(tu.triangle_area(vertices, faces))
    labels_area = np.array(labels_area)
    print('areas: min {}, max {}, mean {}, std {}'.format(
        np.min(labels_area), np.max(labels_area), np.mean(labels_area), np.std(labels_area)))
    fol = utils.make_dir(op.join(SUBJECTS_DIR, subject, 'label', atlas))
    output_fname = op.join(fol, 'labels_stat.npz')
    np.savez(output_fname, labels_area=labels_area, vertices_num=vertices_num)
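# tu.triangle_area is not defined in this snippet. As a rough reference, the total area of a
# triangulated label patch can be computed with cross products; this sketch only illustrates
# the idea and is an assumption about what the helper does, not the project's implementation.
def total_triangle_area_example(vertices, faces):
    import numpy as np
    tris = np.asarray(vertices)[np.asarray(faces)]   # (n_faces, 3, 3) triangle corners
    edges1 = tris[:, 1] - tris[:, 0]
    edges2 = tris[:, 2] - tris[:, 0]
    # Each triangle's area is half the norm of the cross product of two edges
    return 0.5 * np.linalg.norm(np.cross(edges1, edges2), axis=1).sum()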
def calc_labels_center_of_mass(subject, atlas, read_from_annotation=True, surf_name='pial', labels_fol='', labels=None):
    import csv
    labels = lu.read_labels(subject, SUBJECTS_DIR, atlas)
    if len(labels) > 0:
        if np.all(labels[0].pos == 0):
            verts = {}
            for hemi in utils.HEMIS:
                verts[hemi], _ = utils.read_pial_npz(subject, MMVT_DIR, hemi)
            for label in labels:
                label.pos = verts[label.hemi][label.vertices]
        center_of_mass = lu.calc_center_of_mass(labels)
        with open(op.join(SUBJECTS_DIR, subject, 'label', '{}_center_of_mass.csv'.format(atlas)), 'w') as csvfile:
            writer = csv.writer(csvfile, delimiter=',')
            for label in labels:
                writer.writerow([label.name, *center_of_mass[label.name]])
        com_fname = op.join(SUBJECTS_DIR, subject, 'label', '{}_center_of_mass.pkl'.format(atlas))
        blend_fname = op.join(MMVT_DIR, subject, '{}_center_of_mass.pkl'.format(atlas))
        utils.save(center_of_mass, com_fname)
        shutil.copyfile(com_fname, blend_fname)
    return len(labels) > 0 and op.isfile(com_fname) and op.isfile(blend_fname)
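# A minimal usage sketch (not part of the pipeline): reading the center-of-mass CSV written
# above and looking up the label closest to a given coordinate. The row layout
# ('label_name,x,y,z') follows the writer.writerow call in calc_labels_center_of_mass;
# the function name and arguments here are illustrative.
def closest_label_to_point_example(com_csv_fname, point):
    import csv
    import numpy as np
    names, coms = [], []
    with open(com_csv_fname) as f:
        for row in csv.reader(f):
            names.append(row[0])
            coms.append([float(v) for v in row[1:4]])
    dists = np.linalg.norm(np.array(coms) - np.asarray(point), axis=1)
    return names[int(np.argmin(dists))], float(np.min(dists))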
def save_hemis_curv(subject, atlas):
    out_curv_file = op.join(MMVT_DIR, subject, 'surf', '{hemi}.curv.npy')
    # out_border_file = op.join(MMVT_DIR, subject, 'surf', '{hemi}.curv.borders.npy')
    # if utils.both_hemi_files_exist(out_file):
    #     return True
    for hemi in utils.HEMIS:
        # Load in curvature values from the ?h.curv file.
        if not op.isfile(out_curv_file.format(hemi=hemi)):
            curv_path = op.join(SUBJECTS_DIR, subject, 'surf', '{}.curv'.format(hemi))
            curv = nib.freesurfer.read_morph_data(curv_path)
            bin_curv = np.array(curv > 0, int)
            np.save(out_curv_file.format(hemi=hemi), bin_curv)
        else:
            bin_curv = np.load(out_curv_file.format(hemi=hemi))
        labels_fol = op.join(MMVT_DIR, subject, 'surf', '{}_{}_curves'.format(atlas, hemi))
        utils.make_dir(labels_fol)
        labels = lu.read_labels(subject, SUBJECTS_DIR, atlas, hemi=hemi)
        for label in labels:
            labels_curv = bin_curv[label.vertices]
            np.save(op.join(labels_fol, '{}_curv.npy'.format(label.name)), labels_curv)
    return utils.both_hemi_files_exist(out_curv_file)  # and utils.both_hemi_files_exist(out_border_file)
def save_rois_connectivity(subject, args):
    # Old signature / call, kept for reference:
    # atlas, mat_fname, mat_field, conditions, stat=STAT_DIFF, windows=0,
    # labels_exclude=['unknown', 'corpuscallosum'], threshold=0, threshold_percentile=0,
    # color_map='jet', norm_by_percentile=True, norm_percs=(1, 99), symetric_colors=True):
    # args.atlas, args.mat_fname, args.mat_field, args.conditions, args.stat,
    # args.windows, args.labels_exclude, args.threshold, args.threshold_percentile,
    # args.color_map, args.norm_by_percentile, args.norm_percs)
    d = dict()
    data = sio.loadmat(args.mat_fname)[args.mat_field]
    d['labels'] = lu.read_labels(
        subject, SUBJECTS_DIR, args.atlas, exclude=args.labels_exclude,
        sorted_according_to_annot_file=True)
    d['locations'] = lu.calc_center_of_mass(d['labels'], ret_mat=True) * 1000
    d['hemis'] = ['rh' if l.hemi == 'rh' else 'lh' for l in d['labels']]
    d['labels'] = [l.name for l in d['labels']]
    (d['con_colors'], d['con_indices'], d['con_names'], d['con_values'], d['con_types'],
     d['data_max'], d['data_min']) = calc_connections_colors(data, d['labels'], d['hemis'], args)
    # args.stat, args.conditions, args.windows, args.threshold,
    # args.threshold_percentile, args.color_map, args.norm_by_percentile, args.norm_percs, args.symetric_colors)
    d['conditions'] = args.conditions
    np.savez(op.join(MMVT_DIR, subject, 'rois_con'), **d)
def calc_cortical_histograms(subject, scan_rescan, atlas, low_threshold=40, high_threshold=100,
                             overwrite=False, do_plot=True, n_jobs=4):
    if not lu.check_labels(subject, atlas, SUBJECTS_DIR, MMVT_DIR):
        return False
    output_fol = utils.make_dir(
        op.join(MMVT_DIR, subject, 'ASL', scan_rescan, 'labels_hists_{}'.format(atlas)))
    output_fname = op.join(MMVT_DIR, subject, 'ASL', scan_rescan, 'labels_hists_{}.pkl'.format(atlas))
    if overwrite:
        utils.delete_folder_files(output_fol)
    print('Saving figures into {}'.format(output_fol))
    labels_data = {}
    for hemi in utils.HEMIS:
        npy_data_fname = op.join(
            MMVT_DIR, subject, 'fmri', 'fmri_CBF_{}_{}.npy'.format(scan_rescan, hemi))
        if not op.isfile(npy_data_fname):
            print('Cannot find hemi data file! ({})'.format(npy_data_fname))
            return False
        surf_fname = op.join(SUBJECTS_DIR, subject, 'surf', '{}.pial'.format(hemi))
        if not op.isfile(surf_fname):
            print('Cannot find surface file! ({})'.format(surf_fname))
            return False
        hemi_vals = np.load(npy_data_fname)
        labels = lu.read_labels(subject, SUBJECTS_DIR, atlas, hemi=hemi, n_jobs=n_jobs)
        labels_vertives_num = max([max(l.vertices) for l in labels])
        hemi_data_vertices_num = len(hemi_vals) - 1
        if labels_vertives_num != hemi_data_vertices_num:
            print('Wrong number of vertices or labels! labels_vertives_num ({}) != len({})'.format(
                labels_vertives_num, len(hemi_vals)))
            return False
        for label in tqdm(labels):
            fig_fname = op.join(output_fol, '{}.jpg'.format(label.name))
            if op.isfile(fig_fname) and not overwrite:
                continue
            labels_data[label.name] = label_data = hemi_vals[label.vertices]
            outliers = np.where(label_data < low_threshold)[0]
            if len(outliers) > 0:
                print('{} has {}/{} values < {}'.format(
                    label.name, len(outliers), len(label_data), low_threshold))
            outliers = np.where(label_data > high_threshold)[0]
            if len(outliers) > 0:
                print('{} has {}/{} values > {}'.format(
                    label.name, len(outliers), len(label_data), high_threshold))
            label_data[label_data < low_threshold] = low_threshold
            label_data[label_data > high_threshold] = high_threshold
            if do_plot:
                plt.hist(label_data, bins=40)
                plt.savefig(fig_fname)
                plt.close()
    utils.save(labels_data, output_fname)
def post_meg_preproc(args):
    inv_method, em, atlas = 'dSPM', 'mean_flip', 'darpa_atlas'
    bands = dict(theta=[4, 8], alpha=[8, 15], beta=[15, 30], gamma=[30, 55], high_gamma=[65, 200])
    norm_times = (500, 2500)
    do_plot = False

    subjects = args.subject
    res_fol = utils.make_dir(op.join(utils.get_parent_fol(MMVT_DIR), 'msit-ecr'))
    subjects_with_results = {}
    labels = lu.read_labels(subjects[0], SUBJECTS_DIR, atlas)
    labels_names = [l.name for l in labels]
    labels_num = len(labels_names)
    epochs_max_num = 50
    template_brain = 'colin27'

    now = time.time()
    bands_power_mmvt_all = []
    for subject_ind, subject in enumerate(subjects):
        utils.time_to_go(now, subject_ind, len(subjects), runs_num_to_print=1)
        subjects_with_results[subject] = {}
        input_fol = utils.make_dir(op.join(MEG_DIR, subject, 'labels_induced_power'))
        plots_fol = utils.make_dir(op.join(input_fol, 'plots'))
        args.subject = subject
        bands_power_mmvt = {'rh': {}, 'lh': {}}
        for task_ind, task in enumerate(args.tasks):
            task = task.lower()
            input_fnames = glob.glob(
                op.join(input_fol, '{}_*_{}_{}_induced_power.npz'.format(task, inv_method, em)))
            if len(input_fnames) < 1:  # labels_num:
                print('Not enough files for {} {}!'.format(subject, task))
                subjects_with_results[subject][task] = False
                continue
            # input_dname = ecr_caudalanteriorcingulate-lh_dSPM_mean_flip_induced_power
            # if not do_plot:
            #     continue
            bands_power = np.empty((len(bands), labels_num, epochs_max_num))
            for input_fname in input_fnames:
                d = utils.Bag(np.load(input_fname))  # label_name, atlas, data
                # label_power = np.empty((len(bands), epochs_num, T))  # (5, 50, 3501)
                label_power, label_name = d.data, d.label_name
                # for band_ind in range(len(bands)):
                #     label_power[band_ind] /= label_power[band_ind][:, norm_times[0]:norm_times[1]].mean()
                label_ind = labels_names.index(label_name)
                hemi = labels[label_ind].hemi
                for band_ind, band in enumerate(bands.keys()):
                    label_power_norm = label_power[band_ind][:, norm_times[0]:norm_times[1]].mean(
                        axis=1)[:epochs_max_num]
                    if len(label_power_norm) != epochs_max_num:
                        print('{} only has {} epochs!'.format(input_fname, len(label_power_norm)))
                        break
                    bands_power[band_ind, label_ind] = label_power_norm
                    if band not in bands_power_mmvt[hemi]:
                        bands_power_mmvt[hemi][band] = np.empty(
                            (len(labels_names), label_power[band_ind].shape[1], 1, len(args.tasks)))
                    bands_power_mmvt[hemi][band][label_ind, :, 0, task_ind] = \
                        label_power[band_ind].mean(axis=0)
                fig_fname = op.join(plots_fol, 'power_{}_{}.jpg'.format(label_name, task))
                if do_plot:  # not op.isfile(fig_fname) and
                    times = np.arange(0, label_power.shape[2]) if 'times' not in d else d.times
                    plot_label_power(label_power, times, label_name, bands, task, fig_fname)
            for band_ind, band in enumerate(bands.keys()):
                power_fname = op.join(
                    res_fol, subject,
                    '{}_labels_{}_{}_{}_power.npz'.format(task.lower(), inv_method, em, band))
                np.savez(power_fname, data=np.array(bands_power[band_ind]), names=labels_names)
            subjects_with_results[subject][task] = True
        if all(subjects_with_results[subject].values()):
            bands_power_mmvt_all.append(bands_power_mmvt)
        else:
            print('{} does not have both tasks data!'.format(subject))

    labels_data_template = op.join(
        MMVT_DIR, template_brain, 'meg',
        'labels_data_power_{}_{}_{}_{}_{}.npz')  # task, atlas, extract_method, hemi
    for hemi in utils.HEMIS:
        for band_ind, band in enumerate(bands.keys()):
            power = np.array([x[hemi][band] for x in bands_power_mmvt_all]).mean(axis=0)
            labels_output_fname = meg.get_labels_data_fname(
                labels_data_template, inv_method, band, atlas, em, hemi)
            utils.make_dir(utils.get_parent_fol(labels_output_fname))
            np.savez(labels_output_fname, data=power, names=labels_names, conditions=args.tasks)
    have_all = len([subject for subject, results in subjects_with_results.items()
                    if all(results.values())])
    print('{}/{} with all files'.format(have_all, len(subjects)))
    print(subjects_with_results)
def parcelate(subject, atlas, hemi, surface_type, vertices_labels_ids_lookup=None,
              overwrite_vertices_labels_lookup=False):
    output_fol = op.join(MMVT_DIR, subject, 'labels', '{}.{}.{}'.format(atlas, surface_type, hemi))
    utils.make_dir(output_fol)
    vtx, fac = utils.read_ply_file(
        op.join(MMVT_DIR, subject, 'surf', '{}.{}.ply'.format(hemi, surface_type)))
    if vertices_labels_ids_lookup is None or overwrite_vertices_labels_lookup:
        vertices_labels_ids_lookup = lu.create_vertices_labels_lookup(
            subject, atlas, True, overwrite_vertices_labels_lookup)[hemi]
    labels = lu.read_labels(subject, SUBJECTS_DIR, atlas, hemi=hemi)
    if 'unknown-{}'.format(hemi) not in [l.name for l in labels]:
        labels.append(lu.Label([], name='unknown-{}'.format(hemi), hemi=hemi))

    nV = vtx.shape[0]
    nF = fac.shape[0]
    nL = len(labels)
    # print('The number of unique labels is {}'.format(nL))
    vtxL = [[] for _ in range(nL)]
    facL = [[] for _ in range(nL)]

    now = time.time()
    for f in range(nF):
        utils.time_to_go(now, f, nF, runs_num_to_print=50000)
        # Current face & labels
        Cfac = fac[f]
        Cidx = [vertices_labels_ids_lookup[vert_ind] for vert_ind in Cfac]
        # Depending on how many vertices of the current face
        # are in different labels, behave differently
        # nuCidx = len(np.unique(Cidx))
        # if nuCidx == 1:  # If all vertices share the same label
        # same_label = utils.all_items_equall(Cidx)
        # if same_label:
        if Cidx[0] == Cidx[1] == Cidx[2]:
            # Add the current face to the list of faces of the
            # respective label, and don't create new faces
            facL[Cidx[0]] += [Cfac.tolist()]
        else:
            # If 2 or 3 vertices are in different labels,
            # create 3 new vertices at the midpoints of the 3 edges
            vtxCfac = vtx[Cfac]
            vtxnew = (vtxCfac + vtxCfac[[1, 2, 0]]) / 2
            vtx = np.concatenate((vtx, vtxnew))
            # Define 4 new faces, taking care to preserve normals (all CCW)
            facnew = [[Cfac[0], nV, nV + 2],
                      [nV, Cfac[1], nV + 1],
                      [nV + 2, nV + 1, Cfac[2]],
                      [nV, nV + 1, nV + 2]]
            # Update nV for the next loop
            nV = vtx.shape[0]
            # Add the new faces to their respective labels
            facL[Cidx[0]] += [facnew[0]]
            facL[Cidx[1]] += [facnew[1]]
            facL[Cidx[2]] += [facnew[2]]
            freq_Cidx = mode(Cidx)
            facL[freq_Cidx] += [facnew[3]]  # central face

    # Having defined new faces and assigned all faces to labels, now
    # select the vertices and redefine faces to use the new vertex indices.
    # Also, create the file for the indices
    # fidx = fopen(sprintf('%s.index.csv', srfprefix), 'w');
    # params = []
    # for lab in range(nL):
    #     facL_lab = facL[lab]
    #     facL_lab_flat = utils.list_flatten(facL_lab)
    #     vidx = list(set(facL_lab_flat))
    #     vtxL_lab = vtx[vidx]
    #     params.append((facL_lab, vtxL_lab, vidx, nV, labels[lab].name, hemi, output_fol))
    # utils.run_parallel(writing_ply_files_parallel, params, njobs=n_jobs)
    ret = True
    for lab in range(nL):
        ret = ret and writing_ply_files(
            subject, surface_type, lab, facL[lab], vtx, vtxL, labels, hemi, output_fol)
    return ret
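# A toy illustration (not project code) of the edge-midpoint split used in parcelate above:
# a triangle whose vertices fall in different labels is replaced by four smaller CCW
# triangles, three corner triangles plus one central triangle built from the edge midpoints.
# The function name and the example coordinates are hypothetical.
import numpy as np

def split_triangle_example():
    vtx = np.array([[0., 0., 0.], [1., 0., 0.], [0., 1., 0.]])  # a single triangle
    Cfac = np.array([0, 1, 2])
    nV = vtx.shape[0]
    vtxCfac = vtx[Cfac]
    vtxnew = (vtxCfac + vtxCfac[[1, 2, 0]]) / 2      # midpoints of edges (0-1, 1-2, 2-0)
    vtx = np.concatenate((vtx, vtxnew))              # midpoints get indices nV, nV+1, nV+2
    facnew = [[Cfac[0], nV, nV + 2],                 # corner triangle at vertex 0
              [nV, Cfac[1], nV + 1],                 # corner triangle at vertex 1
              [nV + 2, nV + 1, Cfac[2]],             # corner triangle at vertex 2
              [nV, nV + 1, nV + 2]]                  # central triangle
    return vtx, facnew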
def merge_meg_connectivity(args):
    inv_method, em = 'dSPM', 'mean_flip'
    con_method, con_mode = 'pli2_unbiased', 'multitaper'
    bands = dict(theta=[4, 8], alpha=[8, 15], beta=[15, 30], gamma=[30, 55], high_gamma=[65, 200])
    threshold_perc = 90
    template_con = utils.make_dir(op.join(MMVT_DIR, args.template_brain, 'connectivity'))
    output_fname = op.join(template_con, 'rest_{}_{}_{}.npz'.format(em, con_method, con_mode))
    static_output_fname = op.join(
        template_con, 'meg_rest_{}_{}_{}_{}_mean.npz'.format(em, con_method, con_mode, '{band}'))
    if op.isfile(output_fname) and not args.overwrite:
        print('Averaged connectivity already exists')
        return True
    org_tempalte_labels = lu.read_labels(args.template_brain, SUBJECTS_DIR, args.atlas)
    tempalte_labels = [l for l in org_tempalte_labels
                       if lu.get_label_hemi_invariant_name(l) not in args.labels_exclude]
    include_indices = [ind for ind, l in enumerate(org_tempalte_labels)
                       if lu.get_label_hemi_invariant_name(l) not in args.labels_exclude]
    all_con = None
    subjects_num = 0
    good_subjects = []
    for subject in args.subject:
        meg_con_fname = op.join(
            MMVT_DIR, subject, 'connectivity', 'rest_{}_{}_{}.npz'.format(em, con_method, con_mode))
        if not op.isfile(meg_con_fname):
            continue
        con_dict = utils.Bag(np.load(meg_con_fname))
        # print('{}: con.shape {}'.format(subject, con_dict.con.shape))
        if con_dict.con.shape[0] != len(org_tempalte_labels):
            print('Wrong number of cortical labels!')
            continue
        subject_con = con_dict.con[np.ix_(include_indices, include_indices)]
        if subject_con.shape[0] != len(tempalte_labels) or subject_con.shape[1] != len(tempalte_labels):
            print('Wrong number of cortical labels!')
            continue
        # print(np.min(subject_con), np.max(subject_con))
        if np.isnan(np.max(subject_con)):
            print('nan in data!')
            continue
        if all_con is None:
            all_con = np.zeros(subject_con.shape)
        for l in range(subject_con.shape[2]):
            subject_con[:, :, l] = sym_mat(subject_con[:, :, l])
        all_con += subject_con
        subjects_num += 1
        good_subjects.append(subject)
    all_con /= subjects_num
    np.savez(output_fname, con=all_con, names=[l.name for l in tempalte_labels])
    for band_ind, band in enumerate(bands.keys()):
        threshold = np.percentile(all_con[:, :, band_ind], threshold_perc)
        np.savez(static_output_fname.format(band=band), con=all_con[:, :, band_ind],
                 threshold=threshold, labels=[l.name for l in tempalte_labels])
    print('Good subjects: {}'.format(good_subjects))