import os
import os.path as op
import glob
import shutil
import csv
from collections.abc import Iterable
from itertools import product

import numpy as np
import nibabel as nib
import matplotlib.pyplot as plt

# The project-level helpers used below (utils, lu, fu, pu, fmri, epi_utils, _read_annot,
# read_html, replace_fields, create_new_html, create_pdf) and the constants SUBJECTS_DIR,
# MMVT_DIR, hesheng_surf_fol and hesheng_template are assumed to be provided by the
# surrounding MMVT codebase.


def calc_hesheng_surf(subject, atlas):
    subject_fol = op.join(fmri.MMVT_DIR, subject, 'fmri')
    if not (utils.both_hemi_files_exist(op.join(subject_fol, 'fmri_hesheng_{hemi}.npy')) and
            op.isfile(op.join(subject_fol, 'hesheng_minmax.pkl'))):
        # Copy and rename Hesheng's files
        hesheng_fnames = glob.glob(op.join(
            hesheng_surf_fol.format(subject=subject), hesheng_template.format(subject=subject)))
        for fname in hesheng_fnames:
            hemi = lu.get_label_hemi_invariant_name(utils.namebase(fname))
            target_file = op.join(fmri.FMRI_DIR, subject, 'hesheng_{}.nii.gz'.format(hemi))
            mgz_target_file = utils.change_fname_extension(target_file, 'mgz')
            if not op.isfile(mgz_target_file):
                shutil.copy(fname, target_file)
                fu.nii_gz_to_mgz(target_file)
                os.remove(target_file)
        # Load Hesheng's files
        args = fmri.read_cmd_args(dict(
            subject=subject, atlas=atlas, function='load_surf_files', overwrite_surf_data=True,
            fmri_file_template='hesheng_{hemi}.mgz'))
        pu.run_on_subjects(args, fmri.main)
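
# The helper below is only a usage sketch for calc_hesheng_surf and is not part of the
# original module. The subject and atlas values are hypothetical placeholders, and running
# it requires hesheng_surf_fol / hesheng_template to point at Hesheng's surface files.
def _calc_hesheng_surf_example():
    calc_hesheng_surf('subject01', 'laus125')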

def create_aparc_aseg_file(subject, atlas, overwrite_aseg_file=False, print_only=False, args={}):
    if not utils.both_hemi_files_exist(op.join(
            SUBJECTS_DIR, subject, 'label', '{}.{}.annot'.format('{hemi}', atlas))):
        print('No annot file was found for {}!'.format(atlas))
        print('Run python -m src.preproc.anatomy -s {} -a {} -f create_surfaces,create_annotation'.format(
            subject, atlas))
        return False
    # aparc_aseg_fname
    ret = fu.create_aparc_aseg_file(
        subject, atlas, SUBJECTS_DIR, overwrite_aseg_file, print_only, mmvt_args=args)
    if isinstance(ret, Iterable):
        ret, aparc_aseg_fname = ret
    if not ret:
        return False
    aparc_aseg_file = utils.namebase_with_ext(aparc_aseg_fname)
    utils.make_dir(op.join(MMVT_DIR, subject, 'freeview'))
    blender_file = op.join(MMVT_DIR, subject, 'freeview', aparc_aseg_file)
    utils.remove_file(blender_file)
    shutil.copyfile(aparc_aseg_fname, blender_file)
    atlas_mat_fname = utils.change_fname_extension(blender_file, 'npy')
    if not op.isfile(atlas_mat_fname) or overwrite_aseg_file:
        d = nib.load(blender_file)
        x = d.get_data()
        np.save(atlas_mat_fname, x)
    return op.isfile(blender_file) and op.isfile(atlas_mat_fname)
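
# Sketch (not from the original module): one way to use the voxel array that
# create_aparc_aseg_file saves next to the freeview volume. The '{}+aseg.npy' file name is
# an assumption about what fu.create_aparc_aseg_file produces; adjust it to the real output.
def _count_aparc_aseg_labels_sketch(subject, atlas):
    atlas_mat_fname = op.join(MMVT_DIR, subject, 'freeview', '{}+aseg.npy'.format(atlas))
    x = np.load(atlas_mat_fname)
    # Count how many voxels belong to each segmentation id
    ids, counts = np.unique(x, return_counts=True)
    return dict(zip(ids.astype(int), counts))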

def obj_to_ply():
    fol = '/homes/5/npeled/space1/Angelique/Lionel Recon/blender/objs'
    files = glob.glob(op.join(fol, '*.obj'))
    for f in files:
        print(f)
        verts, faces = utils.read_obj_file(f)
        ply_fname = utils.change_fname_extension(f, 'ply')
        utils.write_ply_file(verts, faces, ply_fname, True)
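
# obj_to_ply relies on the project's utils.read_obj_file / utils.write_ply_file helpers.
# As a rough, self-contained illustration of the same idea (not the project's
# implementation), a minimal ASCII OBJ-to-PLY converter could look like the sketch below.
# It assumes triangular faces and ignores texture/normal indices ('f v/vt/vn ...').
def _obj_to_ply_sketch(obj_fname, ply_fname):
    verts, faces = [], []
    with open(obj_fname) as f:
        for line in f:
            parts = line.split()
            if not parts:
                continue
            if parts[0] == 'v':
                verts.append([float(x) for x in parts[1:4]])
            elif parts[0] == 'f':
                # OBJ indices are 1-based; keep only the vertex index of each corner
                faces.append([int(p.split('/')[0]) - 1 for p in parts[1:4]])
    with open(ply_fname, 'w') as f:
        f.write('ply\nformat ascii 1.0\n')
        f.write('element vertex {}\nproperty float x\nproperty float y\nproperty float z\n'.format(len(verts)))
        f.write('element face {}\nproperty list uchar int vertex_indices\nend_header\n'.format(len(faces)))
        for v in verts:
            f.write('{} {} {}\n'.format(*v))
        for face in faces:
            f.write('3 {} {} {}\n'.format(*face))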

def main(html_template_fname, scan_fol, patient_name, task_name, mrn, scan_date, img_prefix=None,
         report_name_suffix='', output_fname=''):
    if utils.get_parent_fol(html_template_fname) != scan_fol:
        shutil.copy(html_template_fname, scan_fol)
    if output_fname == '':
        output_fname = op.join(scan_fol, '{}{}.pdf'.format(
            mrn, '_{}'.format(report_name_suffix) if report_name_suffix != '' else ''))
    new_html_fname = utils.change_fname_extension(output_fname, 'html')
    if img_prefix == 'auto':
        img_prefix = utils.find_common_start(
            [utils.namebase(f) for f in glob.glob(op.join(scan_fol, '*.png'))])
        img_prefix = img_prefix[:-1] if img_prefix[-1] == '_' else img_prefix
        img_prefix = img_prefix[:-2] if img_prefix[-2:] == '_l' else img_prefix
    html = read_html(html_template_fname)
    html = replace_fields(html, patient_name, task_name, mrn, scan_date, img_prefix)
    create_new_html(html, new_html_fname)
    create_pdf(new_html_fname, output_fname)
    os.remove(op.join(scan_fol, utils.namebase_with_ext(html_template_fname)))
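
# Usage sketch for the report builder main() (not from the original module): every path and
# patient detail below is a hypothetical placeholder. With img_prefix='auto' the common
# prefix of the PNG files in scan_fol is detected automatically.
def _report_example():
    main(html_template_fname='/path/to/report_template.html',
         scan_fol='/path/to/scan_folder',
         patient_name='John Doe', task_name='Resting state', mrn='000000',
         scan_date='2019-01-01', img_prefix='auto')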

def create_lut_file_for_atlas(subject, atlas):
    if not utils.both_hemi_files_exist(op.join(
            SUBJECTS_DIR, subject, 'label', '{}.{}.annot'.format('{hemi}', atlas))):
        print('No annot file was found for {}!'.format(atlas))
        print('Run python -m src.preproc.anatomy -s {} -a {} -f create_surfaces,create_annotation'.format(
            subject, atlas))
        return False
    # Read the subcortical segmentation from the freesurfer lut
    new_lut_fname = op.join(SUBJECTS_DIR, subject, 'label', '{}ColorLUT.txt'.format(atlas))
    mmvt_lut_fname = op.join(MMVT_DIR, subject, 'freeview', '{}ColorLUT.txt'.format(atlas))
    # if op.isfile(mmvt_lut_fname) and not args.overwrite_aseg_file:
    #     return
    lut = utils.read_freesurfer_lookup_table(get_colors=True)
    lut_new = [[l[0], l[1].astype(str), l[2], l[3], l[4], l[5]] for l in lut if l[0] < 1000]
    for hemi, offset in zip(['lh', 'rh'], [1000, 2000]):
        if hemi == 'lh':
            lut_new.append([offset, 'ctx-lh-unknown', 25, 5, 25, 0])
        else:
            lut_new.append([offset, 'ctx-rh-unknown', 25, 5, 25, 0])
        _, ctab, names = _read_annot(op.join(
            SUBJECTS_DIR, subject, 'label', '{}.{}.annot'.format(hemi, atlas)))
        names = [name.astype(str) for name in names]
        for index, (label, cval) in enumerate(zip(names, ctab)):
            r, g, b, a, _ = cval
            lut_new.append([index + offset + 1, label, r, g, b, a])
    lut_new.sort(key=lambda x: x[0])
    # Add the values above 3000
    for l in [l for l in lut if l[0] >= 3000]:
        lut_new.append([l[0], l[1].astype(str), l[2], l[3], l[4], l[5]])
    with open(new_lut_fname, 'w') as fp:
        csv_writer = csv.writer(fp, delimiter='\t')
        csv_writer.writerows(lut_new)
    # np.savetxt(new_lut_fname, lut_new, delimiter='\t', fmt="%s")
    utils.make_dir(op.join(MMVT_DIR, subject, 'freeview'))
    shutil.copyfile(new_lut_fname, mmvt_lut_fname)
    lut_npz_fname = utils.change_fname_extension(mmvt_lut_fname, 'npz')
    x = np.genfromtxt(mmvt_lut_fname, dtype=str)  # np.str was removed from NumPy; plain str is equivalent
    np.savez(lut_npz_fname, names=x[:, 1], ids=x[:, 0].astype(int))
    return op.isfile(mmvt_lut_fname) and op.isfile(lut_npz_fname)
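
# Sketch (not from the original module): read back the npz that create_lut_file_for_atlas
# saves and build an id -> name lookup. Each row of the LUT written above is
# id, name, R, G, B, A (tab separated), following the FreeSurferColorLUT.txt layout.
def _load_atlas_lut_sketch(subject, atlas):
    lut_npz_fname = op.join(MMVT_DIR, subject, 'freeview', '{}ColorLUT.npz'.format(atlas))
    d = np.load(lut_npz_fname)
    return dict(zip(d['ids'], d['names']))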

def mat_to_ply():
    fol = '/homes/5/npeled/space1/Angelique/Lionel Recon/'
    files = glob.glob(op.join(fol, 'Iso*.mat'))
    avg_fname = op.join(fol, 'blender', 'mean_vertices.npy')
    if not op.isfile(avg_fname):
        vertices_avg = []
        for f in files:
            print(f)
            m = utils.read_mat_file_into_bag(f)
            vertices_avg.append(np.mean(m.vertices.T / 10, 0))
            m.clear()
        # Average the per-file mean vertices into a single (x, y, z) centering vector
        vertices_avg = np.mean(vertices_avg, axis=0)
        np.save(avg_fname, vertices_avg)
    else:
        vertices_avg = np.load(avg_fname)
    for f in files:
        print(f)
        m = utils.read_mat_file_into_bag(f)
        ply_fname = utils.change_fname_extension(f, 'ply')
        utils.write_ply_file(m.vertices.T / 10.0 - vertices_avg, m.faces.T, ply_fname, True)
        m.clear()
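
# read_mat_file_into_bag is a project helper; a rough equivalent of what mat_to_ply relies
# on could look like the sketch below. Assuming the .mat file actually contains 'vertices'
# and 'faces' variables is a guess, not the project's implementation.
def _read_mat_mesh_sketch(mat_fname):
    import scipy.io as sio
    m = sio.loadmat(mat_fname)
    return m['vertices'], m['faces']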

def plot_norm_data(x_cond, x_baseline, con_names, condition, threshold, nodes_names, stc_data, stc_times,
                   windows_len=100, windows_shift=10, figures_fol='', ax=None, nodes_names_includes_hemi=False):
    # con_norm = x_cond - x_baseline
    # con_norm = x_cond - x_cond[:, :200].mean(axis=1, keepdims=True)
    # baseline_std = np.std(x_baseline, axis=1, keepdims=True)
    # baseline_mean = np.mean(x_baseline, axis=1, keepdims=True)
    windows_num = x_cond.shape[1]
    dt = (stc_times[-1] - stc_times[windows_len]) / windows_num
    time = np.arange(stc_times[windows_len], stc_times[-1], dt)[:-1]
    t0, t1 = np.where(time > -0.1)[0][0], np.where(time > 1)[0][0]
    # baseline_mean = np.max(x_cond[:, :t0], axis=1, keepdims=True)
    # baseline_std = np.std(x_cond[:, :t0], axis=1, keepdims=True)
    # con_norm = (x_cond - baseline_mean) / baseline_std
    con_norm = x_cond - x_baseline
    fig_fname = op.join(figures_fol, 'ictal-baseline', '{}-connectivity-ictal-baseline.jpg'.format(condition))
    connection_fname = utils.change_fname_extension(fig_fname, 'pkl')
    norm = {}
    if ax is None:
        fig = plt.figure()
        ax = fig.add_subplot(111)
    conn_conditions = list(product(['within', 'between'], utils.HEMIS))
    colors = ['c', 'b', 'k', 'm']
    lines, labels = [], []
    no_ord_con_names = [con_name.split(' ')[0] for con_name in con_names]
    connections = []
    for conn_type, color in zip(conn_conditions, colors):
        mask = epi_utils.filter_connections(
            con_norm, no_ord_con_names, threshold, nodes_names, conn_type, use_abs=False,
            nodes_names_includes_hemi=nodes_names_includes_hemi)
        if sum(mask) == 0:
            print('{} no connections {}'.format(condition, conn_type))
            continue
        else:
            print('{}: {} connection for {} {}'.format(condition, sum(mask), conn_type[0], conn_type[1]))
        names = np.array(con_names)[mask]
        norm[conn_type] = con_norm[mask]
        # print('windows num: {} windows length: {:.2f}ms windows shift: {:2f}ms'.format(
        #     windows_num, (stc_times[windows_len] - stc_times[0]) * 1000, dt * 1000))
        marker = '+' if conn_type[0] == 'within' else 'x'
        label_title = ' '.join(conn_type) if conn_type[0] == 'within' else '{} to {}'.format(*conn_type)
        first = True
        for k in range(norm[conn_type].shape[0]):
            first_sec_max = norm[conn_type][k][t0:t1].max()
            if norm[conn_type][k][t0:t1].max() > 2:  # if conn_type[0] == 'between':
                first_sec_max_t = norm[conn_type][k][t0:t1].argmax()
                connections.append((time[first_sec_max_t + t0], label_title, first_sec_max, names[k]))
            l = ax.scatter(time, norm[conn_type][k], color=color)  # , marker=marker)  # .max(0)
            if first:
                lines.append(l)
                labels.append(label_title)
                first = False
        conn_type = (conn_type[0], 'right') if conn_type[1] == 'rh' else (conn_type[0], 'left')
    connections = sorted(connections)
    for con in connections:
        print(con)
    utils.save(connections, connection_fname)
    if stc_data is not None:
        ax2 = ax.twinx()
        l = ax2.plot(stc_times[windows_len:], stc_data[windows_len:].T, 'y--', alpha=0.2)  # stc_data[:-100].T
        lines.append(l[0])
        labels.append('Source normalized activity')
        # ax2.set_ylim([0.5, 4.5])
        # ax2.set_xlim([])
        # ax2.set_yticks(range(1, 5))
        ax2.set_ylabel('Source z-values', fontsize=12)
    # ax.set_xticks(time)
    # xticklabels = ['{}-{}'.format(t, t + windows_shift) for t in time]
    # xticklabels[2] = '{}\nonset'.format(xticklabels[2])
    # ax.set_xticklabels(xticklabels, rotation=30)
    ax.set_ylabel('Causality: Interictals\n minus Baseline', fontsize=12)
    # ax.set_yticks([0, 0.5])
    ax.set_ylim(bottom=0)  # , 0.7])
    # ax.axvline(x=x_axis[10], color='r', linestyle='--')
    plt.title('{} ictal-baseline ({} connections)'.format(condition, x_cond.shape[0]))
    # labs = [*conn_conditions, 'Source normalized activity']
    # ax.legend([l1[conn_conditions[k]][0] for k in range(4)] + l2, labs, loc=0)
    # ax.legend([l1[conn_conditions[0]]] + [l1[conn_conditions[1]]] + l2, labs, loc=0)
    ax.legend(lines, labels, loc='upper right')  # loc=0)
    plt.axvline(x=0, linestyle='--', color='k')
    # if ax is None:
    if figures_fol != '':
        plt.savefig(fig_fname, dpi=300)
        print('Figure was saved in {}'.format(fig_fname))
        plt.close()
    else:
        plt.show()
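
# The essence of plot_norm_data's selection step, as a self-contained numpy sketch (not from
# the original module): subtract the baseline connectivity from the ictal connectivity and
# report the connections whose value exceeds a threshold within the -0.1s..1s window.
# The argument shapes (x_cond/x_baseline as (n_connections, n_windows) arrays, times as a
# 1D array of window times, names as a list of connection names) are assumptions.
def _find_strong_connections_sketch(x_cond, x_baseline, times, names, threshold=2):
    con_norm = x_cond - x_baseline
    t0, t1 = np.where(times > -0.1)[0][0], np.where(times > 1)[0][0]
    strong = []
    for k in range(con_norm.shape[0]):
        seg = con_norm[k, t0:t1]
        if seg.max() > threshold:
            # Record (time of peak, peak value, connection name)
            strong.append((times[t0 + seg.argmax()], seg.max(), names[k]))
    return sorted(strong)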