def create_movie(time_range, xticks, fol, dpi, fps, video_fname, cb_data_type,
                 data_to_show_in_graph, cb_title='', cb_min_max_eq=True,
                 cb_norm_percs=None, color_map='jet', bitrate=5000, fol2='',
                 cb2_data_type='', cb2_title='', cb2_min_max_eq=True,
                 color_map2='jet', ylim=(), ylabels=(), xticklabels=(),
                 xlabel='Time (ms)', pics_type='png', show_first_pic=False,
                 show_animation=False, overwrite=True, n_jobs=1):
    """Build a movie from the pictures in `fol` (optionally paired with `fol2`).

    The pictures are split into `n_jobs` chunks and rendered in parallel by
    `_create_movie_parallel`; when more than one job is used, the per-chunk
    movies are then combined into `video_fname`.

    Raises:
        Exception: if `fol2` is given but holds a different number of
            pictures than `fol`.
    """
    # BUG FIX: resolve the effective job count *before* it is used for
    # chunking — the original resolved it afterwards, so n_jobs=-1
    # ("use all cores") produced a negative chunk size.
    n_jobs = utils.get_n_jobs(n_jobs)
    images1 = get_pics(fol, pics_type)[:len(time_range)]
    # BUG FIX: chunk size must be an int (len/n_jobs is a float in Python 3);
    # the other chunking calls below already cast.
    images1_chunks = utils.chunks(images1, int(len(images1) / n_jobs))
    if fol2 != '':
        images2 = get_pics(fol2, pics_type)
        if len(images2) != len(images1):
            raise Exception('fol and fol2 have different number of pictures!')
        images2_chunks = utils.chunks(images2, int(len(images2) / n_jobs))
    else:
        # Placeholder second-image entries so zip() below still pairs chunks.
        images2_chunks = [''] * int(len(images1) / n_jobs)
    params = [
        (images1_chunk, images2_chunk, time_range, xticks, dpi, fps, video_fname,
         cb_data_type, data_to_show_in_graph, cb_title, cb_min_max_eq,
         cb_norm_percs, color_map, bitrate, ylim, ylabels, xticklabels, xlabel,
         show_first_pic, fol, fol2, cb2_data_type, cb2_title, cb2_min_max_eq,
         color_map2, run, show_animation, overwrite)
        for run, (images1_chunk, images2_chunk) in
        enumerate(zip(images1_chunks, images2_chunks))]
    if n_jobs > 1:
        utils.run_parallel(_create_movie_parallel, params, n_jobs)
        video_name, video_type = op.splitext(video_fname)
        mu.combine_movies(fol, video_name, video_type[1:])
    else:
        for p in params:
            _create_movie_parallel(p)
def read_cmd_args(argv=None):
    """Parse the stim-preprocessing command line and return the args as a Bag."""
    import argparse
    from src.utils import args_utils as au
    parser = argparse.ArgumentParser(description='MMVT stim preprocessing')
    parser.add_argument('-s', '--subject', help='subject name', required=True,
                        type=au.str_arr_type)
    parser.add_argument('-c', '--conditions', help='conditions names',
                        required=False, default='contrast', type=au.str_arr_type)
    parser.add_argument('-a', '--atlas', help='atlas name', required=False,
                        default='aparc.DKTatlas40')
    parser.add_argument('-f', '--function', help='function name', required=False,
                        default='all', type=au.str_arr_type)
    parser.add_argument('--exclude', help='functions not to run', required=False,
                        default='', type=au.str_arr_type)
    parser.add_argument('--mat_fname', help='matlab connection file name',
                        required=False, default='')
    parser.add_argument('--mat_field', help='matlab connection field name',
                        required=False, default='')
    parser.add_argument('--labels_exclude', help='rois to exclude', required=False,
                        default='unknown,corpuscallosum', type=au.str_arr_type)
    parser.add_argument('--norm_by_percentile', help='', required=False,
                        default=1, type=au.is_true)
    parser.add_argument('--norm_percs', help='', required=False, default='1,99',
                        type=au.int_arr_type)
    parser.add_argument('--stat', help='', required=False, default=STAT_DIFF, type=int)
    parser.add_argument('--windows', help='', required=False, default=0, type=int)
    parser.add_argument('--t_max', help='', required=False, default=0, type=int)
    parser.add_argument('--threshold_percentile', help='', required=False,
                        default=0, type=int)
    parser.add_argument('--threshold', help='', required=False, default=0, type=float)
    parser.add_argument('--color_map', help='', required=False, default='jet')
    parser.add_argument('--symetric_colors', help='', required=False, default=1,
                        type=au.is_true)
    parser.add_argument('--data_max', help='', required=False, default=0, type=float)
    parser.add_argument('--data_min', help='', required=False, default=0, type=float)
    parser.add_argument('--n_jobs', help='cpu num', required=False, default=-1)
    args = utils.Bag(au.parse_parser(parser, argv))
    args.n_jobs = utils.get_n_jobs(args.n_jobs)
    # With a single condition there is nothing to diff, so fall back to averaging.
    if len(args.conditions) == 1:
        args.stat = STAT_AVG
    print(args)
    return args
def read_cmd_args(argv=None):
    """Parse the preprocessing command line and return the args as a Bag."""
    import argparse
    from src.utils import args_utils as au
    # BUG FIX: the description was the argparse template placeholder
    # ('Description of your program').
    parser = argparse.ArgumentParser(description='MMVT preprocessing')
    parser.add_argument('-s', '--subject', help='subject name', required=True)
    parser.add_argument('-f', '--function', help='function name', required=False,
                        default='all')
    parser.add_argument('-c', '--contrast', help='contrast name', required=True)
    parser.add_argument('-a', '--atlas', help='atlas name', required=False,
                        default='aparc.DKTatlas40')
    # NOTE(review): the threshold default is a *string* ('2'); downstream code
    # presumably casts it — confirm before giving it type=float.
    parser.add_argument('-t', '--threshold', help='clustering threshold',
                        required=False, default='2')
    parser.add_argument('-T', '--task', help='task', required=True)
    parser.add_argument('--existing_format', help='existing format',
                        required=False, default='mgz')
    parser.add_argument('--volume_type', help='volume type', required=False,
                        default='mni305')
    parser.add_argument('--volume_name', help='volume file name', required=False,
                        default='')
    parser.add_argument('--surface_name', help='surface_name', required=False,
                        default='pial')
    parser.add_argument('--meg_subject', help='meg_subject', required=False,
                        default='')
    parser.add_argument('--inverse_method', help='inverse method',
                        required=False, default='dSPM')
    parser.add_argument('--n_jobs', help='cpu num', required=False, default=-1)
    args = utils.Bag(au.parse_parser(parser, argv))
    args.n_jobs = utils.get_n_jobs(args.n_jobs)
    print(args)
    return args
def init_args(args):
    """Normalize shared command-line args in place and return them.

    Resolves the job count, the subject list, the sftp password (only when
    sftp is requested and no password was supplied), the real atlas name,
    and exports SUBJECTS_DIR to the environment.
    """
    args.n_jobs = utils.get_n_jobs(args.n_jobs)
    if args.necessary_files == '':
        args.necessary_files = dict()
    args.subject = decode_subjects(args.subject)
    password_missing = 'sftp_password' not in args or args.sftp_password == ''
    if password_missing:
        if args.sftp:
            args.sftp_password = utils.get_sftp_password(
                args.subject, SUBJECTS_DIR, args.necessary_files,
                args.sftp_username, args.overwrite_fs_files)
        else:
            args.sftp_password = ''
    set_default_args(args)
    args.atlas = utils.get_real_atlas_name(args.atlas)
    os.environ['SUBJECTS_DIR'] = SUBJECTS_DIR
    return args
def run_on_subjects(args, main_func, subjects_itr=None, subject_func=None):
    """Run `main_func` per subject, collecting per-step flags and errors.

    For each subject: prepares the local subject folder (optionally asking the
    user whether to continue on failure), calls `main_func`, and finally
    prints a per-subject summary of which steps failed.
    """
    if subjects_itr is None:
        subjects_itr = args.subject
    subjects_flags, subjects_errors = {}, {}
    args.n_jobs = utils.get_n_jobs(args.n_jobs)
    args.sftp_password = utils.get_sftp_password(
        args.subject, SUBJECTS_DIR, args.necessary_files, args.sftp_username,
        args.overwrite_fs_files) if args.sftp else ''
    # A glob pattern in --subject expands to all matching subject folders.
    if '*' in args.subject:
        args.subject = [utils.namebase(fol) for fol in
                        glob.glob(op.join(SUBJECTS_DIR, args.subject))]
    os.environ['SUBJECTS_DIR'] = SUBJECTS_DIR
    for tup in subjects_itr:
        subject = get_subject(tup, subject_func)
        utils.make_dir(op.join(MMVT_DIR, subject, 'mmvt'))
        remote_subject_dir = utils.build_remote_subject_dir(args.remote_subject_dir, subject)
        print('****************************************************************')
        print('subject: {}, atlas: {}'.format(subject, args.atlas))
        print('remote dir: {}'.format(remote_subject_dir))
        print('****************************************************************')
        os.environ['SUBJECT'] = subject
        flags = dict()
        if args.necessary_files == '':
            args.necessary_files = dict()
        try:
            if utils.should_run(args, 'prepare_local_subjects_folder'):
                # *) Prepare the local subject's folder
                flags['prepare_local_subjects_folder'] = prepare_local_subjects_folder(
                    subject, remote_subject_dir, args)
                if not flags['prepare_local_subjects_folder'] and not args.ignore_missing:
                    # BUG FIX: prompt typo — was 'Do you which to continue'
                    # (the newer variant of this function already says 'wish').
                    ans = input('Do you wish to continue (y/n)? ')
                    if not au.is_true(ans):
                        continue
            flags = main_func(tup, remote_subject_dir, args, flags)
            subjects_flags[subject] = flags
        # BUG FIX: narrowed bare `except:` so KeyboardInterrupt/SystemExit
        # are not swallowed; the traceback is still recorded per subject.
        except Exception:
            subjects_errors[subject] = traceback.format_exc()
            print('Error in subject {}'.format(subject))
            print(traceback.format_exc())
    errors = defaultdict(list)
    for subject, flags in subjects_flags.items():
        print('subject {}:'.format(subject))
        for flag_type, val in flags.items():
            print('{}: {}'.format(flag_type, val))
            if not val:
                errors[subject].append(flag_type)
    if len(errors) > 0:
        print('Errors:')
        for subject, error in errors.items():
            print('{}: {}'.format(subject, error))
def read_cmd_args(argv=None):
    """Parse the anatomy-preprocessing command line and return the args as a Bag."""
    import argparse
    from src.utils import args_utils as au
    parser = argparse.ArgumentParser(description='MMVT anatomy preprocessing')
    parser.add_argument('-s', '--subject', help='subject name', required=True,
                        type=au.str_arr_type)
    parser.add_argument('-a', '--atlas', help='atlas name', required=False,
                        default='aparc.DKTatlas40')
    parser.add_argument('-f', '--function', help='functions to run',
                        required=False, default='all', type=au.str_arr_type)
    parser.add_argument('--exclude', help='functions not to run',
                        required=False, default='', type=au.str_arr_type)
    parser.add_argument('--ignore_missing', help='ignore missing files',
                        required=False, default=0, type=au.is_true)
    parser.add_argument('--fsaverage', help='fsaverage', required=False,
                        default='fsaverage')
    parser.add_argument('--remote_subject_dir', help='remote_subject_dir',
                        required=False, default='')
    parser.add_argument('--remote_subjects_dir', help='remote_subjects_dir',
                        required=False, default='')
    parser.add_argument('--surf_name', help='surf_name', required=False,
                        default='pial')
    parser.add_argument('--overwrite', help='overwrite', required=False,
                        default=0, type=au.is_true)
    parser.add_argument('--overwrite_fs_files', help='overwrite freesurfer files',
                        required=False, default=0, type=au.is_true)
    # The overwrite_* switches below all share the same shape: boolean flag,
    # off by default, with help text equal to the flag name.
    for flag in ('overwrite_annotation', 'overwrite_morphing_labels',
                 'overwrite_hemis_srf', 'overwrite_labels_ply_files',
                 'overwrite_faces_verts', 'overwrite_ply_files',
                 'solve_labels_collisions'):
        parser.add_argument('--{}'.format(flag), help=flag, required=False,
                            default=0, type=au.is_true)
    parser.add_argument('--morph_labels_from_fsaverage',
                        help='morph_labels_from_fsaverage', required=False,
                        default=1, type=au.is_true)
    parser.add_argument('--fs_labels_fol', help='fs_labels_fol',
                        required=False, default='')
    parser.add_argument('--sftp', help='copy subjects files over sftp',
                        required=False, default=0, type=au.is_true)
    parser.add_argument('--sftp_username', help='sftp username',
                        required=False, default='')
    parser.add_argument('--sftp_domain', help='sftp domain', required=False,
                        default='')
    parser.add_argument('--print_traceback', help='print_traceback',
                        required=False, default=1, type=au.is_true)
    parser.add_argument('--n_jobs', help='cpu num', required=False, default=-1)
    args = utils.Bag(au.parse_parser(parser, argv))
    # FreeSurfer files that must exist locally (or be fetched) per subject.
    args.necessary_files = {
        'mri': ['aseg.mgz', 'norm.mgz', 'ribbon.mgz'],
        'surf': ['rh.pial', 'lh.pial', 'rh.sphere.reg', 'lh.sphere.reg',
                 'lh.white', 'rh.white', 'rh.smoothwm', 'lh.smoothwm']}
    args.n_jobs = utils.get_n_jobs(args.n_jobs)
    # A global --overwrite implies every specific overwrite flag.
    if args.overwrite:
        for flag in ('overwrite_annotation', 'overwrite_morphing_labels',
                     'overwrite_hemis_srf', 'overwrite_labels_ply_files',
                     'overwrite_faces_verts', 'overwrite_fs_files'):
            setattr(args, flag, True)
    print(args)
    return args
def read_cmd_args(argv=None):
    """Parse the anatomy-preprocessing command line and return the args as a Bag."""
    import argparse
    from src.utils import args_utils as au
    parser = argparse.ArgumentParser(description='MMVT anatomy preprocessing')
    parser.add_argument('-s', '--subject', help='subject name', required=True,
                        type=au.str_arr_type)
    parser.add_argument('-a', '--atlas', help='atlas name', required=False,
                        default='aparc.DKTatlas40')
    parser.add_argument('-f', '--function', help='functions to run',
                        required=False, default='all', type=au.str_arr_type)
    parser.add_argument('--exclude', help='functions not to run',
                        required=False, default='', type=au.str_arr_type)
    parser.add_argument('--ignore_missing', help='ignore missing files',
                        required=False, default=0, type=au.is_true)
    parser.add_argument('--fsaverage', help='fsaverage', required=False,
                        default='fsaverage')
    parser.add_argument('--remote_subject_dir', help='remote_subject_dir',
                        required=False, default='')
    parser.add_argument('--surf_name', help='surf_name', required=False,
                        default='pial')
    parser.add_argument('--overwrite', help='overwrite', required=False,
                        default=0, type=au.is_true)
    parser.add_argument('--overwrite_fs_files', help='overwrite freesurfer files',
                        required=False, default=0, type=au.is_true)
    parser.add_argument('--overwrite_annotation', help='overwrite_annotation',
                        required=False, default=0, type=au.is_true)
    parser.add_argument('--overwrite_morphing_labels',
                        help='overwrite_morphing_labels', required=False,
                        default=0, type=au.is_true)
    parser.add_argument('--overwrite_hemis_srf', help='overwrite_hemis_srf',
                        required=False, default=0, type=au.is_true)
    parser.add_argument('--overwrite_labels_ply_files',
                        help='overwrite_labels_ply_files', required=False,
                        default=0, type=au.is_true)
    parser.add_argument('--overwrite_faces_verts', help='overwrite_faces_verts',
                        required=False, default=0, type=au.is_true)
    parser.add_argument('--overwrite_ply_files', help='overwrite_ply_files',
                        required=False, default=0, type=au.is_true)
    parser.add_argument('--solve_labels_collisions',
                        help='solve_labels_collisions', required=False,
                        default=0, type=au.is_true)
    parser.add_argument('--morph_labels_from_fsaverage',
                        help='morph_labels_from_fsaverage', required=False,
                        default=1, type=au.is_true)
    parser.add_argument('--fs_labels_fol', help='fs_labels_fol',
                        required=False, default='')
    parser.add_argument('--sftp', help='copy subjects files over sftp',
                        required=False, default=0, type=au.is_true)
    parser.add_argument('--sftp_username', help='sftp username',
                        required=False, default='')
    parser.add_argument('--sftp_domain', help='sftp domain', required=False,
                        default='')
    parser.add_argument('--print_traceback', help='print_traceback',
                        required=False, default=1, type=au.is_true)
    parser.add_argument('--n_jobs', help='cpu num', required=False, default=-1)
    args = utils.Bag(au.parse_parser(parser, argv))
    # FreeSurfer files that must exist locally (or be fetched) per subject.
    args.necessary_files = {
        'mri': ['aseg.mgz', 'norm.mgz', 'ribbon.mgz', 'T1.mgz'],
        'surf': ['rh.pial', 'lh.pial', 'rh.inflated', 'lh.inflated',
                 'lh.curv', 'rh.curv', 'rh.sphere.reg', 'lh.sphere.reg',
                 'lh.white', 'rh.white', 'rh.smoothwm', 'lh.smoothwm'],
        'mri:transforms': ['talairach.xfm']}
    args.n_jobs = utils.get_n_jobs(args.n_jobs)
    # A global --overwrite implies every specific overwrite flag.
    if args.overwrite:
        for flag in ('overwrite_annotation', 'overwrite_morphing_labels',
                     'overwrite_hemis_srf', 'overwrite_labels_ply_files',
                     'overwrite_faces_verts', 'overwrite_fs_files'):
            setattr(args, flag, True)
    print(args)
    return args
# subject, clips_dict['ictal'], modality, inverse_method, seizure_times, windows_length, windows_shift) # calc_accumulate_stc_as_time( # subject, clips_dict['ictal'], modality, seizure_times, windows_length, windows_shift, mean_baseline, # inverse_method, n_jobs) # calc_rois_connectivity( # subject, clips_dict, modality, atlas, inverse_method, min_order, max_order, con_crop_times, onset_time, # windows_length, windows_shift, overwrite=True, n_jobs=n_jobs) normalize_connectivity( subject, clips_dict, modality, atlas, divide_by_baseline_std=False, threshold=0.5, reduce_to_3d=True, time_axis=np.mean(con_windows, 1), overwrite=False, n_jobs=n_jobs) # plot_connectivity(subject, clips_dict, modality, inverse_method) pass if __name__ == '__main__': n_jobs = utils.get_n_jobs(40) n_jobs = n_jobs if n_jobs >= 1 else 4 print('{} jobs'.format(n_jobs)) fif_files, clips_dict = [], {} atlas = 'aparc.DKTatlas' # 'laus125' run_num = 3 subject, remote_subject_dir, meg_fol, bad_channels, raw_fname, empty_room_fname = init_nmr01391() for subfol in ['baseline', 'ictal']: files = glob.glob(op.join(meg_fol, subfol, 'run{}_*.fif'.format(run_num))) fif_files += files clips_dict[subfol] = files main(subject, run_num, clips_dict, raw_fname, empty_room_fname, bad_channels, modality='meg',inverse_method='MNE', downsample_r=2, seizure_times=(-0.2, .5), windows_length=0.1, windows_shift=0.05, mean_baseline=10, atlas=atlas, min_cluster_size=50, min_order=1, max_order=20, con_windows_length=100, con_windows_shift=10,
# calc_stcs(subject, modality, clips_dict, inverse_method, downsample_r, overwrite=True, n_jobs=n_jobs) # calc_stc_zvals(subject, modality, clips_dict['ictal'], inverse_method, overwrite=True, n_jobs=n_jobs) # find_functional_rois( # subject, clips_dict['ictal'], modality, seizure_times, atlas, min_cluster_size, # inverse_method, overwrite=True, n_jobs=n_jobs) # calc_rois_connectivity( # subject, clips_dict, modality, inverse_method, min_order, max_order, con_crop_times, onset_time, # windows_length, windows_shift, overwrite=True, n_jobs=n_jobs) # normalize_connectivity( # subject, clips_dict['ictal'], modality, divide_by_baseline_std=False, # threshold=0.5, reduce_to_3d=True, overwrite=False, n_jobs=n_jobs) plot_connectivity(subject, clips_dict, modality, inverse_method) if __name__ == '__main__': n_jobs = utils.get_n_jobs(10) n_jobs = n_jobs if n_jobs > 1 else 1 print('{} jobs'.format(n_jobs)) fif_files, clips_dict = [], {} subject, remote_subject_dir, meg_fol, bad_channels, raw_fname, empty_room_fname = init_nmr01391( ) for subfol in ['baseline', 'ictal']: files = glob.glob(op.join(meg_fol, subfol, '*.fif')) fif_files += files clips_dict[subfol] = files main(subject, clips_dict, modality='meg', inverse_method='MNE',
default='') parser.add_argument('--print_only', required=False, default=False, type=au.is_true) parser.add_argument('--sftp', required=False, default=False, type=au.is_true) parser.add_argument('--sftp_username', help='sftp username', required=False, default='npeled') parser.add_argument('--sftp_domain', help='sftp domain', required=False, default='door.nmr.mgh.harvard.edu') parser.add_argument('--n_jobs', help='cpu num', required=False, default=1) args = utils.Bag(au.parse_parser(parser)) args.n_jobs = utils.get_n_jobs(args.n_jobs) if len(args.subject) == 0: args.subject = subjects with warnings.catch_warnings(): warnings.simplefilter("ignore") main(args.subject, template_system, remote_subject_templates, bipolar, save_as_bipolar, overwrite, prefix, args.sftp, args.sftp_username, args.sftp_domain, args.print_only, args.n_jobs) print('Done!') print('finish')
parser.add_argument('--pics_type', help='pics_type', required=False, default='png') parser.add_argument('--show_first_pic', help='show_first_pic', required=False, type=au.is_true, default=0) parser.add_argument('--images_folder', help='images_folder', required=False) parser.add_argument('--data_in_graph', help='data_in_graph', required=False) parser.add_argument('--time_range', help='time_range_from', required=False, type=au.float_arr_type) parser.add_argument('--xtick_dt', help='xtick_dt', required=False, type=float) parser.add_argument('--xlabel', help='xlabel', required=False) parser.add_argument('--ylabels', help='ylabels', required=False, type=au.str_arr_type) parser.add_argument('--xticklabels', help='xticklabels', required=False, type=au.str_arr_type) parser.add_argument('--ylim', help='ylim', required=False, type=au.float_arr_type) parser.add_argument('--do_show', help='do_show', required=False, type=au.is_true, default=0) parser.add_argument('--n_jobs', help='cpu num', required=False, default=-1) args = utils.Bag(au.parse_parser(parser)) args.xticklabels = au.str_arr_to_markers(args, 'xticklabels') print(args) n_jobs = utils.get_n_jobs(args.n_jobs) # fol = '/home/noam/Pictures/mmvt/mg99' # fol = '/homes/5/npeled/space1/Pictures/mmvt/stim/mg99/lvf6_5' # fol2 = '' # data_to_show_in_graph = 'stim' # video_fname = 'mg99_LVF6-5_stim.mp4' # cb_title = 'Electrodes PSD' # ylabels = ['Electrodes PSD'] # time_range = np.arange(-1, 1.5, 0.01) # xticks = [-1, -0.5, 0, 0.5, 1] # xticklabels = [(-1, 'stim onset'), (0, 'end of stim')] # ylim = (0, 500) # xlabel = 'Time(s)' # cb_data_type = 'stim' # cb_min_max_eq = False
label_name, label_time = line label_pos = labels_dict.get(label_name, None) if label_pos is not None: dist_from_dipole = np.linalg.norm(dipole_pos - label_pos) dists.append(dist_from_dipole) else: dist_from_dipole = -1 dists.append(np.nan) output.append('{}: {} ({:.4f})'.format(label_name, label_time, dist_from_dipole)) for ind, dist in enumerate(dists): if dist < dist_threshold: output[ind] = '{} ***'.format(output[ind]) title = '{}: {} {:.4f} dist from outer skin: {:.4f} '.format( dipole_name, closest_label, dists_min, dists_from_outer_skin) utils.save_arr_to_file(output, output_fname, title) if __name__ == '__main__': subject = 'nmr01426'# 'nmr01391' atlas = 'aparc.DKTatlas40' # 'laus125' # dip_fname = op.join(MEG_DIR, subject, 'run3_Ictal.dip') # _ictal dip_fname = '/autofs/space/frieda_003/users/valia/epilepsy_clin/6966926_1426/200618/1426_EPI_lang.dip' # dip_fname = op.join(MEG_DIR, subject, 'EPI.dip') # _ictal n_jobs = utils.get_n_jobs(20) n_jobs = n_jobs if n_jobs > 0 else 4 print('jobs: {}'.format(n_jobs)) #plot_dipole(dip_fname, subject) dipoles = parse_dip_file(dip_fname) mri_dipoles = convert_dipoles_to_mri_space(subject, dipoles, overwrite=True) # calc_distances_from_rois(subject) calc_dipoles_rois(subject, atlas=atlas, overwrite=True, n_jobs=n_jobs)
def run_on_subjects(args, main_func, subjects_itr=None, subject_func=None):
    """Run `main_func` per subject; log and summarize good/bad subjects.

    For each subject: prepares the subject folder (optionally asking the user
    whether to continue on failure), calls `main_func`, and records per-step
    flags. Afterwards it prints/logs per-subject results and writes the
    good/bad subject lists to the logs folder.

    Returns:
        bool: True when no step failed for any subject.
    """
    if subjects_itr is None:
        subjects_itr = args.subject
    subjects_flags, subjects_errors = {}, {}
    args.n_jobs = utils.get_n_jobs(args.n_jobs)
    if args.necessary_files == '':
        args.necessary_files = dict()
    args.subject = decode_subjects(args.subject)
    if 'sftp_password' not in args or args.sftp_password == '':
        args.sftp_password = utils.get_sftp_password(
            args.subject, SUBJECTS_DIR, args.necessary_files,
            args.sftp_username, args.overwrite_fs_files) if args.sftp else ''
    set_default_args(args)
    os.environ['SUBJECTS_DIR'] = SUBJECTS_DIR
    for tup in subjects_itr:
        subject = get_subject(tup, subject_func)
        utils.make_dir(op.join(MMVT_DIR, subject, 'mmvt'))
        remote_subject_dir = utils.build_remote_subject_dir(
            args.remote_subject_dir, subject)
        args.atlas = utils.get_real_atlas_name(args.atlas)
        logging.info(args)
        print('****************************************************************')
        print('subject: {}, atlas: {}'.format(subject, args.atlas))
        print('remote dir: {}'.format(remote_subject_dir))
        print('****************************************************************')
        os.environ['SUBJECT'] = subject
        flags = dict()
        try:
            # if utils.should_run(args, 'prepare_subject_folder'):
            # I think we always want to run this
            # *) Prepare the local subject's folder
            flags['prepare_subject_folder'] = prepare_subject_folder(
                subject, remote_subject_dir, args)
            if not flags['prepare_subject_folder'] and not args.ignore_missing:
                ans = input('Do you wish to continue (y/n)? ')
                if not au.is_true(ans):
                    continue
                # User chose to continue despite the failure, so don't count
                # this step as an error below.
                flags['prepare_subject_folder'] = True
            flags = main_func(tup, remote_subject_dir, args, flags)
            subjects_flags[subject] = flags
        # BUG FIX: narrowed bare `except:` so KeyboardInterrupt/SystemExit
        # are not swallowed; the traceback is still recorded per subject.
        except Exception:
            subjects_errors[subject] = traceback.format_exc()
            print('Error in subject {}'.format(subject))
            print(traceback.format_exc())
    errors = defaultdict(list)
    ret = True
    good_subjects, bad_subjects = [], []
    # NOTE(review): `subject` here is whatever subject the loop ended on
    # (NameError if subjects_itr is empty) — looks intentional in the
    # original; confirm the log file really belongs under the last subject.
    logs_fol = utils.make_dir(op.join(MMVT_DIR, subject, 'logs'))
    logging.basicConfig(filename=op.join(logs_fol, 'preproc.log'),
                        level=logging.DEBUG)
    for subject, flags in subjects_flags.items():
        print('subject {}:'.format(subject))
        logging.info('subject {}:'.format(subject))
        for flag_type, val in flags.items():
            print('{}: {}'.format(flag_type, val))
            logging.info('{}: {}'.format(flag_type, val))
            if not val:
                errors[subject].append(flag_type)
    if len(errors) > 0:
        ret = False
        print('Errors:')
        logging.info('Errors:')
        for subject, error in errors.items():
            print('{}: {}'.format(subject, error))
            logging.info('{}: {}'.format(subject, error))
    for subject in subjects_flags.keys():
        if len(errors[subject]) == 0:
            good_subjects.append(subject)
        else:
            bad_subjects.append(subject)
    print('Good subjects:\n {}'.format(good_subjects))
    logging.info('Good subjects:\n {}'.format(good_subjects))
    print('Bad subjects:\n {}'.format(bad_subjects))
    # BUG FIX: this line used to log the *good* list again (copy-paste error).
    logging.info('Bad subjects:\n {}'.format(bad_subjects))
    utils.write_list_to_file(
        good_subjects, op.join(utils.get_logs_fol(), 'good_subjects.txt'))
    utils.write_list_to_file(
        bad_subjects, op.join(utils.get_logs_fol(), 'bad_subjects.txt'))
    return ret
g = nx.from_numpy_matrix(con_t) # x = nx.closeness_centrality(g) x = nx.degree_centrality(g) vals.append([x[k] for k in range(len(x))]) vals = np.array(vals) return vals, times_chunk def plot_values(subject, con_name, func_name, ma_win_size=10): vals = np.load(op.join(MMVT_DIR, subject, 'connectivity', '{}_{}.npy'.format(con_name, func_name))) # inds = np.argsort(np.max(vals, axis=1) - np.min(vals, axis=1))[::-1] # vals = vals[inds[:10]] # vals = utils.moving_avg(vals, ma_win_size) t_axis = np.linspace(-2, 5, vals.shape[1]) # plt.plot(t_axis, np.diff(vals).T) plt.plot(t_axis, vals.T) plt.title('{} {}'.format(con_name, func_name)) plt.show() if __name__ == '__main__': n_jobs = utils.get_n_jobs(-5) print('n_jobs: {}'.format(n_jobs)) subject = 'nmr00857' func_name = 'degree_centrality' #, #'closeness_centrality' # 'clustering' bands = dict(theta=[4, 8], alpha=[8, 15], beta=[15, 30], gamma=[30, 55], high_gamma=[65, 120]) for band_name in bands.keys(): con_name = 'meg_{}_mi'.format(band_name) # plot_con(subject, con_name) # calc_measures(subject, con_name, func_name, n_jobs) plot_values(subject, con_name, func_name)
if __name__ == '__main__': subject = os.environ['SUBJECT'] = '277S0203' os.environ['SUBJECTS_DIR'] = SUBJECTS_DIR sites = [ '131-NeuroBeh_ACH', '277-NDC', '800-Hoglund', '829-EmoryUniversity', '960-VitalImaging' ] months = [''] #, '_6', '_12'] atlas = 'aparc' # 'aparc.DKTatlas' low_threshold, high_threshold = 0, 100 cortex_frac_threshold = 0.9 overwrite = True print_only = False do_plot = False n_jobs = max(utils.get_n_jobs(30), 4) print('n_jobs: {}'.format(n_jobs)) # read_hippocampus_volumes() # calc_volume_fractions_all_subjects(subjects, site, overwrite, print_only) params = [] for site in sites: subjects = get_subjects(site) good_subjects = preproc_anat(subjects, overwrite_files=False) for sub_ind, subject in enumerate(good_subjects): # utils.time_to_go(now, sub_ind, len(subjects), 1) # get_labels_data(subject, atlas) subjects_0_6_12_dirs = [ op.join(HOME_FOL, site, '{}{}'.format(subject, month)) for month in months if op.isdir(