def create_ictal_clips(subject, ictal_events_dict, ictal_template, overwrite=False, n_jobs=4):
    """Extract per-event ictal data clips (and baselines) from EDF recordings.

    For every ictal event id in ``ictal_events_dict`` (mapping id -> onset
    times), runs the electrodes pipeline on the matching EDF file and renames
    the pipeline's fixed-name temp outputs to per-event files.

    :param subject: subject name (folder under MMVT_DIR)
    :param ictal_events_dict: {ictal_id: times}; only times[0] (onset) is used
    :param ictal_template: filename template with an ``{ictal_id}`` placeholder
    :param overwrite: if False, skip events whose output files already exist
    :param n_jobs: parallel jobs passed to the electrodes pipeline
    :return: (data_files, baseline_files) lists of the per-event npy paths
    """
    mmvt_root = op.join(MMVT_DIR, subject, 'electrodes')
    data_files, baseline_files = [], []
    # NOTE(review): `meta` is loaded but never used below -- confirm whether
    # it is needed or can be dropped.
    meta = utils.Bag(np.load(op.join(mmvt_root, 'electrodes_meta_data.npz')))
    for ictal_id, times in ictal_events_dict.items():
        output_fname = op.join(mmvt_root, 'electrodes_data_{}.npy'.format(ictal_id))
        baseline_fname = op.join(mmvt_root, 'electrodes_baseline_{}.npy'.format(ictal_id))
        # Reuse previously extracted clips unless overwrite was requested.
        if op.isfile(output_fname) and op.isfile(
                baseline_fname) and not overwrite:
            data_files.append(output_fname)
            baseline_files.append(baseline_fname)
            continue
        event_fname = ictal_template.format(ictal_id=ictal_id)
        if not op.isfile(event_fname):
            print('Cannot find {}!'.format(event_fname))
            continue
        # Run the electrodes preprocessing on a 10s window around the onset
        # (times[0] +/- 5s); the commented times[1] hints the event end was
        # once used as the window end instead.
        args = electrodes.read_cmd_args(
            utils.Bag(
                subject=subject,
                function='create_raw_data_from_edf',
                task='seizure',
                bipolar=False,
                raw_fname=event_fname,
                start_time=0,
                seizure_onset=times[0] - 5,
                seizure_end=times[0] + 5,  # times[1],
                baseline_onset=0,
                baseline_end=100,
                time_format='seconds',
                lower_freq_filter=1,
                upper_freq_filter=150,
                power_line_notch_widths=5,
                remove_baseline=False,
                normalize_data=False,
                factor=1000,
                overwrite_raw_data=True,
                n_jobs=n_jobs))
        electrodes.call_main(args)
        # The pipeline writes fixed-name temp files; rename them to the
        # per-event file names so the next event doesn't clobber them.
        temp_output_fname = op.join(mmvt_root, 'electrodes_data_diff.npy')
        if op.isfile(temp_output_fname):
            os.rename(temp_output_fname, output_fname)
            data_files.append(output_fname)
        else:
            print('{}: no data!'.format(ictal_id))
        temp_baseline_fname = op.join(mmvt_root, 'electrodes_baseline.npy')
        if op.isfile(temp_baseline_fname):
            os.rename(temp_baseline_fname, baseline_fname)
            baseline_files.append(baseline_fname)
        else:
            print('{}: No baseline!'.format(ictal_id))
        # Promote the freshly written meta data to the canonical name.
        meta_fname = op.join(mmvt_root, 'electrodes_meta_data_diff.npz')
        if op.isfile(meta_fname):
            os.rename(meta_fname, op.join(mmvt_root, 'electrodes_meta_data.npz'))
    return data_files, baseline_files
def combine_meg_and_electrodes_power_spectrum(subject, inv_method='MNE', em='mean_flip', low_freq=None,
                                              high_freq=None, do_plot=True, overwrite=False):
    """Combine MEG and electrodes power spectra into one comparison file.

    Loads the precomputed MEG and electrodes power spectra, converts them to
    dB, averages across epochs, interpolates both onto a common frequency
    grid, z-scores them, and saves them side by side as two "conditions".

    Fixes vs. the original:
    * ``low_freq``/``high_freq`` default to ``None``; they used to be passed
      straight into ``max``/``min``, which raises ``TypeError`` in Python 3.
      They are now only applied when provided.
    * Removed a dead ``np.argmax(...)`` statement whose result was discarded,
      and a no-op ``.format(inv_method, em)`` on a placeholder-free filename.

    :param subject: subject name (folder under MMVT_DIR)
    :param inv_method: MEG inverse method used in the input file name
    :param em: extract-mode label used in the input file name
    :param low_freq: optional lower frequency bound (Hz)
    :param high_freq: optional upper frequency bound (Hz)
    :param do_plot: also plot the full comparison figure
    :param overwrite: currently unused (the caching guard is disabled below)
    """
    # https://martinos.org/mne/dev/generated/mne.time_frequency.psd_array_welch.html
    output_fname = op.join(MMVT_DIR, subject, 'electrodes', 'electrodes_data_power_spectrum_comparison.npz')
    # NOTE(review): the overwrite guard is intentionally disabled; re-enable
    # to skip recomputation when the output already exists.
    # if op.isfile(output_fname) and not overwrite:
    #     return True
    meg_ps_dict = utils.Bag(
        np.load(op.join(MMVT_DIR, subject, 'meg', 'rest_{}_{}_power_spectrum.npz'.format(inv_method, em))))
    elecs_ps_dict = utils.Bag(
        np.load(op.join(MMVT_DIR, subject, 'electrodes', 'power_spectrum.npz')))
    # Power Spectral Density (dB)
    meg_ps = 10 * np.log10(meg_ps_dict.power_spectrum.squeeze())
    plot_power_spectrum(meg_ps, meg_ps_dict.frequencies, 'MEG')
    meg_ps = meg_ps.mean(axis=0)
    elecs_ps = 10 * np.log10(elecs_ps_dict.power_spectrum.squeeze())
    plot_power_spectrum(elecs_ps, elecs_ps_dict.frequencies, 'electrodes')
    elecs_ps = elecs_ps.mean(axis=0)
    meg_func = scipy.interpolate.interp1d(meg_ps_dict.frequencies, meg_ps)
    elecs_func = scipy.interpolate.interp1d(elecs_ps_dict.frequencies, elecs_ps)
    # Clip the common range to frequencies both modalities cover; the user
    # bounds only tighten it further when given (None means "no bound").
    low_candidates = [min(meg_ps_dict.frequencies), min(elecs_ps_dict.frequencies)]
    if low_freq is not None:
        low_candidates.append(low_freq)
    low_freq = int(max(low_candidates))
    high_candidates = [max(meg_ps_dict.frequencies), max(elecs_ps_dict.frequencies)]
    if high_freq is not None:
        high_candidates.append(high_freq)
    high_freq = int(min(high_candidates))
    freqs_num = high_freq - low_freq + 1
    # 10 interpolation points per 1 Hz step.
    frequencies = np.linspace(low_freq, high_freq, num=freqs_num * 10, endpoint=True)
    meg_ps_inter = meg_func(frequencies)
    meg_ps_inter = (meg_ps_inter - np.mean(meg_ps_inter)) / np.std(meg_ps_inter)
    elecs_ps_inter = elecs_func(frequencies)
    elecs_ps_inter = (elecs_ps_inter - np.mean(elecs_ps_inter)) / np.std(elecs_ps_inter)
    # NOTE(review): this plot is not guarded by do_plot -- confirm intent.
    plot_all_results(meg_ps_inter, elecs_ps_inter, frequencies)
    electrodes_meta_fname = op.join(MMVT_DIR, subject, 'electrodes', 'electrodes_meta_data.npz')
    elecs_dict = utils.Bag(np.load(electrodes_meta_fname))
    labels = elecs_dict.names
    # Stack as (electrodes, frequencies, conditions): 0=electrodes, 1=MEG.
    data = np.zeros((len(labels), len(frequencies), 2))
    data[:, :, 0] = elecs_ps_inter
    data[:, :, 1] = meg_ps_inter
    np.savez(output_fname, data=data, names=labels, conditions=['grid_rest', 'meg_rest'])
    if do_plot:
        plot_results(meg_ps_dict, elecs_ps_dict, frequencies, meg_ps, meg_ps_inter, elecs_ps, elecs_ps_inter)
def read_cmd_args(argv=None):
    """Parse command-line options for the MMVT template preprocessing flow.

    :param argv: optional argument list; None lets the parser use sys.argv.
    :return: a utils.Bag with the parsed options.
    """
    import argparse
    arg_parser = argparse.ArgumentParser(description='MMVT template preprocessing')
    arg_parser.add_argument('--flag', help='', required=False, default='')
    pu.add_common_args(arg_parser)
    return utils.Bag(au.parse_parser(arg_parser, argv))
def read_cmd_args(argv=None):
    """Parse command-line options for the UDP listener.

    :param argv: optional argument list; None lets the parser use sys.argv.
    :return: a utils.Bag with the parsed options.
    """
    import argparse
    arg_parser = argparse.ArgumentParser(description='UDP listener')
    arg_parser.add_argument('-b', '--buffer_size', required=False, default=10, type=int)
    parsed = au.parse_parser(arg_parser, argv)
    return utils.Bag(parsed)
def read_cmd_args(argv):
    """Parse command-line options for the MMVT freeview preprocessing.

    :param argv: argument list to parse.
    :return: a utils.Bag with the parsed options plus the list of
        anatomical files the pipeline requires (``necessary_files``).
    """
    import argparse
    arg_parser = argparse.ArgumentParser(description='MMVT freeview preprocessing')
    arg_parser.add_argument('-b', '--bipolar', help='bipolar', required=False, default=0, type=au.is_true)
    arg_parser.add_argument('--overwrite_aseg_file', help='overwrite_aseg_file', required=False, default=0,
                            type=au.is_true)
    arg_parser.add_argument('--create_volume_file', help='create_volume_file', required=False, default=1,
                            type=au.is_true)
    arg_parser.add_argument('--electrodes_pos_fname', help='electrodes_pos_fname', required=False, default='')
    arg_parser.add_argument('--way_points', help='way_points', required=False, default=0, type=au.is_true)
    pu.add_common_args(arg_parser)
    parsed = utils.Bag(au.parse_parser(arg_parser, argv))
    # Freeview needs the original T1 volumes to be present locally.
    parsed.necessary_files = {'mri': ['T1.mgz', 'orig.mgz']}
    return parsed
def load_edf_data_seizure_2(args):
    """Extract seizure/baseline clips from the DMphaseIISz_TG EDF recording.

    Builds the electrodes-pipeline arguments for a fixed seizure window
    (01:20-02:00, baseline 00:00-01:00) and runs the pipeline per subject.
    """
    edf_fname = op.join(ELECTRODES_DIR, args.subject[0], 'DMphaseIISz_TG.edf')
    elecs_args = elecs.read_cmd_args(
        utils.Bag(
            subject=args.subject,
            atlas='laus125',
            function='create_raw_data_for_blender',
            task='seizure',
            bipolar=args.bipolar,
            raw_fname=edf_fname,
            start_time='00:00:00',
            seizure_onset='00:01:20',
            seizure_end='00:02:00',
            baseline_onset='00:00:00',
            baseline_end='00:01:00',
            lower_freq_filter=2,
            upper_freq_filter=70,
            power_line_notch_widths=5,
            ref_elec='PST1',
            normalize_data=False,
            calc_zscore=False,
            factor=1000,
            channels_names_mismatches='LFO=LIF'))
    pu.run_on_subjects(elecs_args, elecs.main)
def load_edf_data_seizure(args):
    """Extract seizure/baseline clips from the EDF file named by ``args.edf``.

    Uses a fixed seizure window (01:20-02:00, baseline 00:00-01:00) and runs
    the electrodes pipeline per subject.
    """
    edf_fname = op.join(ELECTRODES_DIR, args.subject[0], '{}.edf'.format(args.edf))
    elecs_args = elecs.read_cmd_args(
        utils.Bag(
            subject=args.subject,
            atlas='laus125',
            function='create_raw_data_from_edf',
            task='seizure',
            bipolar=args.bipolar,
            raw_fname=edf_fname,
            start_time='00:00:00',
            seizure_onset='00:01:20',
            seizure_end='00:02:00',
            baseline_onset='00:00:00',
            baseline_end='00:01:00',
            lower_freq_filter=1,
            upper_freq_filter=150,
            power_line_notch_widths=5,
            ref_elec='PST1',
            normalize_data=False,
            calc_zscore=False,
            factor=1000))
    pu.run_on_subjects(elecs_args, elecs.main)
def export_into_csv(template_system, mmvt_dir, bipolar=False, prefix='', input_fname=''):
    """Export template-space electrode RAS coordinates to a CSV file.

    :param template_system: 'ras' -> fsaverage, 'mni' -> colin27, otherwise
        used as the template name itself
    :param mmvt_dir: MMVT root used to locate the default input positions file
    :param bipolar: whether the positions file is the bipolar variant
    :param prefix: optional file-name prefix for both input and output
    :param input_fname: explicit positions npz; when empty it is derived from
        the other parameters
    :return: path of the written CSV file
    """
    template = 'fsaverage' if template_system == 'ras' else 'colin27' if template_system == 'mni' else template_system
    if input_fname == '':
        input_name = '{}electrodes{}_positions.npz'.format(
            prefix, '_bipolar' if bipolar else '')
        input_fname = op.join(mmvt_dir, template, 'electrodes', input_name)
    electrodes_dict = utils.Bag(np.load(input_fname))
    # NOTE(review): this uses the module-level MMVT_DIR, not the mmvt_dir
    # parameter -- confirm which root the CSV should be written under.
    fol = utils.make_dir(op.join(MMVT_DIR, template, 'electrodes'))
    csv_fname = op.join(
        fol,
        '{}{}_{}RAS.csv'.format(prefix, template, 'bipolar_' if bipolar else ''))
    print('Writing csv file to {}'.format(csv_fname))
    with open(csv_fname, 'w') as csv_file:
        wr = csv.writer(csv_file, quoting=csv.QUOTE_NONE)
        wr.writerow(['Electrode Name', 'R', 'A', 'S'])
        # One row per electrode: name followed by R, A, S with 2 decimals.
        for elc_name, elc_coords in zip(electrodes_dict.names, electrodes_dict.pos):
            wr.writerow([
                elc_name, *['{:.2f}'.format(x) for x in elc_coords.squeeze()]
            ])
    # NOTE(review): `fol` is rebuilt from the same components, so csv_fname2
    # always equals csv_fname and the copy below never runs -- presumably one
    # of the two was meant to use mmvt_dir; verify.
    fol = utils.make_dir(op.join(MMVT_DIR, template, 'electrodes'))
    csv_fname2 = op.join(fol, utils.namebase_with_ext(csv_fname))
    if csv_fname != csv_fname2:
        utils.copy_file(csv_fname, csv_fname2)
    print('export_into_csv: {}'.format(
        op.isfile(csv_fname) and op.isfile(csv_fname2)))
    return csv_fname
def read_cmd_args(argv=None):
    """Parse command-line options for the MMVT template preprocessing flow.

    Bug fix: ``argv`` was accepted but never forwarded to ``au.parse_parser``,
    so an explicit argument list was silently ignored and sys.argv was parsed
    instead (the sibling read_cmd_args implementations all forward it).

    :param argv: optional argument list; None lets the parser use sys.argv.
    :return: a utils.Bag with the parsed options.
    """
    import argparse
    from src.utils import args_utils as au
    parser = argparse.ArgumentParser(description='MMVT template preprocessing')
    pu.add_common_args(parser)
    args = utils.Bag(au.parse_parser(parser, argv))
    return args
def normalize_connectivity(subject, ictals_clips, modality, divide_by_baseline_std, threshold, reduce_to_3d,
                           overwrite=False, n_jobs=6):
    """Z-score ictal Granger-causality connectivity against its baseline.

    For each clip, loads the ictal and baseline connectivity files, optionally
    reduces the values to 3D, normalizes the ictal values by the baseline
    (via epi_utils.norm_values) and saves the result as ``*_zvals.npz``.

    :param subject: subject name
    :param ictals_clips: iterable of clip file names (only the name base is used)
    :param modality: connectivity modality used to build the file template
    :param divide_by_baseline_std: passed to epi_utils.norm_values
    :param threshold: passed to epi_utils.norm_values
    :param reduce_to_3d: reduce con values with connectivity.find_best_ord first
    :param overwrite: currently unused -- existing zvals files are recomputed
    :param n_jobs: currently unused
    """
    connectivity_template = connectivity.get_output_fname(
        subject, 'gc', modality, 'mean_flip', 'all_{}_func_rois')
    for clip_fname in ictals_clips:
        clip_name = utils.namebase(clip_fname)
        # Strip the '.npz' extension before appending the zvals suffix.
        output_fname = '{}_zvals.npz'.format(
            connectivity_template.format(clip_name)[:-4])
        con_ictal_fname = connectivity_template.format(clip_name)
        con_baseline_fname = connectivity_template.format(
            '{}_baseline'.format(clip_name))
        # Both the ictal and the baseline connectivity files must exist.
        if not op.isfile(con_ictal_fname) or not op.isfile(con_baseline_fname):
            for fname in [
                    f for f in [con_ictal_fname, con_baseline_fname]
                    if not op.isfile(f)
            ]:
                print('{} is missing!'.format(fname))
            continue
        print('normalize_connectivity: {}:'.format(clip_name))
        d_ictal = utils.Bag(np.load(con_ictal_fname, allow_pickle=True))
        d_baseline = utils.Bag(np.load(con_baseline_fname, allow_pickle=True))
        if reduce_to_3d:
            d_ictal.con_values = connectivity.find_best_ord(
                d_ictal.con_values, False)
            d_ictal.con_values2 = connectivity.find_best_ord(
                d_ictal.con_values2, False)
            d_baseline.con_values = connectivity.find_best_ord(
                d_baseline.con_values, False)
            d_baseline.con_values2 = connectivity.find_best_ord(
                d_baseline.con_values2, False)
        # Normalize the ictal values by the baseline statistics.
        d_ictal.con_values = epi_utils.norm_values(d_baseline.con_values,
                                                   d_ictal.con_values,
                                                   divide_by_baseline_std,
                                                   threshold, True)
        # con_values2 is optional in the baseline file.
        if 'con_values2' in d_baseline:
            d_ictal.con_values2 = epi_utils.norm_values(
                d_baseline.con_values2, d_ictal.con_values2,
                divide_by_baseline_std, threshold, True)
        print('Saving norm connectivity in {}'.format(output_fname))
        np.savez(output_fname, **d_ictal)
def darpa(args):
    """Run the anatomy preprocessing for each DARPA subject.

    The remote FreeSurfer output folder is derived from the subject name with
    its two-letter site prefix upper-cased.
    """
    for subject in args.subject:
        darpa_subject = subject[:2].upper() + subject[2:]
        remote_dir = op.join(
            '/space/huygens/1/users/kara/{}_SurferOutput/'.format(darpa_subject))
        anat_args = anat.read_cmd_args(utils.Bag(
            subject=subject,
            remote_subject_dir=remote_dir))
        pu.run_on_subjects(anat_args, anat.main)
def calc_sorting_indices(subject, labels):
    """Return indices that map the coherence meta-data electrodes onto ``labels``.

    Bug fix: corrected the typo 'lalbels' in the error message.

    :param subject: subject name (folder under SUBJECTS_DIR)
    :param labels: electrode labels to locate in the meta-data electrode list
    :return: np.array of indices into ``meta_data.electrodes``
    :raises Exception: if any label cannot be found (index -1)
    """
    meta_data = utils.Bag(
        utils.load(
            op.join(SUBJECTS_DIR, subject, 'electrodes_coh_meta_data.pkl')))
    sorting_indices = np.array(
        utils.find_list_items_in_list(meta_data.electrodes, labels))
    if -1 in sorting_indices:
        raise Exception('You should check your labels...')
    return sorting_indices
def get_ras_from_mad(args):
    """Fetch the RAS coordinates file from the MAD remote SurferOutput folder."""
    remote_fol = ('/mnt/cashlab/Original Data/MG/{subject}/'
                  '{subject}_Notes_and_Images/{subject}_SurferOutput')
    elecs_args = elecs.read_cmd_args(
        utils.Bag(
            subject=args.subject,
            function='get_ras_file',
            remote_ras_fol=remote_fol))
    pu.run_on_subjects(elecs_args, elecs.main)
def calc_electrodes_con(args):
    """Compute electrodes coherence for the interference task conditions.

    Equivalent CLI:
    -s mg78 -a laus250 -f save_electrodes_coh --threshold_percentile 95
    -c interference,non-interference
    """
    con_args = con.read_cmd_args(
        utils.Bag(
            subject=args.subject,
            atlas='laus250',
            function='save_electrodes_coh',
            threshold_percentile=95,
            conditions='interference,non-interference'))
    pu.run_on_subjects(con_args, con.main)
def check_mmvt_file(subject):
    """Plot both conditions of the saved power-spectrum comparison file.

    Opens one figure per condition (electrodes and MEG) and shows them.
    """
    input_fname = op.join(MMVT_DIR, subject, 'electrodes',
                          'electrodes_data_power_spectrum_comparison.npz')
    d = utils.Bag(np.load(input_fname))
    for cond_ind in range(2):
        plt.figure()
        plt.plot(d.data[:, :, cond_ind].T)
        plt.title(d.conditions[cond_ind])
    plt.show()
def read_cmd_args(argv=None):
    """Parse the fMRI/MEG preprocessing command-line options.

    :param argv: optional argument list; None lets the parser use sys.argv.
    :return: a utils.Bag with the parsed options; ``n_jobs`` is resolved to a
        concrete CPU count via utils.get_n_jobs.
    """
    import argparse
    from src.utils import args_utils as au
    arg_parser = argparse.ArgumentParser(description='Description of your program')
    arg_parser.add_argument('-s', '--subject', help='subject name', required=True)
    arg_parser.add_argument('-f', '--function', help='function name', required=False, default='all')
    arg_parser.add_argument('-c', '--contrast', help='contrast name', required=True)
    arg_parser.add_argument('-a', '--atlas', help='atlas name', required=False, default='aparc.DKTatlas40')
    arg_parser.add_argument('-t', '--threshold', help='clustering threshold', required=False, default='2')
    arg_parser.add_argument('-T', '--task', help='task', required=True)
    arg_parser.add_argument('--existing_format', help='existing format', required=False, default='mgz')
    arg_parser.add_argument('--volume_type', help='volume type', required=False, default='mni305')
    arg_parser.add_argument('--volume_name', help='volume file name', required=False, default='')
    arg_parser.add_argument('--surface_name', help='surface_name', required=False, default='pial')
    arg_parser.add_argument('--meg_subject', help='meg_subject', required=False, default='')
    arg_parser.add_argument('--inverse_method', help='inverse method', required=False, default='dSPM')
    arg_parser.add_argument('--n_jobs', help='cpu num', required=False, default=-1)
    parsed_args = utils.Bag(au.parse_parser(arg_parser, argv))
    parsed_args.n_jobs = utils.get_n_jobs(parsed_args.n_jobs)
    print(parsed_args)
    return parsed_args
def darpa_sftp(args):
    """Run the anatomy preprocessing for DARPA subjects over SFTP.

    Same as darpa(), but pulls the remote FreeSurfer output through an SFTP
    connection to the MGH gateway.
    """
    for subject in args.subject:
        darpa_subject = subject[:2].upper() + subject[2:]
        remote_dir = op.join(
            '/space/huygens/1/users/kara/{}_SurferOutput/'.format(darpa_subject))
        anat_args = anat.read_cmd_args(utils.Bag(
            subject=subject,
            remote_subject_dir=remote_dir,
            sftp=True,
            sftp_username='******',
            sftp_domain='door.nmr.mgh.harvard.edu'))
        pu.run_on_subjects(anat_args, anat.main)
def merge_t1_with_ct(subject, ct_threshold=None, ct_name='ct_reg_to_mr.mgz', overwrite=True):
    """Overlay bright CT voxels (e.g. electrodes) onto the subject's T1 volume.

    Voxels whose CT intensity exceeds ``ct_threshold`` are mapped through the
    CT vox->RAS and T1 RAS->vox transforms and written into the T1 data, then
    saved as t1_ct.mgz together with a binary mask and the T1 transforms.

    :param subject: subject name (folders under SUBJECTS_DIR and MMVT_DIR)
    :param ct_threshold: CT intensity threshold; defaults to the 99th percentile
    :param ct_name: CT volume (already registered to the MR) under MMVT ct/
    :param overwrite: if False and the output exists, skip the computation
    :return: True if the output file exists
    """
    output_fname = op.join(MMVT_DIR, subject, 'ct', 't1_ct.mgz')
    if op.isfile(output_fname) and not overwrite:
        return True
    t1 = nib.load(op.join(SUBJECTS_DIR, subject, 'mri', 'T1.mgz'))
    # NOTE(review): get_data() is deprecated in nibabel (use get_fdata() or
    # np.asanyarray(img.dataobj)); kept as-is to preserve the returned dtype.
    t1_data = t1.get_data()
    ct_data = nib.load(op.join(MMVT_DIR, subject, 'ct', ct_name)).get_data()
    if ct_threshold is None:
        # Default: keep only the brightest 1% of CT voxels.
        ct_threshold = np.percentile(ct_data, 99)
    ct_trans = utils.Bag(
        np.load(op.join(MMVT_DIR, subject, 'ct', 'ct_trans.npz')))
    t1_trans = utils.Bag(np.load(op.join(MMVT_DIR, subject, 't1_trans.npz')))
    print('Finding all voxels above {}'.format(ct_threshold))
    ct_indices = np.where(ct_data > ct_threshold)
    ct_voxels = np.array(ct_indices).T
    # CT voxel -> scanner RAS -> T1 voxel (rounded to nearest integer index).
    ct_ras_coordinates = apply_trans(ct_trans.vox2ras, ct_voxels)
    t1_voxels = np.rint(apply_trans(t1_trans.ras2vox,
                                    ct_ras_coordinates)).astype(int)
    # NOTE(review): no bounds check on t1_voxels -- out-of-volume CT voxels
    # would raise or wrap (negative indices); confirm the volumes share a FOV.
    t1_data[(t1_voxels.T[0], t1_voxels.T[1],
             t1_voxels.T[2])] = ct_data[(ct_voxels.T[0], ct_voxels.T[1],
                                         ct_voxels.T[2])]
    # Binary mask of the voxels that were replaced by CT values.
    t1_ct_mask = np.zeros(t1_data.shape, dtype=np.int8)
    t1_ct_mask[(t1_voxels.T[0], t1_voxels.T[1], t1_voxels.T[2])] = 1
    np.save(op.join(MMVT_DIR, subject, 'ct', 't1_ct_mask.npy'), t1_ct_mask)
    img = nib.Nifti1Image(t1_data, t1.affine)
    nib.save(img, output_fname)
    save_images_data_and_header(subject, ct_name=output_fname,
                                output_name='t1_ct_data', overwrite=True)
    # Save the T1 transforms alongside, so the merged volume can be mapped.
    np.savez(op.join(MMVT_DIR, subject, 'ct', 't1_ct_trans.npz'),
             ras_tkr2vox=t1_trans.ras_tkr2vox,
             vox2ras_tkr=t1_trans.vox2ras_tkr,
             vox2ras=t1_trans.vox2ras,
             ras2vox=t1_trans.ras2vox)
    return op.isfile(output_fname)
def load_coherence_meta_data_from_matlab(subject, matlab_electrodes_data_file):
    """Convert Matlab coherence meta data into a pickled Python dict.

    Reads the electrode names and the scalar fields Tdurr/Toffset/dt from the
    .mat file (unwrapping Matlab's nested-array scalars) and saves them to
    ``electrodes_coh_meta_data.pkl`` under the subject's folder.
    """
    input_file = op.join(SUBJECTS_DIR, subject, 'electrodes',
                         matlab_electrodes_data_file)
    mat = utils.Bag(sio.loadmat(input_file))
    # Electrode names arrive as nested object arrays; unwrap to plain strings.
    meta_data = {'electrodes': [e[0][0].astype(str) for e in mat['electrodes']]}
    # Scalars are stored as 1x1 arrays; take the single value.
    for key in ('Tdurr', 'Toffset', 'dt'):
        meta_data[key] = mat[key][0][0]
    utils.save(meta_data,
               op.join(SUBJECTS_DIR, subject, 'electrodes_coh_meta_data.pkl'))
def convert_darpa_ct(args):
    """Locate and convert DARPA CT scans to mgz for each subject.

    Tries, in order: an existing converted ct_org.mgz, pre-converted ct.*
    files in the Angelique recon-alls tree (copied locally), and finally raw
    CT folders that are converted via the ct pipeline.

    Bug fix: the loop used to rebind ``args`` with the Bag returned by
    ``ct.read_cmd_args``, so from the second subject on the flags
    (``overwrite``, ``print_only``, ...) were read from the ct-pipeline Bag
    instead of the original command-line args; the ct args now use their own
    name. Also fixed the 'Coping' typo in the log message.
    """
    bads, goods = [], []
    if args.print_only:
        args.ignore_missing = True
    args.subject = pu.decode_subjects(args.subject)
    for subject in args.subject:
        local_ct_fol = utils.make_dir(op.join(pu.SUBJECTS_DIR, subject, 'ct'))
        # NOTE(review): local_ct_fol already ends with 'ct', so this checks
        # <subject>/ct/ct/ct_org.mgz -- confirm the double 'ct' is intended.
        ct_fname = op.join(local_ct_fol, 'ct', 'ct_org.mgz')
        if op.isfile(ct_fname) and not args.overwrite:
            goods.append(subject)
            continue
        darpa_subject = subject[:2].upper() + subject[2:]
        # First choice: already-converted ct.* files in the recon-alls tree.
        files = glob.glob(op.join(
            f'/homes/5/npeled/space1/Angelique/recon-alls/{darpa_subject}/',
            '**', 'ct.*'), recursive=True)
        if len(files) > 0:
            for fname in files:
                output_fname = op.join(local_ct_fol,
                                       utils.namebase_with_ext(fname))
                print('Copying {} to {}'.format(fname, output_fname))
                utils.copy_file(fname, output_fname)
            goods.append(subject)
            continue
        # Second choice: a raw CT folder, first under kara's space, then
        # under the recon-alls tree; require exactly one match.
        fols = glob.glob(
            op.join('/space/huygens/1/users/kara', f'{darpa_subject}_CT*'))
        ct_raw_input_fol = fols[0] if len(fols) == 1 else ''
        if not op.isdir(ct_raw_input_fol):
            fols = glob.glob(op.join(
                f'/homes/5/npeled/space1/Angelique/recon-alls/{darpa_subject}/',
                '**', f'{darpa_subject}_CT*'), recursive=True)
            ct_raw_input_fol = fols[0] if len(fols) == 1 else ''
        if not op.isdir(ct_raw_input_fol):
            bads.append(subject)
            continue
        ct_args = ct.read_cmd_args(
            utils.Bag(subject=subject,
                      function='convert_ct_to_mgz',
                      ct_raw_input_fol=ct_raw_input_fol,
                      print_only=args.print_only,
                      ignore_missing=args.ignore_missing,
                      overwrite=args.overwrite,
                      ask_before=args.ask_before))
        ret = pu.run_on_subjects(ct_args, ct.main)
        if ret:
            goods.append(subject)
        else:
            bads.append(subject)
    print('Good subjects:\n {}'.format(goods))
    print('Bad subjects:\n {}'.format(bads))
def get_t1_vertices_data(subject):
    """Sample the brainmask intensity at each pial-surface vertex, per hemisphere.

    Maps pial vertices (tkreg RAS) to T1 voxel indices and saves the sampled
    intensities as surf/T1-<hemi>.npy for both hemispheres.

    Improvement: the vertex loop now uses ``enumerate`` instead of
    ``zip(range(len(...)), ...)`` (same behavior, idiomatic form).

    :param subject: subject name (folders under MMVT_DIR and SUBJECTS_DIR)
    """
    trans_fname = op.join(MMVT_DIR, subject, 't1_trans.npz')
    trans_dict = utils.Bag(np.load(trans_fname))
    # Invert vox->tkreg-RAS to map surface coordinates into voxel indices.
    ras_tkr2vox = np.linalg.inv(trans_dict.vox2ras_tkr)
    pial_verts = utils.load_surf(subject, MMVT_DIR, SUBJECTS_DIR)
    t1_data, t1_header = anat.get_data_and_header(subject, 'brainmask.mgz')
    for hemi in utils.HEMIS:
        output_fname = op.join(MMVT_DIR, subject, 'surf',
                               'T1-{}.npy'.format(hemi))
        verts = pial_verts[hemi]
        t1_surf_hemi = np.zeros((len(verts)))
        # Round to the nearest voxel index per vertex.
        hemi_pial_voxels = np.rint(utils.apply_trans(ras_tkr2vox,
                                                     verts)).astype(int)
        for vert_ind, t1_vox in enumerate(hemi_pial_voxels):
            t1_surf_hemi[vert_ind] = t1_data[tuple(t1_vox)]
        np.save(output_fname, t1_surf_hemi)
def load_edf_data_rest(args):
    """Load resting-state EDF data (MG102_d3_Fri.edf) via the electrodes pipeline."""
    elecs_args = elecs.read_cmd_args(
        utils.Bag(
            subject=args.subject,
            function='create_raw_data_for_blender',
            task='rest',
            bipolar=False,
            remove_power_line_noise=True,
            raw_fname='MG102_d3_Fri.edf',
            normalize_data=False,
            preload=False))
    pu.run_on_subjects(elecs_args, elecs.main)
def get_modality_trans_file(subject, modality):
    """Load the voxel/RAS transform file for a modality, creating it if missing.

    :param subject: subject name (folder under MMVT_DIR)
    :param modality: 'mri' or 'ct'
    :return: a utils.Bag with the transform arrays, or None for an
        unsupported modality.
    """
    # Per-modality transform file name and the routine that creates it.
    modality_specs = {
        'mri': ('orig_trans.npz', anat.save_subject_orig_trans),
        'ct': ('ct_trans.npz', anat.save_subject_ct_trans),
    }
    if modality not in modality_specs:
        print('The modality {} is not supported!'.format(modality))
        return None
    fname, create_trans = modality_specs[modality]
    trans_fname = op.join(MMVT_DIR, subject, fname)
    if not op.isfile(trans_fname):
        create_trans(subject)
    return utils.Bag(np.load(trans_fname))
def calc_meg_connectivity(args):
    """Compute MEG label connectivity (PLI, single window) for the subject(s)."""
    conn_args = connectivity.read_cmd_args(
        utils.Bag(
            subject=args.subject,
            atlas='laus125',
            function='calc_lables_connectivity',
            connectivity_modality='meg',
            connectivity_method='pli',
            windows_num=1,
            recalc_connectivity=True,
            n_jobs=args.n_jobs))
    connectivity.call_main(conn_args)
def merge_connectivity(args):
    """Merge MEG (PLI) and fMRI (corr) static connectivity into hub-only files.

    For each subject, loads both connectivity matrices, finds the hub of each
    modality, keeps only connections involving those hubs, and saves three
    connectivity files: meg-hubs, fmri-hubs and the joint fmri-meg-hubs.

    :param args: Bag with at least ``mri_subject`` (list) and ``atlas``.
    """
    for subject in args.mri_subject:
        conn_args = connectivity.read_cmd_args(
            dict(subject=subject, atlas=args.atlas, norm_by_percentile=False))
        # Absolute connectivity strengths; squeeze drops singleton dims.
        meg_con = np.abs(
            np.load(
                op.join(MMVT_DIR, subject, 'connectivity',
                        'meg_static_pli.npy')).squeeze())
        fmri_con = np.abs(
            np.load(
                op.join(MMVT_DIR, subject, 'connectivity',
                        'fmri_static_corr.npy')).squeeze())
        # The npz holds the meta data (conditions) matching the npy matrix.
        d = utils.Bag(
            np.load(
                op.join(MMVT_DIR, subject, 'connectivity',
                        'meg_static_pli.npz')))
        labels_names = np.load(
            op.join(MMVT_DIR, subject, 'connectivity', 'labels_names.npy'))
        # Per-modality thresholds for what counts as a connection.
        meg_threshold, fmri_threshold = 0.3, 0.5
        # Earlier top-k based approach, kept for reference:
        # if args.top_k == 0:
        #     L = len(d.labels)
        #     args.top_k = int(np.rint(L * (L - 1) / 200))
        # meg_con_sparse, meg_top_k = calc_con(meg_con, args.top_k)
        # fmri_con_sparse, fmri_top_k = calc_con(fmri_con, args.top_k)
        # if len(set(fmri_top_k).intersection(set(meg_top_k))):
        #     print('fmri and meg top k intersection!')
        # con = con_fmri - con_meg
        # if len(np.where(con)[0]) != args.top_k * 2:
        #     print('Wrong number of values in the conn matrix!'.format(len(np.where(con)[0])))
        #     continue
        meg_hub, fmri_hub = calc_hubs(meg_con, fmri_con, labels_names,
                                      meg_threshold, fmri_threshold)
        meg_con_hubs, fmri_con_hubs, join_con_hubs = create_con_with_only_hubs(
            meg_con, fmri_con, meg_hub, fmri_hub, meg_threshold,
            fmri_threshold)
        for con_hubs, con_name in zip(
                [meg_con_hubs, fmri_con_hubs, join_con_hubs],
                ['meg-hubs', 'fmri-hubs', 'fmri-meg-hubs']):
            output_fname = op.join(MMVT_DIR, subject, 'connectivity',
                                   '{}.npz'.format(con_name))
            con_vertices_fname = op.join(MMVT_DIR, subject, 'connectivity',
                                         '{}_vertices.pkl'.format(con_name))
            connectivity.save_connectivity(subject, con_hubs, con_name,
                                           connectivity.ROIS_TYPE,
                                           labels_names, d.conditions,
                                           output_fname, conn_args,
                                           con_vertices_fname)
            print('{} was saved in {}'.format(con_name, output_fname))
def calc_electrodes_rest_connectivity(args):
    """Compute resting-state electrodes connectivity (PLI + CV, alpha band)."""
    con_args = con.read_cmd_args(
        utils.Bag(
            subject=args.subject,
            function='calc_electrodes_rest_connectivity',
            connectivity_modality='electrodes',
            connectivity_method='pli,cv',
            windows_length=1000,
            windows_shift=200,
            sfreq=2000.0,
            fmin=8,
            fmax=13,
            n_jobs=args.n_jobs))
    pu.run_on_subjects(con_args, con.main)
def calc_meg_connectivity(args):
    """Compute windowed MEG label connectivity (PLI + CV) per subject."""
    con_args = con.read_cmd_args(
        utils.Bag(
            subject=args.subject,
            atlas='laus125',
            function='calc_lables_connectivity',
            connectivity_modality='meg',
            connectivity_method='pli,cv',
            windows_length=500,
            windows_shift=100,
            n_jobs=args.n_jobs))
    pu.run_on_subjects(con_args, con.main)
def darpa_prep_angelique(args):
    """Prepare subject folders from the Angelique recon-alls tree.

    Searches recursively for a SurferOutput folder per subject and runs the
    anatomy pipeline's prepare_subject_folder step with it.
    """
    import glob
    for subject in args.subject:
        darpa_subject = subject[:2].upper() + subject[2:]
        root = op.join('/homes/5/npeled/space1/Angelique/recon-alls',
                       darpa_subject)
        recon_all_dirs = glob.glob(op.join(root, '**', '*SurferOutput*'),
                                   recursive=True)
        if not recon_all_dirs:
            print("Can't find the recon-all folder for {}!".format(subject))
            continue
        anat_args = anat.read_cmd_args(utils.Bag(
            subject=subject,
            function='prepare_subject_folder',
            remote_subject_dir=recon_all_dirs[0]))
        pu.run_on_subjects(anat_args, anat.main)
def main():
    """Entry point: parse the MMVT command line and dispatch to a function.

    The function named by ``--function`` is looked up in this module's
    globals and called with the parsed args Bag.

    Improvement: removed the unused function-local ``import collections``
    (it was only needed by the commented-out ``choices=`` code).
    """
    parser = argparse.ArgumentParser(description='MMVT')
    parser.add_argument('-s', '--subject', help='subject name', required=True, type=au.str_arr_type)
    parser.add_argument('-a', '--atlas', help='atlas name', required=False, default='aparc.DKTatlas')
    parser.add_argument('-u', '--sftp_username', help='sftp username', required=False, default='npeled')
    parser.add_argument('-d', '--sftp_domain', help='sftp domain', required=False, default='door.nmr.mgh.harvard.edu')
    parser.add_argument('--remote_subject_dir', help='remote_subjects_dir', required=False,
                        default='/space/thibault/1/users/npeled/subjects/{subject}')
    parser.add_argument('-f', '--function', help='function name', required=True)
    args = utils.Bag(au.parse_parser(parser))
    globals()[args.function](args)
def calc_electrodes_power_spectrum(subject, edf_name, overwrite=False):
    """Compute the resting-state epochs power spectrum for an electrodes EDF.

    Runs the electrodes pipeline's EDF-loading and power-spectrum steps on
    10-second windows shifted by 5 seconds.
    """
    raw_fname = '{}.edf'.format(edf_name)
    pipeline_args = electrodes.read_cmd_args(utils.Bag(
        subject=subject,
        function='create_raw_data_from_edf,calc_epochs_power_spectrum',
        task='rest',
        bipolar=False,
        remove_power_line_noise=True,
        raw_fname=raw_fname,
        normalize_data=False,
        preload=True,
        windows_length=10,  # seconds
        windows_shift=5,
        overwrite_power_spectrum=overwrite))
    electrodes.call_main(pipeline_args)