def run_group_average_source(*, cfg, subject='average'):
    """Run group average in source space"""
    if not config.run_source_estimation:
        msg = ' … skipping: run_source_estimation is set to False.'
        logger.info(**gen_log_kwargs(message=msg))
        return

    mne.datasets.fetch_fsaverage(subjects_dir=config.get_fs_subjects_dir())

    parallel, run_func, _ = parallel_func(morph_stc,
                                          n_jobs=config.get_n_jobs())
    all_morphed_stcs = parallel(
        run_func(cfg=cfg, subject=subject,
                 fs_subject=config.get_fs_subject(subject),
                 session=session)
        for subject, session in
        itertools.product(config.get_subjects(), config.get_sessions())
    )
    mean_morphed_stcs = np.array(all_morphed_stcs).mean(axis=0)

    # XXX to fix
    sessions = config.get_sessions()
    if sessions:
        session = sessions[0]
    else:
        session = None

    run_average(cfg=cfg, session=session,
                mean_morphed_stcs=mean_morphed_stcs)

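# Hypothetical numeric sketch of the grand-averaging idiom used in
# run_group_average_source above: np.array(all_morphed_stcs) stacks one row
# of per-condition results per subject, and .mean(axis=0) averages across
# subjects, leaving one entry per condition. Plain floats stand in for the
# SourceEstimate objects the pipeline actually averages; the function name
# and the data below are illustrative only.
import numpy as np


def demo_mean_across_subjects():
    all_morphed_stcs = [[1.0, 10.0],   # subject 1: condition A, condition B
                        [3.0, 30.0]]   # subject 2: condition A, condition B
    mean_morphed_stcs = np.array(all_morphed_stcs).mean(axis=0)
    assert mean_morphed_stcs.tolist() == [2.0, 20.0]
    return mean_morphed_stcs
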
def main(): """Make reports.""" parallel, run_func, _ = parallel_func(run_report, n_jobs=config.get_n_jobs()) logs = parallel( run_func( cfg=get_config(subject=subject), subject=subject, session=session) for subject, session in itertools.product(config.get_subjects(), config.get_sessions())) config.save_logs(logs) sessions = config.get_sessions() if not sessions: sessions = [None] if (config.get_task() is not None and config.get_task().lower() == 'rest'): msg = ' … skipping "average" report for "rest" task.' logger.info(**gen_log_kwargs(message=msg)) return for session in sessions: run_report_average(cfg=get_config(subject='average'), subject='average', session=session)
def main(): """Run group average in source space""" msg = 'Running Step 13: Grand-average source estimates' logger.info(gen_log_message(step=13, message=msg)) if not config.run_source_estimation: msg = ' … skipping: run_source_estimation is set to False.' logger.info(gen_log_message(step=13, message=msg)) return mne.datasets.fetch_fsaverage(subjects_dir=config.get_fs_subjects_dir()) parallel, run_func, _ = parallel_func(morph_stc, n_jobs=config.N_JOBS) all_morphed_stcs = parallel(run_func(subject, session) for subject, session in itertools.product(config.get_subjects(), config.get_sessions())) all_morphed_stcs = [morphed_stcs for morphed_stcs, subject in zip(all_morphed_stcs, config.get_subjects())] mean_morphed_stcs = map(sum, zip(*all_morphed_stcs)) subject = 'average' # XXX to fix if config.get_sessions(): session = config.get_sessions()[0] else: session = None bids_path = BIDSPath(subject=subject, session=session, task=config.get_task(), acquisition=config.acq, run=None, processing=config.proc, recording=config.rec, space=config.space, datatype=config.get_datatype(), root=config.deriv_root, check=False) if isinstance(config.conditions, dict): conditions = list(config.conditions.keys()) else: conditions = config.conditions for condition, this_stc in zip(conditions, mean_morphed_stcs): this_stc /= len(all_morphed_stcs) method = config.inverse_method cond_str = config.sanitize_cond_name(condition) inverse_str = method hemi_str = 'hemi' # MNE will auto-append '-lh' and '-rh'. morph_str = 'morph2fsaverage' fname_stc_avg = bids_path.copy().update( suffix=f'{cond_str}+{inverse_str}+{morph_str}+{hemi_str}') this_stc.save(fname_stc_avg) msg = 'Completed Step 13: Grand-average source estimates' logger.info(gen_log_message(step=13, message=msg))
def run_group_average_source(*, cfg, subject='average'):
    """Run group average in source space"""
    mne.datasets.fetch_fsaverage(subjects_dir=config.get_fs_subjects_dir())

    with config.get_parallel_backend():
        parallel, run_func, _ = parallel_func(morph_stc,
                                              n_jobs=config.get_n_jobs())
        all_morphed_stcs = parallel(
            run_func(cfg=cfg, subject=subject,
                     fs_subject=config.get_fs_subject(subject),
                     session=session)
            for subject, session in
            itertools.product(config.get_subjects(), config.get_sessions())
        )
        mean_morphed_stcs = np.array(all_morphed_stcs).mean(axis=0)

        # XXX to fix
        sessions = config.get_sessions()
        if sessions:
            session = sessions[0]
        else:
            session = None

        run_average(cfg=cfg, session=session,
                    mean_morphed_stcs=mean_morphed_stcs)

def main(): """Run grp ave.""" msg = 'Running Step 13: Grand-average source estimates' logger.info(gen_log_message(step=13, message=msg)) mne.datasets.fetch_fsaverage(subjects_dir=config.get_fs_subjects_dir()) parallel, run_func, _ = parallel_func(morph_stc, n_jobs=config.N_JOBS) all_morphed_stcs = parallel(run_func(subject, session) for subject, session in itertools.product(config.get_subjects(), config.get_sessions())) all_morphed_stcs = [morphed_stcs for morphed_stcs, subject in zip(all_morphed_stcs, config.get_subjects())] mean_morphed_stcs = map(sum, zip(*all_morphed_stcs)) subject = 'average' # XXX to fix if config.get_sessions(): session = config.get_sessions()[0] else: session = None deriv_path = config.get_subject_deriv_path(subject=subject, session=session, kind=config.get_kind()) bids_basename = BIDSPath(subject=subject, session=session, task=config.get_task(), acquisition=config.acq, run=None, processing=config.proc, recording=config.rec, space=config.space, prefix=deriv_path, check=False) for condition, this_stc in zip(config.conditions, mean_morphed_stcs): this_stc /= len(all_morphed_stcs) method = config.inverse_method cond_str = condition.replace(op.sep, '').replace('_', '') inverse_str = method hemi_str = 'hemi' # MNE will auto-append '-lh' and '-rh'. morph_str = 'morph2fsaverage' fname_stc_avg = bids_basename.copy().update( kind=f'{cond_str}+{inverse_str}+{morph_str}+{hemi_str}') this_stc.save(fname_stc_avg) msg = 'Completed Step 13: Grand-average source estimates' logger.info(gen_log_message(step=13, message=msg))
def main(): """Make reports.""" msg = 'Running Step 99: Create reports' logger.info(gen_log_message(step=99, message=msg)) parallel, run_func, _ = parallel_func(run_report, n_jobs=config.N_JOBS) parallel(run_func(subject, session) for subject, session in itertools.product(config.get_subjects(), config.get_sessions())) sessions = config.get_sessions() if not sessions: sessions = [None] for session in sessions: run_report_average(session)
def main(): """Run sliding estimator.""" if not config.contrasts: msg = 'No contrasts specified; not performing decoding.' logger.info(**gen_log_kwargs(message=msg)) return if not config.decode: msg = 'No decoding requested by user.' logger.info(**gen_log_kwargs(message=msg)) return # Here we go parallel inside the :class:`mne.decoding.SlidingEstimator` # so we don't dispatch manually to multiple jobs. parallel, run_func, _ = parallel_func(run_time_decoding, n_jobs=1) logs = parallel( run_func(cfg=get_config(), subject=subject, condition1=cond_1, condition2=cond_2, session=session) for subject, session, (cond_1, cond_2) in itertools.product( config.get_subjects(), config.get_sessions(), config.contrasts)) config.save_logs(logs)
def main(): """Run sliding estimator.""" msg = 'Running Step 7: Sliding estimator' logger.info(gen_log_message(step=7, message=msg)) if not config.contrasts: msg = 'No contrasts specified; not performing decoding.' logger.info(gen_log_message(step=7, message=msg)) return if not config.decode: msg = 'No decoding requested by user.' logger.info(gen_log_message(step=7, message=msg)) return # Here we go parallel inside the :class:`mne.decoding.SlidingEstimator` # so we don't dispatch manually to multiple jobs. for subject in config.get_subjects(): for session in config.get_sessions(): for contrast in config.contrasts: cond_1, cond_2 = contrast run_time_decoding(subject=subject, condition1=cond_1, condition2=cond_2, session=session) msg = 'Completed Step 7: Sliding estimator' logger.info(gen_log_message(step=7, message=msg))
def main(): """Run epochs.""" parallel, run_func, _ = parallel_func(drop_ptp, n_jobs=config.get_n_jobs()) logs = parallel( run_func(cfg=get_config(), subject=subject, session=session) for subject, session in itertools.product(config.get_subjects(), config.get_sessions())) config.save_logs(logs)
def main(): """Run forward.""" msg = 'Running Step 10: Create forward solution' logger.info(gen_log_message(step=10, message=msg)) parallel, run_func, _ = parallel_func(run_forward, n_jobs=config.N_JOBS) parallel(run_func(subject, session) for subject, session in itertools.product(config.get_subjects(), config.get_sessions())) msg = 'Completed Step 10: Create forward solution' logger.info(gen_log_message(step=10, message=msg))
def main(): """Run inv.""" msg = 'Running Step 12: Compute and apply inverse solution' logger.info(gen_log_message(step=12, message=msg)) parallel, run_func, _ = parallel_func(run_inverse, n_jobs=config.N_JOBS) parallel(run_func(subject, session) for subject, session in itertools.product(config.get_subjects(), config.get_sessions())) msg = 'Completed Step 12: Compute and apply inverse solution' logger.info(gen_log_message(step=12, message=msg))
def main(): """Run evoked.""" msg = 'Running Step 6: Create evoked data' logger.info(gen_log_message(step=6, message=msg)) parallel, run_func, _ = parallel_func(run_evoked, n_jobs=config.N_JOBS) parallel(run_func(subject, session) for subject, session in itertools.product(config.get_subjects(), config.get_sessions())) msg = 'Completed Step 6: Create evoked data' logger.info(gen_log_message(step=6, message=msg))
def main(): """Run filter.""" msg = 'Running Step 2: Frequency filtering' logger.info(gen_log_message(step=2, message=msg)) parallel, run_func, _ = parallel_func(run_filter, n_jobs=config.N_JOBS) parallel(run_func(subject, run, session) for subject, run, session in itertools.product(config.get_subjects(), config.get_runs(), config.get_sessions())) msg = 'Completed 2: Frequency filtering' logger.info(gen_log_message(step=2, message=msg))
def main(): """Run tf.""" msg = 'Running Step 8: Time-frequency decomposition' logger.info(gen_log_message(message=msg, step=8)) parallel, run_func, _ = parallel_func(run_time_frequency, n_jobs=config.N_JOBS) parallel(run_func(subject, session) for subject, session in itertools.product(config.get_subjects(), config.get_sessions())) msg = 'Completed Step 8: Time-frequency decomposition' logger.info(gen_log_message(message=msg, step=8))
def main(): """Run cov.""" msg = 'Running Step 11: Estimate noise covariance' logger.info(gen_log_message(step=11, message=msg)) parallel, run_func, _ = parallel_func(run_covariance, n_jobs=config.N_JOBS) parallel( run_func(subject, session) for subject, session in itertools.product( config.get_subjects(), config.get_sessions())) msg = 'Completed Step 11: Estimate noise covariance' logger.info(gen_log_message(step=11, message=msg))
def main(): """Run maxwell_filter.""" msg = 'Running Step 1: Data import and Maxwell filtering' logger.info(gen_log_message(step=1, message=msg)) parallel, run_func, _ = parallel_func(run_maxwell_filter, n_jobs=config.N_JOBS) parallel( run_func(subject, session) for subject, session in itertools.product( config.get_subjects(), config.get_sessions())) msg = 'Completed Step 1: Data import and Maxwell filtering' logger.info(gen_log_message(step=1, message=msg))
def main():
    """Run group average in sensor space."""
    sessions = config.get_sessions()
    if not sessions:
        sessions = [None]

    for session in sessions:
        evokeds = average_evokeds(session)
        if config.interactive:
            for evoked in evokeds:
                evoked.plot()

        if config.decode:
            average_decoding(session)

def main(): """Run epochs.""" msg = 'Running Step 3: Epoching' logger.info(gen_log_message(step=3, message=msg)) # Here we use fewer N_JOBS to prevent potential memory problems parallel, run_func, _ = parallel_func(run_epochs, n_jobs=max(config.N_JOBS // 4, 1)) parallel( run_func(subject, session) for subject, session in itertools.product( config.get_subjects(), config.get_sessions())) msg = 'Completed Step 3: Epoching' logger.info(gen_log_message(step=3, message=msg))
def main(): """Run ICA.""" msg = 'Running Step 4: Compute ICA' logger.info(gen_log_message(step=4, message=msg)) if config.use_ica: parallel, run_func, _ = parallel_func(run_ica, n_jobs=config.N_JOBS) parallel( run_func(subject, session) for subject, session in itertools.product(config.get_subjects(), config.get_sessions())) msg = 'Completed Step 4: Compute ICA' logger.info(gen_log_message(step=4, message=msg))
def main(): """Apply ssp.""" if not config.use_ssp: return msg = 'Running Step 5: Apply SSP' logger.info(gen_log_message(step=5, message=msg)) parallel, run_func, _ = parallel_func(apply_ssp, n_jobs=config.N_JOBS) parallel(run_func(subject, session) for subject, session in itertools.product(config.get_subjects(), config.get_sessions())) msg = 'Completed Step 5: Apply SSP' logger.info(gen_log_message(step=5, message=msg))
def main(): """Run Time-frequency decomposition.""" if not config.time_frequency_conditions: msg = 'Skipping …' logger.info(**gen_log_kwargs(message=msg)) return parallel, run_func, _ = parallel_func(run_time_frequency, n_jobs=config.get_n_jobs()) logs = parallel( run_func(cfg=get_config(), subject=subject, session=session) for subject, session in itertools.product(config.get_subjects(), config.get_sessions())) config.save_logs(logs)
def main(): """Run epochs.""" # Here we use fewer n_jobs to prevent potential memory problems parallel, run_func, _ = parallel_func( run_epochs, n_jobs=max(config.get_n_jobs() // 4, 1) ) logs = parallel( run_func(cfg=get_config(subject, session), subject=subject, session=session) for subject, session in itertools.product(config.get_subjects(), config.get_sessions()) ) config.save_logs(logs)
def main(): """Run ICA.""" if not config.spatial_filter == 'ica': msg = 'Skipping …' logger.info(**gen_log_kwargs(message=msg)) return parallel, run_func, _ = parallel_func(run_ica, n_jobs=config.get_n_jobs()) logs = parallel( run_func( cfg=get_config(subject=subject), subject=subject, session=session) for subject, session in itertools.product(config.get_subjects(), config.get_sessions())) config.save_logs(logs)
def main(): """Initialize the output directories.""" msg = 'Running: Initializing output directories.' logger.info(gen_log_message(step=1, message=msg)) init_dataset() parallel, run_func, _ = parallel_func(init_subject_dirs, n_jobs=config.N_JOBS) parallel( run_func(subject=subject, session=session) for subject, session in itertools.product(config.get_subjects(), config.get_sessions())) msg = 'Completed: Initializing output directories.' logger.info(gen_log_message(step=1, message=msg))
def main(): """Run inv.""" if not config.run_source_estimation: msg = ' … skipping: run_source_estimation is set to False.' logger.info(**gen_log_kwargs(message=msg)) return parallel, run_func, _ = parallel_func(run_inverse, n_jobs=config.get_n_jobs()) logs = parallel( run_func(cfg=get_config(), subject=subject, session=session) for subject, session in itertools.product(config.get_subjects(), config.get_sessions())) config.save_logs(logs)
def main(): """Initialize the output directories.""" msg = 'Running: Initializing output directories.' logger.info(**gen_log_kwargs(message=msg)) with config.get_parallel_backend(): init_dataset(cfg=get_config()) parallel, run_func, _ = parallel_func(init_subject_dirs, n_jobs=config.get_n_jobs()) parallel( run_func(cfg=get_config(), subject=subject, session=session) for subject, session in itertools.product(config.get_subjects(), config.get_sessions())) msg = 'Completed: Initializing output directories.' logger.info(**gen_log_kwargs(message=msg))
def main(): """Run forward.""" msg = 'Running Step 10: Create forward solution' logger.info(gen_log_message(step=10, message=msg)) if not config.run_source_estimation: msg = ' … skipping: run_source_estimation is set to False.' logger.info(gen_log_message(step=10, message=msg)) return parallel, run_func, _ = parallel_func(run_forward, n_jobs=config.N_JOBS) parallel(run_func(subject, session) for subject, session in itertools.product(config.get_subjects(), config.get_sessions())) msg = 'Completed Step 10: Create forward solution' logger.info(gen_log_message(step=10, message=msg))
def run_group_average_sensor(*, cfg, subject='average'):
    """Run group average in sensor space."""
    if config.get_task() is not None and config.get_task().lower() == 'rest':
        msg = ' … skipping: for "rest" task.'
        logger.info(**gen_log_kwargs(message=msg))
        return

    sessions = config.get_sessions()
    if not sessions:
        sessions = [None]

    for session in sessions:
        evokeds = average_evokeds(cfg, session)
        if config.interactive:
            for evoked in evokeds:
                evoked.plot()

        if config.decode:
            average_decoding(cfg, session)

def main(): """Run maxwell_filter.""" if not config.use_maxwell_filter: msg = 'Skipping …' logger.info(**gen_log_kwargs(message=msg)) return with config.get_parallel_backend(): parallel, run_func, _ = parallel_func(run_maxwell_filter, n_jobs=config.get_n_jobs()) logs = parallel( run_func(cfg=get_config(subject, session), subject=subject, session=session) for subject, session in itertools.product(config.get_subjects(), config.get_sessions())) config.save_logs(logs)
def main(): """Run filter.""" parallel, run_func, _ = parallel_func(filter_data, n_jobs=config.get_n_jobs()) # Enabling different runs for different subjects sub_run_ses = [] for subject in config.get_subjects(): sub_run_ses += list( itertools.product([subject], config.get_runs(subject=subject), config.get_sessions())) logs = parallel( run_func( cfg=get_config(subject), subject=subject, run=run, session=session) for subject, run, session in sub_run_ses) config.save_logs(logs)