def run_group_average_source(*, cfg, subject='average'):
    """Run group average in source space"""
    if not config.run_source_estimation:
        msg = '    … skipping: run_source_estimation is set to False.'
        logger.info(**gen_log_kwargs(message=msg))
        return

    mne.datasets.fetch_fsaverage(subjects_dir=config.get_fs_subjects_dir())

    parallel, run_func, _ = parallel_func(morph_stc,
                                          n_jobs=config.get_n_jobs())
    all_morphed_stcs = parallel(
        run_func(cfg=cfg,
                 subject=subject,
                 fs_subject=config.get_fs_subject(subject),
                 session=session) for subject, session in itertools.product(
                     config.get_subjects(), config.get_sessions()))
    mean_morphed_stcs = np.array(all_morphed_stcs).mean(axis=0)

    # XXX to fix
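    # For now, only the first session (or None) is passed on to run_average.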
    sessions = config.get_sessions()
    if sessions:
        session = sessions[0]
    else:
        session = None

    run_average(cfg=cfg, session=session, mean_morphed_stcs=mean_morphed_stcs)
def run_group_average_source(*, cfg, subject='average'):
    """Run group average in source space"""

    mne.datasets.fetch_fsaverage(subjects_dir=config.get_fs_subjects_dir())

    with config.get_parallel_backend():
        parallel, run_func, _ = parallel_func(morph_stc,
                                              n_jobs=config.get_n_jobs())
        all_morphed_stcs = parallel(
            run_func(cfg=cfg,
                     subject=subject,
                     fs_subject=config.get_fs_subject(subject),
                     session=session)
            for subject, session in itertools.product(config.get_subjects(),
                                                      config.get_sessions()))
        mean_morphed_stcs = np.array(all_morphed_stcs).mean(axis=0)

        # XXX to fix
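        # For now, only the first session (or None) is passed on to run_average.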
        sessions = config.get_sessions()
        if sessions:
            session = sessions[0]
        else:
            session = None

        run_average(cfg=cfg,
                    session=session,
                    mean_morphed_stcs=mean_morphed_stcs)
def main():
    """Make reports."""
    parallel, run_func, _ = parallel_func(run_report,
                                          n_jobs=config.get_n_jobs())
    logs = parallel(
        run_func(
            cfg=get_config(subject=subject), subject=subject, session=session)
        for subject, session in itertools.product(config.get_subjects(),
                                                  config.get_sessions()))

    config.save_logs(logs)

    sessions = config.get_sessions()
    if not sessions:
        sessions = [None]

    if (config.get_task() is not None and config.get_task().lower() == 'rest'):
        msg = '    … skipping "average" report for "rest" task.'
        logger.info(**gen_log_kwargs(message=msg))
        return

    for session in sessions:
        run_report_average(cfg=get_config(subject='average'),
                           subject='average',
                           session=session)
def main():
    """Run epochs."""
    parallel, run_func, _ = parallel_func(drop_ptp, n_jobs=config.get_n_jobs())
    logs = parallel(
        run_func(cfg=get_config(), subject=subject, session=session)
        for subject, session in itertools.product(config.get_subjects(),
                                                  config.get_sessions()))

    config.save_logs(logs)
def main():
    # Ensure we're also processing fsaverage if present
    subjects = config.get_subjects()
    if (Path(config.get_fs_subjects_dir()) / 'fsaverage').exists():
        subjects.append('fsaverage')

    parallel, run_func, _ = parallel_func(make_coreg_surfaces,
                                          n_jobs=config.get_n_jobs())

    parallel(run_func(get_config(), subject) for subject in subjects)
def main():
    """Run epochs."""
    # Here we use fewer n_jobs to prevent potential memory problems
    parallel, run_func, _ = parallel_func(
        run_epochs,
        n_jobs=max(config.get_n_jobs() // 4, 1)
    )
    logs = parallel(
        run_func(cfg=get_config(subject, session), subject=subject,
                 session=session)
        for subject, session in
        itertools.product(config.get_subjects(), config.get_sessions())
    )

    config.save_logs(logs)
def main():
    """Run ICA."""
    if config.spatial_filter != 'ica':
        msg = 'Skipping …'
        logger.info(**gen_log_kwargs(message=msg))
        return

    parallel, run_func, _ = parallel_func(run_ica, n_jobs=config.get_n_jobs())
    logs = parallel(
        run_func(
            cfg=get_config(subject=subject), subject=subject, session=session)
        for subject, session in itertools.product(config.get_subjects(),
                                                  config.get_sessions()))

    config.save_logs(logs)
def main():
    """Run inv."""
    if not config.run_source_estimation:
        msg = '    … skipping: run_source_estimation is set to False.'
        logger.info(**gen_log_kwargs(message=msg))
        return

    parallel, run_func, _ = parallel_func(run_inverse,
                                          n_jobs=config.get_n_jobs())
    logs = parallel(
        run_func(cfg=get_config(), subject=subject, session=session)
        for subject, session in itertools.product(config.get_subjects(),
                                                  config.get_sessions()))

    config.save_logs(logs)
def main():
    """Run Time-frequency decomposition."""
    if not config.time_frequency_conditions:
        msg = 'Skipping …'
        logger.info(**gen_log_kwargs(message=msg))
        return

    parallel, run_func, _ = parallel_func(run_time_frequency,
                                          n_jobs=config.get_n_jobs())
    logs = parallel(
        run_func(cfg=get_config(), subject=subject, session=session)
        for subject, session in itertools.product(config.get_subjects(),
                                                  config.get_sessions()))

    config.save_logs(logs)
def main():
    """Initialize the output directories."""
    msg = 'Running: Initializing output directories.'
    logger.info(**gen_log_kwargs(message=msg))

    init_dataset(cfg=get_config())
    parallel, run_func, _ = parallel_func(init_subject_dirs,
                                          n_jobs=config.get_n_jobs())
    parallel(
        run_func(cfg=get_config(), subject=subject, session=session)
        for subject, session in itertools.product(config.get_subjects(),
                                                  config.get_sessions()))

    msg = 'Completed: Initializing output directories.'
    logger.info(**gen_log_kwargs(message=msg))
def main() -> None:
    """Run freesurfer recon-all command on BIDS dataset.

    The script allows to run the freesurfer recon-all
    command on all subjects of your BIDS dataset. It can
    run in parallel with the --n_jobs parameter.

    It is built on top of the FreeSurfer BIDS app:

    https://github.com/BIDS-Apps/freesurfer

    and the MNE BIDS Pipeline

    https://mne.tools/mne-bids-pipeline

    You must have FreeSurfer available on your system.

    Run via the MNE BIDS Pipeline's `run.py`:

    python run.py --steps=freesurfer --config=your_pipeline_config.py

    """  # noqa

    logger.info('Running FreeSurfer')

    subjects = config.get_subjects()
    root_dir = config.get_bids_root()
    subjects_dir = Path(config.get_fs_subjects_dir())
    subjects_dir.mkdir(parents=True, exist_ok=True)

    with config.get_parallel_backend():
        n_jobs = config.get_n_jobs()
        parallel, run_func, _ = parallel_func(run_recon, n_jobs=n_jobs)
        parallel(run_func(root_dir, subject, fs_bids_app)
                 for subject in subjects)

        # Handle fsaverage
        fsaverage_dir = subjects_dir / 'fsaverage'
        if fsaverage_dir.exists():
            if fsaverage_dir.is_symlink():
                fsaverage_dir.unlink()
            else:
                shutil.rmtree(fsaverage_dir)

        env = os.environ
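        # Copy the fsaverage subject shipped with FreeSurfer into subjects_dir.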
        shutil.copytree(f"{env['FREESURFER_HOME']}/subjects/fsaverage",
                        subjects_dir / 'fsaverage')
def main():
    """Run filter."""
    parallel, run_func, _ = parallel_func(filter_data,
                                          n_jobs=config.get_n_jobs())

    # Enabling different runs for different subjects
    sub_run_ses = []
    for subject in config.get_subjects():
        sub_run_ses += list(
            itertools.product([subject], config.get_runs(subject=subject),
                              config.get_sessions()))

    logs = parallel(
        run_func(
            cfg=get_config(subject), subject=subject, run=run, session=session)
        for subject, run, session in sub_run_ses)

    config.save_logs(logs)
def main():
    """Run maxwell_filter."""
    if not config.use_maxwell_filter:
        msg = 'Skipping …'
        logger.info(**gen_log_kwargs(message=msg))
        return

    with config.get_parallel_backend():
        parallel, run_func, _ = parallel_func(run_maxwell_filter,
                                              n_jobs=config.get_n_jobs())
        logs = parallel(
            run_func(cfg=get_config(subject, session),
                     subject=subject,
                     session=session)
            for subject, session in itertools.product(config.get_subjects(),
                                                      config.get_sessions()))

        config.save_logs(logs)
def main():
    """Run BEM surface extraction."""
    if not config.run_source_estimation:
        msg = '    … skipping: run_source_estimation is set to False.'
        logger.info(**gen_log_kwargs(message=msg))
        return

    if config.use_template_mri:
        msg = '    … skipping BEM computation when using an MRI template.'
        logger.info(**gen_log_kwargs(message=msg))
        return

    parallel, run_func, _ = parallel_func(make_bem_and_scalp_surface,
                                          n_jobs=config.get_n_jobs())
    logs = parallel(
        run_func(cfg=get_config(subject=subject), subject=subject)
        for subject in config.get_subjects())

    config.save_logs(logs)
def get_config(subject: Optional[str] = None,
               session: Optional[str] = None) -> BunchConst:
    cfg = BunchConst(task=config.get_task(),
                     datatype=config.get_datatype(),
                     acq=config.acq,
                     rec=config.rec,
                     space=config.space,
                     deriv_root=config.get_deriv_root(),
                     conditions=config.conditions,
                     contrasts=config.contrasts,
                     decode=config.decode,
                     decoding_metric=config.decoding_metric,
                     decoding_n_splits=config.decoding_n_splits,
                     random_state=config.random_state,
                     analyze_channels=config.analyze_channels,
                     ch_types=config.ch_types,
                     eeg_reference=config.get_eeg_reference(),
                     n_jobs=config.get_n_jobs())
    return cfg
def get_config(subject: Optional[str] = None,
               session: Optional[str] = None) -> SimpleNamespace:
    cfg = SimpleNamespace(
        task=config.get_task(),
        runs=config.get_runs(subject=subject),
        datatype=config.get_datatype(),
        acq=config.acq,
        rec=config.rec,
        space=config.space,
        mindist=config.mindist,
        spacing=config.spacing,
        use_template_mri=config.use_template_mri,
        source_info_path_update=config.source_info_path_update,
        ch_types=config.ch_types,
        fs_subject=config.get_fs_subject(subject=subject),
        fs_subjects_dir=config.get_fs_subjects_dir(),
        deriv_root=config.get_deriv_root(),
        bids_root=config.get_bids_root(),
        n_jobs=config.get_n_jobs())
    return cfg
def main():
    """Run cov."""
    if not config.run_source_estimation:
        msg = '    … skipping: run_source_estimation is set to False.'
        logger.info(**gen_log_kwargs(message=msg))
        return

    if config.noise_cov == "ad-hoc":
        msg = '    … skipping: using ad-hoc diagonal covariance.'
        logger.info(**gen_log_kwargs(message=msg))
        return

    with config.get_parallel_backend():
        parallel, run_func, _ = parallel_func(run_covariance,
                                              n_jobs=config.get_n_jobs())
        logs = parallel(
            run_func(cfg=get_config(), subject=subject, session=session)
            for subject, session in itertools.product(config.get_subjects(),
                                                      config.get_sessions()))

        config.save_logs(logs)
def main():
    """Apply ssp."""
    if config.spatial_filter != 'ssp':
        msg = 'Skipping …'
        logger.info(**gen_log_kwargs(message=msg))
        return

    with config.get_parallel_backend():
        parallel, run_func, _ = parallel_func(
            apply_ssp,
            n_jobs=config.get_n_jobs()
        )
        logs = parallel(
            run_func(cfg=get_config(), subject=subject, session=session)
            for subject, session in
            itertools.product(
                config.get_subjects(),
                config.get_sessions()
            )
        )

        config.save_logs(logs)
def get_config(
    subject: Optional[str] = None,
    session: Optional[str] = None
) -> BunchConst:
    cfg = BunchConst(
        task=config.get_task(),
        runs=config.get_runs(subject=subject),
        datatype=config.get_datatype(),
        acq=config.acq,
        rec=config.rec,
        space=config.space,
        mindist=config.mindist,
        spacing=config.spacing,
        use_template_mri=config.use_template_mri,
        ch_types=config.ch_types,
        fs_subject=config.get_fs_subject(subject=subject),
        fs_subjects_dir=config.get_fs_subjects_dir(),
        deriv_root=config.get_deriv_root(),
        bids_root=config.get_bids_root(),
        n_jobs=config.get_n_jobs()
    )
    return cfg
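
All of the examples above follow the same pattern: parallel_func wraps a per-subject function and returns a parallel executor plus a delayed wrapper, which is then driven by a generator over subjects and sessions. Below is a minimal, self-contained sketch of that pattern, assuming parallel_func is (or behaves like) mne.parallel.parallel_func; the process_subject function and the subject/session lists are purely illustrative, not part of the pipeline.

import itertools

from mne.parallel import parallel_func


def process_subject(*, subject, session):
    """Stand-in for a pipeline step such as run_epochs or run_ica."""
    return f'processed sub-{subject}, ses-{session}'


# Illustrative inputs; a real pipeline would take these from its config.
subjects = ['01', '02']
sessions = [None]

# parallel_func returns a parallel executor, a delayed wrapper around
# process_subject, and the effective number of jobs.
parallel, run_func, _ = parallel_func(process_subject, n_jobs=1)
logs = parallel(
    run_func(subject=subject, session=session)
    for subject, session in itertools.product(subjects, sessions))
print(logs)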