def run_group_average_source(*, cfg, subject='average'):
    """Run group average in source space"""

    mne.datasets.fetch_fsaverage(subjects_dir=config.get_fs_subjects_dir())

    with config.get_parallel_backend():
        parallel, run_func, _ = parallel_func(morph_stc,
                                              n_jobs=config.get_n_jobs())
        all_morphed_stcs = parallel(
            run_func(cfg=cfg,
                     subject=subject,
                     fs_subject=config.get_fs_subject(subject),
                     session=session)
            for subject, session in itertools.product(config.get_subjects(),
                                                      config.get_sessions()))
        mean_morphed_stcs = np.array(all_morphed_stcs).mean(axis=0)

        # XXX to fix
        sessions = config.get_sessions()
        if sessions:
            session = sessions[0]
        else:
            session = None

        run_average(cfg=cfg,
                    session=session,
                    mean_morphed_stcs=mean_morphed_stcs)
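
The morph_stc helper is defined elsewhere in the pipeline. For reference,
morphing a single subject's SourceEstimate to fsaverage in MNE-Python looks
roughly like this (a minimal sketch; the paths and subject name are
hypothetical):

import mne

subjects_dir = '/path/to/subjects_dir'  # hypothetical FreeSurfer subjects dir
mne.datasets.fetch_fsaverage(subjects_dir=subjects_dir)  # ensure fsaverage

stc = mne.read_source_estimate('sub-01_task-audvis')  # hypothetical file stem
morph = mne.compute_source_morph(stc, subject_from='sub-01',
                                 subject_to='fsaverage',
                                 subjects_dir=subjects_dir)
stc_fsaverage = morph.apply(stc)
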
Example no. 2

def main():
    """Make reports."""
    with config.get_parallel_backend():
        parallel, run_func, _ = parallel_func(run_report,
                                              n_jobs=config.get_n_jobs())
        logs = parallel(
            run_func(cfg=get_config(subject=subject),
                     subject=subject,
                     session=session)
            for subject, session in itertools.product(config.get_subjects(),
                                                      config.get_sessions()))

        config.save_logs(logs)

        sessions = config.get_sessions()
        if not sessions:
            sessions = [None]

        if (config.get_task() is not None
                and config.get_task().lower() == 'rest'):
            msg = '    … skipping "average" report for "rest" task.'
            logger.info(**gen_log_kwargs(message=msg))
            return

        for session in sessions:
            run_report_average(cfg=get_config(subject='average'),
                               subject='average',
                               session=session)
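
Every driver on this page follows the same pattern: parallel_func wraps a
worker function for joblib-based execution and returns a parallel callable
plus a delayed-style wrapper. A minimal, self-contained sketch of that
pattern, assuming the parallel_func used here is MNE-Python's
mne.parallel.parallel_func:

import itertools

from mne.parallel import parallel_func


def process(subject, session):
    """Toy worker; stands in for run_report etc."""
    return f'{subject}/{session}'


parallel, run_func, _ = parallel_func(process, n_jobs=2)
results = parallel(run_func(subject, session)
                   for subject, session in itertools.product(['01', '02'],
                                                             ['a', 'b']))
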
Example no. 3

def main():
    # Ensure we're also processing fsaverage if present
    subjects = config.get_subjects()
    if (Path(config.get_fs_subjects_dir()) / 'fsaverage').exists():
        subjects.append('fsaverage')

    with config.get_parallel_backend():
        parallel, run_func, _ = parallel_func(make_coreg_surfaces,
                                              n_jobs=config.get_n_jobs())

        parallel(run_func(get_config(), subject) for subject in subjects)
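
make_coreg_surfaces is defined elsewhere; in MNE-Python, the head surfaces
used for coregistration are typically generated with
mne.bem.make_scalp_surfaces (a sketch, assuming a finished FreeSurfer
reconstruction; the paths are hypothetical):

import mne

mne.bem.make_scalp_surfaces(subject='sub-01',
                            subjects_dir='/path/to/subjects_dir',
                            force=True, overwrite=True)
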
Example no. 4

def main():
    """Run Time-frequency decomposition."""
    if not config.time_frequency_conditions:
        msg = 'Skipping …'
        logger.info(**gen_log_kwargs(message=msg))
        return

    parallel, run_func, _ = parallel_func(run_time_frequency,
                                          n_jobs=config.get_n_jobs())
    logs = parallel(
        run_func(cfg=get_config(), subject=subject, session=session)
        for subject, session in itertools.product(config.get_subjects(),
                                                  config.get_sessions()))

    config.save_logs(logs)
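
run_time_frequency itself is not shown. The underlying decomposition in
MNE-Python is typically Morlet-wavelet based, e.g. (a sketch; the epochs file
and frequency grid are hypothetical):

import numpy as np

import mne
from mne.time_frequency import tfr_morlet

epochs = mne.read_epochs('sub-01_task-audvis-epo.fif')  # hypothetical file
freqs = np.arange(8., 30., 2.)  # hypothetical frequency grid, in Hz
power, itc = tfr_morlet(epochs, freqs=freqs, n_cycles=freqs / 2.,
                        return_itc=True)
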
Example no. 5

def main():
    """Run epochs."""
    # Here we use fewer n_jobs to prevent potential memory problems
    with config.get_parallel_backend():
        parallel, run_func, _ = parallel_func(run_epochs,
                                              n_jobs=max(
                                                  config.get_n_jobs() // 4, 1))
        logs = parallel(
            run_func(cfg=get_config(subject, session),
                     subject=subject,
                     session=session)
            for subject, session in itertools.product(config.get_subjects(),
                                                      config.get_sessions()))

        config.save_logs(logs)
Example no. 6

def main():
    """Run epochs."""
    with config.get_parallel_backend():
        parallel, run_func, _ = parallel_func(drop_ptp,
                                              n_jobs=config.get_n_jobs())
        logs = parallel(
            run_func(cfg=get_config(), subject=subject, session=session)
            for subject, session in
            itertools.product(
                config.get_subjects(),
                config.get_sessions()
            )
        )

        config.save_logs(logs)
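
drop_ptp is a pipeline-specific worker. In plain MNE-Python, peak-to-peak
rejection is expressed as a reject dictionary passed to Epochs.drop_bad
(a sketch; the file name and thresholds are hypothetical):

import mne

epochs = mne.read_epochs('sub-01_task-audvis-epo.fif')  # hypothetical file
# Drop epochs whose peak-to-peak amplitude exceeds these per-type thresholds.
epochs.drop_bad(reject=dict(grad=3000e-13, mag=4e-12, eeg=150e-6))
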
Example no. 7

def main():
    """Apply ICA."""
    if config.spatial_filter != 'ica':
        msg = 'Skipping …'
        logger.info(**gen_log_kwargs(message=msg))
        return

    with config.get_parallel_backend():
        parallel, run_func, _ = parallel_func(apply_ica,
                                              n_jobs=config.get_n_jobs())
        logs = parallel(
            run_func(cfg=get_config(), subject=subject, session=session)
            for subject, session in itertools.product(config.get_subjects(),
                                                      config.get_sessions()))

        config.save_logs(logs)
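
The apply_ica worker is not shown. Applying a previously fitted ICA solution
in MNE-Python looks roughly like this (a sketch; file names and excluded
components are hypothetical):

import mne
from mne.preprocessing import read_ica

ica = read_ica('sub-01-ica.fif')  # hypothetical fitted ICA solution
epochs = mne.read_epochs('sub-01_task-audvis-epo.fif')  # hypothetical file
ica.exclude = [0, 1]  # components previously marked as artifacts
ica.apply(epochs)  # removes the excluded components in place
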
Example no. 8

def main():
    """Initialize the output directories."""
    msg = 'Running: Initializing output directories.'
    logger.info(**gen_log_kwargs(message=msg))

    with config.get_parallel_backend():
        init_dataset(cfg=get_config())
        parallel, run_func, _ = parallel_func(init_subject_dirs,
                                              n_jobs=config.get_n_jobs())
        parallel(
            run_func(cfg=get_config(), subject=subject, session=session)
            for subject, session in itertools.product(config.get_subjects(),
                                                      config.get_sessions()))

        msg = 'Completed: Initializing output directories.'
        logger.info(**gen_log_kwargs(message=msg))
Example no. 9

def main() -> None:
    """Run freesurfer recon-all command on BIDS dataset.

    This script runs the FreeSurfer recon-all command on
    all subjects of your BIDS dataset. It can run in
    parallel via the --n_jobs parameter.

    It is built on top of the FreeSurfer BIDS app:

    https://github.com/BIDS-Apps/freesurfer

    and the MNE BIDS Pipeline

    https://mne.tools/mne-bids-pipeline

    You must have FreeSurfer available on your system.

    Run via the MNE BIDS Pipeline's `run.py`:

    python run.py --steps=freesurfer --config=your_pipeline_config.py

    """  # noqa

    logger.info('Running FreeSurfer')

    subjects = config.get_subjects()
    root_dir = config.get_bids_root()
    subjects_dir = Path(config.get_fs_subjects_dir())
    subjects_dir.mkdir(parents=True, exist_ok=True)

    with config.get_parallel_backend():
        n_jobs = config.get_n_jobs()
        parallel, run_func, _ = parallel_func(run_recon, n_jobs=n_jobs)
        parallel(run_func(root_dir, subject, fs_bids_app)
                 for subject in subjects)

        # Handle fsaverage
        fsaverage_dir = subjects_dir / 'fsaverage'
        if fsaverage_dir.exists():
            if fsaverage_dir.is_symlink():
                fsaverage_dir.unlink()
            else:
                shutil.rmtree(fsaverage_dir)

        env = os.environ
        shutil.copytree(f"{env['FREESURFER_HOME']}/subjects/fsaverage",
                        subjects_dir / 'fsaverage')
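
The copytree call above assumes the FREESURFER_HOME environment variable is
set and will raise a KeyError otherwise. A slightly more defensive variant
(my own suggestion, not pipeline behaviour) checks for it first:

import os
import shutil
from pathlib import Path

subjects_dir = Path('/path/to/subjects_dir')  # hypothetical location
fs_home = os.environ.get('FREESURFER_HOME')
if fs_home is None:
    raise RuntimeError('FREESURFER_HOME is not set; is FreeSurfer installed?')
shutil.copytree(Path(fs_home) / 'subjects' / 'fsaverage',
                subjects_dir / 'fsaverage')
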
Example no. 10

def main():
    """Run forward."""
    if not config.run_source_estimation:
        msg = '    … skipping: run_source_estimation is set to False.'
        logger.info(**gen_log_kwargs(message=msg))
        return

    with config.get_parallel_backend():
        parallel, run_func, _ = parallel_func(run_forward,
                                              n_jobs=config.get_n_jobs())
        logs = parallel(
            run_func(cfg=get_config(subject=subject),
                     subject=subject,
                     session=session)
            for subject, session in itertools.product(config.get_subjects(),
                                                      config.get_sessions()))

        config.save_logs(logs)
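
run_forward wraps MNE-Python's forward modelling; the core call is
mne.make_forward_solution (a sketch; all file names are hypothetical):

import mne

info = mne.io.read_info('sub-01_task-audvis_raw.fif')  # hypothetical file
fwd = mne.make_forward_solution(info,
                                trans='sub-01-trans.fif',  # hypothetical
                                src='sub-01-src.fif',      # hypothetical
                                bem='sub-01-bem-sol.fif')  # hypothetical
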
Example no. 11

def main():
    """Run maxwell_filter."""
    if not config.use_maxwell_filter:
        msg = 'Skipping …'
        logger.info(**gen_log_kwargs(message=msg))
        return

    with config.get_parallel_backend():
        parallel, run_func, _ = parallel_func(run_maxwell_filter,
                                              n_jobs=config.get_n_jobs())
        logs = parallel(
            run_func(cfg=get_config(subject, session),
                     subject=subject,
                     session=session)
            for subject, session in itertools.product(config.get_subjects(),
                                                      config.get_sessions()))

        config.save_logs(logs)
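
run_maxwell_filter is defined elsewhere; the underlying call in MNE-Python is
mne.preprocessing.maxwell_filter (a sketch; the file name is hypothetical,
and fine-calibration/cross-talk files are omitted):

import mne
from mne.preprocessing import maxwell_filter

raw = mne.io.read_raw_fif('sub-01_task-audvis_raw.fif')  # hypothetical file
raw_sss = maxwell_filter(raw, calibration=None, cross_talk=None)
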
Example no. 12

def main():
    """Run cov."""
    if not config.run_source_estimation:
        msg = '    … skipping: run_source_estimation is set to False.'
        logger.info(**gen_log_kwargs(message=msg))
        return

    if config.noise_cov == "ad-hoc":
        msg = '    … skipping: using ad-hoc diagonal covariance.'
        logger.info(**gen_log_kwargs(message=msg))
        return

    with config.get_parallel_backend():
        parallel, run_func, _ = parallel_func(run_covariance,
                                              n_jobs=config.get_n_jobs())
        logs = parallel(
            run_func(cfg=get_config(), subject=subject, session=session)
            for subject, session in itertools.product(config.get_subjects(),
                                                      config.get_sessions()))

        config.save_logs(logs)
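
For reference, the "ad-hoc" case skipped above corresponds to MNE-Python's
diagonal covariance with fixed per-channel-type variances (a sketch; the
file name is hypothetical):

import mne

info = mne.io.read_info('sub-01_task-audvis_raw.fif')  # hypothetical file
cov = mne.make_ad_hoc_cov(info)  # diagonal covariance, default variances
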
Example no. 13

def main():
    """Run filter."""

    with config.get_parallel_backend():
        parallel, run_func, _ = parallel_func(filter_data,
                                              n_jobs=config.get_n_jobs())

        # Enabling different runs for different subjects
        sub_run_ses = []
        for subject in config.get_subjects():
            sub_run_ses += list(
                itertools.product([subject], config.get_runs(subject=subject),
                                  config.get_sessions()))

        logs = parallel(
            run_func(cfg=get_config(subject),
                     subject=subject,
                     run=run,
                     session=session) for subject, run, session in sub_run_ses)

        config.save_logs(logs)
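
To make the per-subject run handling concrete: for a hypothetical
configuration with two subjects where only subject '01' has two runs,
sub_run_ses expands as follows (an illustration, not pipeline output):

import itertools

runs = {'01': ['01', '02'], '02': ['01']}  # hypothetical per-subject runs
sessions = [None]

sub_run_ses = []
for subject in ['01', '02']:
    sub_run_ses += list(itertools.product([subject], runs[subject], sessions))
# -> [('01', '01', None), ('01', '02', None), ('02', '01', None)]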