Example 1
0
def test_param_mask_deprecation_first_level_models_from_bids():
    """Check that the deprecated ``mask`` keyword emits exactly one
    DeprecationWarning with the expected message, while the replacement
    keyword ``mask_img`` emits none.
    """
    expected_msg = (
        'The parameter "mask" will be removed in next release of Nistats. '
        'Please use the parameter "mask_img" instead.'
    )
    mask_path = '~/masks/mask_01.nii.gz'

    with InTemporaryDirectory():
        bids_path = _create_fake_bids_dataset(
            n_sub=10, n_ses=2, tasks=['localizer', 'main'], n_runs=[1, 3])
        with warnings.catch_warnings(record=True) as raised_warnings:
            # Deprecated keyword: should trigger the warning.
            first_level_models_from_bids(
                bids_path, 'main', 'MNI', [('variant', 'some')],
                mask=mask_path)
            # Replacement keyword: should stay silent.
            first_level_models_from_bids(
                bids_path, 'main', 'MNI', [('variant', 'some')],
                mask_img=mask_path)

    param_deprecation_warnings = []
    for warning_ in raised_warnings:
        if str(warning_.message).startswith('The parameter'):
            param_deprecation_warnings.append(warning_)

    # Only the first (mask=) call warns; the second (mask_img=) does not.
    assert len(param_deprecation_warnings) == 1
    for warning_ in param_deprecation_warnings:
        assert str(warning_.message) == expected_msg
        assert warning_.category is DeprecationWarning
Example 2
0
def test_param_mask_deprecation_first_level_models_from_bids():
    """The deprecated ``mask`` keyword must raise exactly one
    DeprecationWarning; the new ``mask_img`` keyword must raise none.
    """
    deprecation_msg = (
        'The parameter "mask" will be removed in next release of Nistats. '
        'Please use the parameter "mask_img" instead.')
    mask_filepath = '~/masks/mask_01.nii.gz'

    with InTemporaryDirectory():
        bids_path = _create_fake_bids_dataset(
            n_sub=10, n_ses=2, tasks=['localizer', 'main'], n_runs=[1, 3])
        with warnings.catch_warnings(record=True) as raised_warnings:
            # Old-style call: exercises the deprecation path.
            first_level_models_from_bids(
                bids_path, 'main', 'MNI', [('desc', 'preproc')],
                mask=mask_filepath)
            # New-style call: no warning expected.
            first_level_models_from_bids(
                bids_path, 'main', 'MNI', [('desc', 'preproc')],
                mask_img=mask_filepath)

    deprecation_warnings = [
        w for w in raised_warnings
        if str(w.message).startswith('The parameter')
    ]

    assert len(deprecation_warnings) == 1
    for w in deprecation_warnings:
        assert str(w.message) == deprecation_msg
        assert w.category is DeprecationWarning
Example 3
0
def test_first_level_models_from_bids():
    """Validate argument checking and error paths of
    ``first_level_models_from_bids`` on a fake BIDS dataset.

    Covers: bad argument types/values, output length consistency,
    ambiguous variant/space selection, missing confound/event files,
    absent derivatives folder, and datasets without session folders.
    """
    with InTemporaryDirectory():
        bids_path = _create_fake_bids_dataset(n_sub=10, n_ses=2,
                                             tasks=['localizer', 'main'],
                                             n_runs=[1, 3])
        # test arguments are provided correctly
        assert_raises(TypeError, first_level_models_from_bids, 2, 'main', 'MNI')
        assert_raises(ValueError, first_level_models_from_bids, 'lolo', 'main', 'MNI')
        assert_raises(TypeError, first_level_models_from_bids, bids_path, 2, 'MNI')
        assert_raises(TypeError, first_level_models_from_bids,
                      bids_path, 'main', 'MNI', model_init=[])
        # test output is as expected
        models, m_imgs, m_events, m_confounds = first_level_models_from_bids(
            bids_path, 'main', 'MNI', [('variant', 'some')])
        # one imgs/events/confounds entry per returned model
        assert_true(len(models) == len(m_imgs))
        assert_true(len(models) == len(m_events))
        assert_true(len(models) == len(m_confounds))
        # test repeated run tag error when run tag is in filenames
        # can arise when variant or space is present and not specified
        assert_raises(ValueError, first_level_models_from_bids,
                      bids_path, 'main', 'T1w')  # variant not specified
        # test more than one ses file error when run tag is not in filenames
        # can arise when variant or space is present and not specified
        assert_raises(ValueError, first_level_models_from_bids,
                      bids_path, 'localizer', 'T1w')  # variant not specified
        # test issues with confound files. There should be only one confound
        # file per img. An one per image or None. Case when one is missing
        confound_files = get_bids_files(os.path.join(bids_path, 'derivatives'),
                                        file_tag='confounds')
        os.remove(confound_files[-1])
        assert_raises(ValueError, first_level_models_from_bids,
                      bids_path, 'main', 'MNI')
        # test issues with event files
        events_files = get_bids_files(bids_path, file_tag='events')
        os.remove(events_files[0])
        # one file missing
        assert_raises(ValueError, first_level_models_from_bids,
                      bids_path, 'main', 'MNI')
        for f in events_files[1:]:
            os.remove(f)
        # all files missing
        assert_raises(ValueError, first_level_models_from_bids,
                      bids_path, 'main', 'MNI')

        # In case different variant and spaces exist and are not selected we
        # fail and ask for more specific information
        shutil.rmtree(os.path.join(bids_path, 'derivatives'))
        # issue if no derivatives folder is present
        assert_raises(ValueError, first_level_models_from_bids,
                      bids_path, 'main', 'MNI')

        # check runs are not repeated when ses field is not used
        shutil.rmtree(bids_path)
        bids_path = _create_fake_bids_dataset(n_sub=10, n_ses=1,
                                             tasks=['localizer', 'main'],
                                             n_runs=[1, 3], no_session=True)
        # test repeated run tag error when run tag is in filenames and not ses
        # can arise when variant or space is present and not specified
        assert_raises(ValueError, first_level_models_from_bids,
                      bids_path, 'main', 'T1w')  # variant not specified
Example 4
0
def test_first_level_models_from_bids():
    """Validate argument checking and error paths of
    ``first_level_models_from_bids`` on a fake BIDS dataset.

    Covers: bad argument types/values, output length consistency,
    ambiguous variant/space selection, missing confound/event files,
    absent derivatives folder, and datasets without session folders.
    """
    with InTemporaryDirectory():
        bids_path = create_fake_bids_dataset(n_sub=10, n_ses=2,
                                             tasks=['localizer', 'main'],
                                             n_runs=[1, 3])
        # test arguments are provided correctly
        assert_raises(TypeError, first_level_models_from_bids, 2, 'main', 'MNI')
        assert_raises(ValueError, first_level_models_from_bids, 'lolo', 'main', 'MNI')
        assert_raises(TypeError, first_level_models_from_bids, bids_path, 2, 'MNI')
        assert_raises(TypeError, first_level_models_from_bids,
                      bids_path, 'main', 'MNI', model_init=[])
        # test output is as expected
        models, m_imgs, m_events, m_confounds = first_level_models_from_bids(
            bids_path, 'main', 'MNI', [('variant', 'some')])
        # one imgs/events/confounds entry per returned model
        assert_true(len(models) == len(m_imgs))
        assert_true(len(models) == len(m_events))
        assert_true(len(models) == len(m_confounds))
        # test repeated run tag error when run tag is in filenames
        # can arise when variant or space is present and not specified
        assert_raises(ValueError, first_level_models_from_bids,
                      bids_path, 'main', 'T1w')  # variant not specified
        # test more than one ses file error when run tag is not in filenames
        # can arise when variant or space is present and not specified
        assert_raises(ValueError, first_level_models_from_bids,
                      bids_path, 'localizer', 'T1w')  # variant not specified
        # test issues with confound files. There should be only one confound
        # file per img. An one per image or None. Case when one is missing
        confound_files = get_bids_files(os.path.join(bids_path, 'derivatives'),
                                        file_tag='confounds')
        os.remove(confound_files[-1])
        assert_raises(ValueError, first_level_models_from_bids,
                      bids_path, 'main', 'MNI')
        # test issues with event files
        events_files = get_bids_files(bids_path, file_tag='events')
        os.remove(events_files[0])
        # one file missing
        assert_raises(ValueError, first_level_models_from_bids,
                      bids_path, 'main', 'MNI')
        for f in events_files[1:]:
            os.remove(f)
        # all files missing
        assert_raises(ValueError, first_level_models_from_bids,
                      bids_path, 'main', 'MNI')

        # In case different variant and spaces exist and are not selected we
        # fail and ask for more specific information
        shutil.rmtree(os.path.join(bids_path, 'derivatives'))
        # issue if no derivatives folder is present
        assert_raises(ValueError, first_level_models_from_bids,
                      bids_path, 'main', 'MNI')

        # check runs are not repeated when ses field is not used
        shutil.rmtree(bids_path)
        bids_path = create_fake_bids_dataset(n_sub=10, n_ses=1,
                                             tasks=['localizer', 'main'],
                                             n_runs=[1, 3], no_session=True)
        # test repeated run tag error when run tag is in filenames and not ses
        # can arise when variant or space is present and not specified
        assert_raises(ValueError, first_level_models_from_bids,
                      bids_path, 'main', 'T1w')  # variant not specified
Example 5
0
def _make_flm(data_dir):  # pragma: no cover
    """Build and fit a first-level model for the first subject of the
    'stopsignal' task found in *data_dir*.

    Returns the fitted model and the subject identifier ('sub-<label>').
    """
    task_label = 'stopsignal'
    space_label = 'MNI152NLin2009cAsym'
    derivatives_folder = 'derivatives/fmriprep'
    models, run_imgs_per_model, _events, _confounds = \
        first_level_models_from_bids(
                data_dir, task_label, space_label, smoothing_fwhm=5.0,
                derivatives_folder=derivatives_folder)

    # Only the first subject's model and images are used here.
    model = models[0]
    imgs = run_imgs_per_model[0]
    subject = 'sub-' + model.subject_label
    design_matrix = _make_design_matrix_for_bids_feature(data_dir, subject)
    model.fit(imgs, design_matrices=[design_matrix])
    return model, subject
##############################################################################
# Obtain automatically FirstLevelModel objects and fit arguments
# --------------------------------------------------------------
# From the dataset directory we obtain automatically FirstLevelModel objects
# with their subject_id filled from the BIDS dataset. Moreover we obtain
# for each model a dictionary with run_imgs, events and confounder regressors
# since in this case a confounds.tsv file is available in the BIDS dataset.
# To get the first level models we only have to specify the dataset directory
# and the task_label as specified in the file names.
# NOTE(review): `data_dir` is assumed to be defined earlier in the example
# (outside this chunk) — confirm.
from nistats.first_level_model import first_level_models_from_bids
task_label = 'languagelocalizer'
space_label = 'MNI152nonlin2009aAsym'
# The model objects themselves are discarded; only the fit arguments are kept.
_, models_run_imgs, models_events, models_confounds = \
    first_level_models_from_bids(
        data_dir, task_label, space_label,
        img_filters=[('variant', 'smoothResamp')])

#############################################################################
# We also need to get the TR information. For that we use a json file
# of the dataset
import os
json_file = os.path.join(data_dir, 'sub-01', 'ses-02', 'func',
                         'sub-01_ses-02_task-languagelocalizer_bold.json')
import json
# Read the repetition time (TR, in seconds) from the BIDS sidecar file.
with open(json_file, 'r') as f:
    t_r = json.load(f)['RepetitionTime']

#############################################################################
# Project fMRI data to the surface: First get fsaverage5
from nilearn.datasets import fetch_surf_fsaverage
Example 7
0
# Fetch the language-localizer BIDS dataset; the second return value is
# discarded here.
data_dir, _ = fetch_bids_langloc_dataset()

##############################################################################
# Obtain automatically FirstLevelModel objects and fit arguments
# --------------------------------------------------------------
# From the dataset directory we obtain automatically FirstLevelModel objects
# with their subject_id filled from the BIDS dataset. Moreover we obtain
# for each model a dictionary with run_imgs, events and confounder regressors
# since in this case a confounds.tsv file is available in the BIDS dataset.
# To get the first level models we only have to specify the dataset directory
# and the task_label as specified in the file names.
# NOTE(review): `first_level_models_from_bids` and `os` are assumed to be
# imported earlier in the example — confirm.
task_label = 'languagelocalizer'
space_label = 'MNI152nonlin2009aAsym'
models, models_run_imgs, models_events, models_confounds = \
    first_level_models_from_bids(
        data_dir, task_label, space_label,
        img_filters=[('variant', 'smoothResamp')])

#############################################################################
# Quick sanity check on fit arguments
# -----------------------------------
# Additional checks or information extraction from pre-processed data can
# be made here

############################################################################
# We just expect one run img per subject.
print([os.path.basename(run) for run in models_run_imgs[0]])

###############################################################################
# The only confounds stored are regressors obtained from motion correction. As
# we can verify from the column headers of the confounds table corresponding
Example 8
0
##############################################################################
# Obtain automatically FirstLevelModel objects and fit arguments
# --------------------------------------------------------------
# From the dataset directory we obtain automatically FirstLevelModel objects
# with their subject_id filled from the BIDS dataset. Moreover we obtain
# for each model a dictionary with run_imgs, events and confounder regressors
# since in this case a confounds.tsv file is available in the BIDS dataset.
# To get the first level models we only have to specify the dataset directory
# and the task_label as specified in the file names.
# NOTE(review): `data_dir` is assumed to be defined earlier in the example
# (outside this chunk) — confirm.
from nistats.first_level_model import first_level_models_from_bids

task_label = 'languagelocalizer'
# Select only preprocessed images via the BIDS 'desc-preproc' entity.
models, models_run_imgs, models_events, models_confounds = \
    first_level_models_from_bids(
        data_dir, task_label,
        img_filters=[('desc', 'preproc')])

#############################################################################
# Quick sanity check on fit arguments
# -----------------------------------
# Additional checks or information extraction from pre-processed data can
# be made here

############################################################################
# We just expect one run img per subject.
import os

print([os.path.basename(run) for run in models_run_imgs[0]])

###############################################################################
Example 9
0
# From the dataset directory we automatically obtain FirstLevelModel objects
# with their subject_id filled from the BIDS dataset. Moreover we obtain,
# for each model, the list of run images and their respective events and
# confound regressors. Those are inferred from the confounds.tsv files
# available in the BIDS dataset.
# To get the first level models we have to specify the dataset directory,
# the task_label and the space_label as specified in the file names.
# We also have to provide the folder with the desired derivatives, that in this
# case were produced by the fmriprep BIDS app.
# NOTE(review): `data_dir` is assumed to be defined earlier in the example
# (outside this chunk) — confirm.
from nistats.first_level_model import first_level_models_from_bids
task_label = 'stopsignal'
space_label = 'MNI152NLin2009cAsym'
derivatives_folder = 'derivatives/fmriprep'
models, models_run_imgs, models_events, models_confounds = \
    first_level_models_from_bids(
        data_dir, task_label, space_label, smoothing_fwhm=5.0,
        derivatives_folder=derivatives_folder)

#############################################################################
# Access the model and model arguments of the subject and process events.
model, imgs, events, confounds = (models[0], models_run_imgs[0],
                                  models_events[0], models_confounds[0])
subject = 'sub-' + model.subject_label

import os
from nistats.utils import get_design_from_fslmat
# Load the FSL FEAT design matrix produced for this subject.
fsl_design_matrix_path = os.path.join(data_dir, 'derivatives', 'task', subject,
                                      'stopsignal.feat', 'design.mat')
design_matrix = get_design_from_fslmat(fsl_design_matrix_path,
                                       column_names=None)
Example 10
0
# From the dataset directory we obtain automatically FirstLevelModel objects
# with their subject_id filled from the BIDS dataset. Moreover we obtain
# for each model the list of run imgs and their respective events and
# confounder regressors. Confounders are inferred from the confounds.tsv files
# available in the BIDS dataset.
# To get the first level models we have to specify the dataset directory,
# the task_label and the space_label as specified in the file names.
# We also have to provide the folder with the desired derivatives, that in this
# case were produced by the fmriprep BIDS app.
# NOTE(review): `data_dir` is assumed to be defined earlier in the example
# (outside this chunk) — confirm.
from nistats.first_level_model import first_level_models_from_bids
task_label = 'stopsignal'
space_label = 'MNI152NLin2009cAsym'
derivatives_folder = 'derivatives/fmriprep'
models, models_run_imgs, models_events, models_confounds = \
    first_level_models_from_bids(
        data_dir, task_label, space_label, smoothing_fwhm=5.0,
        derivatives_folder=derivatives_folder)

#############################################################################
# Take model and model arguments of the subject and process events
model, imgs, events, confounds = (
    models[0], models_run_imgs[0], models_events[0], models_confounds[0])
subject = 'sub-' + model.subject_label

import os
from nistats.utils import get_design_from_fslmat
# Load the FSL FEAT design matrix produced for this subject.
fsl_design_matrix_path = os.path.join(
    data_dir, 'derivatives', 'task', subject, 'stopsignal.feat', 'design.mat')
design_matrix = get_design_from_fslmat(
    fsl_design_matrix_path, column_names=None)
Example 11
0
from nilearn.image import math_img

plt.set_cmap('coolwarm')
# NOTE(review): `plt` and `first_level_models_from_bids` are assumed to be
# imported earlier in the script — confirm.
DATASET_PATH = '../bids'
# Build one FirstLevelModel per subject for the 'gstroop' task, spelling out
# every modelling parameter explicitly.
# NOTE(review): `mask=` is deprecated in favour of `mask_img=` in newer
# nistats releases — confirm which API version this script targets.
out = first_level_models_from_bids(dataset_path=DATASET_PATH,
                                   task_label='gstroop',
                                   space_label='MNI152NLin2009cAsym',
                                   img_filters=[],
                                   t_r=2.0,
                                   slice_time_ref=0.0,
                                   hrf_model='glover',
                                   drift_model=None,
                                   period_cut=128,
                                   drift_order=1,
                                   fir_delays=[0],
                                   min_onset=-24,
                                   mask=None,
                                   target_affine=None,
                                   target_shape=None,
                                   smoothing_fwhm=5,
                                   memory_level=1,
                                   standardize=False,
                                   signal_scaling=0,
                                   noise_model='ar1',
                                   verbose=100,
                                   n_jobs=2,
                                   minimize_memory=True,
                                   derivatives_folder='derivatives/fmriprep')

conf_cols = [
    'Cosine00', 'Cosine01', 'Cosine02', 'Cosine03', 'Cosine04', 'Cosine05',
    'X', 'Y', 'Z', 'RotX', 'RotY', 'RotZ'