Example No. 1
0
def test_fetch_openneuro_dataset(request_mocker, tmp_path):
    """Exercise ``fetch_openneuro_dataset``: download, bad-URL error, fallback.

    Relies on the ``request_mocker`` fixture, so no real network traffic
    occurs; ``tmp_path`` provides an isolated download directory.
    """
    dataset_version = 'ds000030_R1.0.4'
    # Yields e.g. 'ds000030/ds000030_R1.0.4/uncompressed'.
    data_prefix = '{}/{}/uncompressed'.format(
        dataset_version.split('_')[0],
        dataset_version,
    )
    data_dir = _get_dataset_dir(
        data_prefix,
        data_dir=tmp_path,
        verbose=1,
    )
    url_file = os.path.join(data_dir, 'urls.json')

    # Prepare url files for subject and filter tests
    urls = [
        f'https://example.com/{data_prefix}/stuff.html',
        f'https://example.com/{data_prefix}/sub-xxx.html',
        f'https://example.com/{data_prefix}/sub-yyy.html',
        f'https://example.com/{data_prefix}/sub-xxx/ses-01_task-rest.txt',
        f'https://example.com/{data_prefix}/sub-xxx/ses-01_task-other.txt',
        f'https://example.com/{data_prefix}/sub-xxx/ses-02_task-rest.txt',
        f'https://example.com/{data_prefix}/sub-xxx/ses-02_task-other.txt',
        f'https://example.com/{data_prefix}/sub-yyy/ses-01.txt',
        f'https://example.com/{data_prefix}/sub-yyy/ses-02.txt',
    ]
    # FIX: close the file handle deterministically — the original passed an
    # unclosed ``open(url_file, 'w')`` straight to ``json.dump``, leaking the
    # descriptor and leaving the flush timing up to the garbage collector.
    with open(url_file, 'w') as f:
        json.dump(urls, f)

    # All 9 listed files (subject-specific or not) get downloaded.
    datadir, dl_files = func.fetch_openneuro_dataset(urls, tmp_path,
                                                     dataset_version)
    assert isinstance(datadir, str)
    assert isinstance(dl_files, list)
    assert len(dl_files) == 9

    # URLs do not contain the data_prefix, which should raise a ValueError
    urls = [
        'https://example.com/stuff.html',
        'https://example.com/sub-yyy/ses-01.txt',
    ]
    with pytest.raises(ValueError, match='This indicates that the URLs'):
        func.fetch_openneuro_dataset(urls, tmp_path, dataset_version)

    # Try downloading a different dataset without providing URLs.
    # This should warn and fall back to downloading ds000030.
    with pytest.warns(
            UserWarning,
            match='Downloading "ds000030_R1.0.4".',
    ):
        urls_path, urls = func.fetch_openneuro_dataset(
            urls=None,
            data_dir=tmp_path,
            dataset_version='ds500_v2',
            verbose=1,
        )
Example No. 2
0
def test_fetch_openneuro_dataset(request_mocker, tmp_path):
    """Check ``fetch_openneuro_dataset`` downloads every file listed in urls.json.

    Uses the ``request_mocker`` fixture (no real network) and ``tmp_path``
    as an isolated download directory.
    """
    dataset_version = 'ds000030_R1.0.4'
    data_prefix = '{}/{}/uncompressed'.format(
        dataset_version.split('_')[0], dataset_version)
    data_dir = _get_dataset_dir(data_prefix, data_dir=str(tmp_path), verbose=1)
    url_file = os.path.join(data_dir, 'urls.json')
    # Prepare url files for subject and filter tests
    urls = [
        data_prefix + '/stuff.html', data_prefix + '/sub-xxx.html',
        data_prefix + '/sub-yyy.html',
        data_prefix + '/sub-xxx/ses-01_task-rest.txt',
        data_prefix + '/sub-xxx/ses-01_task-other.txt',
        data_prefix + '/sub-xxx/ses-02_task-rest.txt',
        data_prefix + '/sub-xxx/ses-02_task-other.txt',
        data_prefix + '/sub-yyy/ses-01.txt',
        data_prefix + '/sub-yyy/ses-02.txt'
    ]
    # FIX: use a context manager so the file is flushed and closed — the
    # original passed an unclosed ``open(url_file, 'w')`` to ``json.dump``,
    # leaking the descriptor.
    with open(url_file, 'w') as f:
        json.dump(urls, f)

    # All 9 listed files (subject-specific or not) get downloaded.
    datadir, dl_files = func.fetch_openneuro_dataset(urls, str(tmp_path),
                                                     dataset_version)
    assert isinstance(datadir, str)
    assert isinstance(dl_files, list)
    assert len(dl_files) == 9
Example No. 3
0
# FSL analysis that we can employ for comparison with the Nilearn estimation.
from nilearn.datasets.func import (fetch_openneuro_dataset_index,
                                   fetch_openneuro_dataset, select_from_index)

# Retrieve the index of downloadable file URLs for the OpenNeuro dataset
# (presumably ds000030, given the task labels below — confirm upstream).
_, urls = fetch_openneuro_dataset_index()

# Glob-style patterns for files we do NOT want: group/QC reports, phenotype
# data, derivative spaces, diffusion and behavioral data, and every task
# other than the one analyzed below.
exclusion_patterns = [
    '*group*', '*phenotype*', '*mriqc*', '*parameter_plots*', '*physio_plots*',
    '*space-fsaverage*', '*space-T1w*', '*dwi*', '*beh*', '*task-bart*',
    '*task-rest*', '*task-scap*', '*task-task*'
]
# Keep only the files of a single subject that survive the exclusion filters,
# to limit the download size.
urls = select_from_index(urls,
                         exclusion_filters=exclusion_patterns,
                         n_subjects=1)

# Download the selected files; ``data_dir`` is the local dataset directory.
data_dir, _ = fetch_openneuro_dataset(urls=urls)

##############################################################################
# Obtain FirstLevelModel objects automatically and fit arguments
# ---------------------------------------------------------------
# From the dataset directory we automatically obtain FirstLevelModel objects
# with their subject_id filled from the BIDS dataset. Moreover we obtain,
# for each model, the list of run images and their respective events and
# confound regressors. Those are inferred from the confounds.tsv files
# available in the BIDS dataset.
# To get the first level models we have to specify the dataset directory,
# the task_label and the space_label as specified in the file names.
# We also have to provide the folder with the desired derivatives, that in this
# case were produced by the fmriprep BIDS app.
from nilearn.stats.first_level_model import first_level_models_from_bids
# BIDS task entity selecting which runs to model (used past this excerpt).
task_label = 'stopsignal'