Ejemplo n.º 1
0
def _fetch_bids_data():  # pragma: no cover
    """Download a one-subject BIDS dataset from OpenNeuro and return its path."""
    _, urls = nistats_datasets.fetch_openneuro_dataset_index()

    # Trim the index before downloading: drop group-level results, QC
    # reports, alternate-space derivatives, non-fMRI modalities and the
    # tasks this example does not use, then keep a single subject.
    skip_patterns = [
        '*group*', '*phenotype*', '*mriqc*', '*parameter_plots*',
        '*physio_plots*', '*space-fsaverage*', '*space-T1w*', '*dwi*', '*beh*',
        '*task-bart*', '*task-rest*', '*task-scap*', '*task-task*'
    ]
    urls = nistats_datasets.select_from_index(
        urls,
        exclusion_filters=skip_patterns,
        n_subjects=1,
    )

    data_dir, _ = nistats_datasets.fetch_openneuro_dataset(urls=urls)
    return data_dir
Ejemplo n.º 2
0
def test_select_from_index():
    """Check subject limiting and inclusion/exclusion filtering of an URL index.

    Builds a small synthetic OpenNeuro-style URL index (two subjects plus
    non-subject files) and verifies that ``select_from_index`` keeps the
    right files for each combination of ``n_subjects``,
    ``inclusion_filters`` and ``exclusion_filters``.
    """
    dataset_version = 'ds000030_R1.0.4'
    data_prefix = '{}/{}/uncompressed'.format(
        dataset_version.split('_')[0], dataset_version)
    # Prepare url files for subject and filter tests
    urls = [
        data_prefix + '/stuff.html', data_prefix + '/sub-xxx.html',
        data_prefix + '/sub-yyy.html',
        data_prefix + '/sub-xxx/ses-01_task-rest.txt',
        data_prefix + '/sub-xxx/ses-01_task-other.txt',
        data_prefix + '/sub-xxx/ses-02_task-rest.txt',
        data_prefix + '/sub-xxx/ses-02_task-other.txt',
        data_prefix + '/sub-yyy/ses-01.txt',
        data_prefix + '/sub-yyy/ses-02.txt'
    ]

    # NOTE: plain `assert` replaces nose's deprecated `assert_true`; pytest
    # rewrites these for informative failure messages.
    # Only 1 subject and not subject specific files get downloaded
    new_urls = datasets.select_from_index(urls, n_subjects=1)
    assert len(new_urls) == 6
    assert data_prefix + '/sub-yyy.html' not in new_urls

    # 2 subjects and not subject specific files get downloaded
    new_urls = datasets.select_from_index(urls, n_subjects=2)
    assert len(new_urls) == 9
    assert data_prefix + '/sub-yyy.html' in new_urls
    # ALL subjects and not subject specific files get downloaded
    new_urls = datasets.select_from_index(urls, n_subjects=None)
    assert len(new_urls) == 9

    # test inclusive filters. Only files with task-rest
    new_urls = datasets.select_from_index(urls,
                                          inclusion_filters=['*task-rest*'])
    assert len(new_urls) == 2
    assert data_prefix + '/stuff.html' not in new_urls

    # test exclusive filters. only files without ses-01
    new_urls = datasets.select_from_index(urls, exclusion_filters=['*ses-01*'])
    assert len(new_urls) == 6
    assert data_prefix + '/stuff.html' in new_urls

    # test filter combination. only files with task-rest and without ses-01
    new_urls = datasets.select_from_index(urls,
                                          inclusion_filters=['*task-rest*'],
                                          exclusion_filters=['*ses-01*'])
    assert len(new_urls) == 1
    assert data_prefix + '/sub-xxx/ses-02_task-rest.txt' in new_urls
Ejemplo n.º 3
0
def download_dataset(cfg):
    """
    Download a dataset from OpenNeuro using nistats functions.

    Parameters
    ----------
    cfg : dict
        Must provide 'version' (OpenNeuro dataset version string) and
        'subject' (subject label without the 'sub-' prefix).
    """
    dataset_version = cfg['version']

    _, urls = fetch_openneuro_dataset_index(dataset_version=dataset_version)

    # Just download based on subject for now.
    # Don't want to accidentally ignore anats or field maps.
    sub = 'sub-{0}'.format(cfg['subject'])
    urls = select_from_index(urls)
    # Keep the requested subject's raw files plus any top-level
    # (non-subject) files, skipping everything under derivatives/.
    subject_files = {
        url for url in urls
        if 'derivatives' not in url and sub in url
    }
    shared_files = {
        url for url in urls
        if 'derivatives' not in url and 'sub-' not in url
    }
    urls = sorted(subject_files | shared_files)

    _, _ = fetch_openneuro_dataset(urls=urls,
                                   dataset_version=dataset_version,
                                   data_dir=op.abspath('../data/'))
Ejemplo n.º 4
0
# We download one subject from the stopsignal task in the ds000030 V4 BIDS
# dataset available in openneuro.
# This dataset contains the necessary information to run a statistical analysis
# using Nistats. The dataset also contains statistical results from a previous
# FSL analysis that we can employ for comparison with the Nistats estimation.
from nistats.datasets import (fetch_openneuro_dataset,
                              fetch_openneuro_dataset_index,
                              select_from_index)

# Fetch the full index of files available for the dataset.
_, urls = fetch_openneuro_dataset_index()

# Restrict the index before downloading: skip group-level results, QC
# reports, alternate-space derivatives, non-fMRI modalities and the tasks
# this example does not analyse.
exclusion_patterns = ['*group*',
                      '*phenotype*',
                      '*mriqc*',
                      '*parameter_plots*',
                      '*physio_plots*',
                      '*space-fsaverage*',
                      '*space-T1w*',
                      '*dwi*',
                      '*beh*',
                      '*task-bart*',
                      '*task-rest*',
                      '*task-scap*',
                      '*task-task*']
urls = select_from_index(
    urls, exclusion_filters=exclusion_patterns, n_subjects=1)

# Download the remaining files for one subject.
data_dir, _ = fetch_openneuro_dataset(urls=urls)

##############################################################################
# Obtain FirstLevelModel objects automatically and fit arguments
# ---------------------------------------------------------------
# From the dataset directory we automatically obtain FirstLevelModel objects
# with their subject_id filled from the BIDS dataset. Moreover we obtain,
# for each model, the list of run images and their respective events and
# confound regressors. Those are inferred from the confounds.tsv files
# available in the BIDS dataset.
# To get the first level models we have to specify the dataset directory,
# the task_label and the space_label as specified in the file names.
# We also have to provide the folder with the desired derivatives, that in this
# case were produced by the fmriprep BIDS app.
Ejemplo n.º 5
0
# We download one subject from the stopsignal task in the ds000030 V4 BIDS
# dataset available in openneuro.
# This dataset contains the necessary information to run a statistical analysis
# using Nistats. The dataset also contains statistical results from a previous
# FSL analysis that we can employ for comparison with the Nistats estimation.
from nistats.datasets import (fetch_openneuro_dataset,
                              fetch_openneuro_dataset_index,
                              select_from_index)

# Fetch the index of every file available for the dataset.
_, urls = fetch_openneuro_dataset_index()

# Filter the index before downloading: leave out group-level results,
# QC reports, alternate-space derivatives, non-fMRI modalities and the
# tasks that are not analysed in this example.
exclusion_patterns = [
    '*group*', '*phenotype*', '*mriqc*', '*parameter_plots*',
    '*physio_plots*', '*space-fsaverage*', '*space-T1w*', '*dwi*',
    '*beh*', '*task-bart*', '*task-rest*', '*task-scap*', '*task-task*',
]
urls = select_from_index(urls,
                         exclusion_filters=exclusion_patterns,
                         n_subjects=1)

# Download the selected files for a single subject.
data_dir, _ = fetch_openneuro_dataset(urls=urls)

##############################################################################
# Obtain FirstLevelModel objects automatically and fit arguments
# ---------------------------------------------------------------
# From the dataset directory we automatically obtain FirstLevelModel objects
# with their subject_id filled from the BIDS dataset. Moreover we obtain,
# for each model, the list of run images and their respective events and
# confound regressors. Confounds are inferred from the confounds.tsv files
# available in the BIDS dataset.
# To get the first level models we have to specify the dataset directory,
# the task_label and the space_label as specified in the file names.
# We also have to provide the folder with the desired derivatives, that in this
# case were produced by the fmriprep BIDS app.