Example #1
def test_fetch_oasis_vbm():
    local_url = "file://" + get_datadir()
    ids = np.asarray(['OAS1_%4d' % i for i in range(457)])
    ids = ids.view(dtype=[('ID', 'S9')])
    get_file_mock().add_csv('oasis_cross-sectional.csv', ids)

    # Disabled: cannot be tested without actually fetching covariates CSV file
    dataset = datasets.fetch_oasis_vbm(data_dir=get_tmpdir(), url=local_url,
                                       verbose=0)
    assert_equal(len(dataset.gray_matter_maps), 403)
    assert_equal(len(dataset.white_matter_maps), 403)
    assert_true(isinstance(dataset.gray_matter_maps[0], _basestring))
    assert_true(isinstance(dataset.white_matter_maps[0], _basestring))
    assert_true(isinstance(dataset.ext_vars, np.recarray))
    assert_true(isinstance(dataset.data_usage_agreement, _basestring))
    assert_equal(len(get_url_request().urls), 3)

    dataset = datasets.fetch_oasis_vbm(data_dir=get_tmpdir(), url=local_url,
                                       dartel_version=False, verbose=0)
    assert_equal(len(dataset.gray_matter_maps), 415)
    assert_equal(len(dataset.white_matter_maps), 415)
    assert_true(isinstance(dataset.gray_matter_maps[0], _basestring))
    assert_true(isinstance(dataset.white_matter_maps[0], _basestring))
    assert_true(isinstance(dataset.ext_vars, np.recarray))
    assert_true(isinstance(dataset.data_usage_agreement, _basestring))
    assert_equal(len(get_url_request().urls), 4)
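Every snippet in this listing leans on names that are never shown here: nose-style assertions, a datasets module with the fetch_* functions, a _basestring compatibility alias, and the get_tmpdir / get_datadir / get_file_mock / get_url_request fixtures that mock downloads. A minimal preamble the snippets appear to assume is sketched below; the module paths are guesses inferred from usage, not taken from the listing.

# Hypothetical preamble for these snippets; module paths are assumptions
# inferred from usage (an older nilearn-style test suite), not from the source.
import os

import numpy as np
import nibabel

from nose.tools import assert_true, assert_equal, assert_raises
from nilearn import datasets                      # fetch_* functions used below
from nilearn._utils.compat import _basestring     # py2/py3 string base type

# assert_raises_regex and the get_tmpdir / get_datadir / get_file_mock /
# get_url_request helpers come from the project's own test utilities (they mock
# downloads so no network traffic occurs); they are placeholders here.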
Example #2
def test_fail_fetch_harvard_oxford():
    # specify non-existing atlas item
    assert_raises_regex(ValueError, 'Invalid atlas name',
                        datasets.fetch_harvard_oxford, 'not_inside')

    # specify existing atlas item
    target_atlas = 'cort-maxprob-thr0-1mm'
    target_atlas_fname = 'HarvardOxford-' + target_atlas + '.nii.gz'

    HO_dir = os.path.join(get_tmpdir(), 'harvard_oxford')
    os.mkdir(HO_dir)
    nifti_dir = os.path.join(HO_dir, 'HarvardOxford')
    os.mkdir(nifti_dir)

    target_atlas_nii = os.path.join(nifti_dir, target_atlas_fname)
    datasets.load_mni152_template().to_filename(target_atlas_nii)

    with open(os.path.join(HO_dir, 'HarvardOxford-Cortical.xml'), 'w') as dummy:
        dummy.write("<?xml version='1.0' encoding='us-ascii'?> "
                    "<metadata>"
                    "</metadata>")

    out_nii, arr = datasets.fetch_harvard_oxford(target_atlas, data_dir=get_tmpdir())

    assert_true(isinstance(nibabel.load(out_nii), nibabel.Nifti1Image))
    assert_true(isinstance(arr, np.ndarray))
    assert_true(len(arr) > 0)
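The assertions above only check the types of the returned pair; assuming that same (atlas path, label array) return, a hedged sketch of how the fetched atlas might be inspected:

# Sketch only: assumes fetch_harvard_oxford returns (atlas_path, labels) as the
# test above implies, and that the atlas has already been fetched to data_dir.
atlas_path, labels = datasets.fetch_harvard_oxford('cort-maxprob-thr0-1mm',
                                                   data_dir=get_tmpdir())
atlas_img = nibabel.load(atlas_path)           # max-probability label image
atlas_data = np.asarray(atlas_img.dataobj)     # integer label volume
print(atlas_data.shape, len(labels))           # labels come from the atlas XML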
Example #3
def test_fail_fetch_haxby_simple():
    # Test a dataset fetching failure to validate sandboxing
    local_url = "file://" + os.path.join(get_datadir(), "pymvpa-exampledata.tar.bz2")
    datasetdir = os.path.join(get_tmpdir(), 'haxby2001_simple', 'pymvpa-exampledata')
    os.makedirs(datasetdir)
    # Create a dummy file. If sandboxing is successful, it won't be overwritten
    with open(os.path.join(datasetdir, 'attributes.txt'), 'w') as dummy:
        dummy.write('stuff')

    path = 'pymvpa-exampledata'

    opts = {'uncompress': True}
    files = [
            (os.path.join(path, 'attributes.txt'), local_url, opts),
            # The following file does not exist. It will cause the fetching
            # procedure to abort
            (os.path.join(path, 'bald.nii.gz'), local_url, opts)
    ]

    assert_raises(IOError, fetchers.fetch_files,
            os.path.join(get_tmpdir(), 'haxby2001_simple'), files,
            verbose=0)
    with open(os.path.join(datasetdir, 'attributes.txt'), 'r') as dummy:
        stuff = dummy.read(5)
    assert_equal(stuff, 'stuff')
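The failure path above exercises the (relative path, url, options) files specification. For contrast, a hedged sketch of the success path with the same helper, assuming fetchers.fetch_files keeps the (target directory, files, verbose) signature used in the test and returns the local paths of the fetched files:

# Sketch only: same files-spec format as the test, but every listed file exists
# in the archive, so fetching should succeed instead of raising IOError.
local_url = "file://" + os.path.join(get_datadir(), "pymvpa-exampledata.tar.bz2")
files = [
    # (path expected after extraction, source URL, download options)
    (os.path.join('pymvpa-exampledata', 'attributes.txt'), local_url,
     {'uncompress': True}),
    (os.path.join('pymvpa-exampledata', 'bold.nii.gz'), local_url,
     {'uncompress': True}),
]
paths = fetchers.fetch_files(os.path.join(get_tmpdir(), 'haxby2001_simple'),
                             files, verbose=0)   # assumed: list of local paths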
Example #4
def test_fetch_localizer_calculation_task():
    local_url = "file://" + get_datadir()
    ids = np.asarray(['S%2d' % i for i in range(94)])
    ids = ids.view(dtype=[('subject_id', 'S3')])
    get_file_mock().add_csv('cubicwebexport.csv', ids)
    get_file_mock().add_csv('cubicwebexport2.csv', ids)

    # Disabled: cannot be tested without actually fetching covariates CSV file
    # All subjects
    dataset = datasets.fetch_localizer_calculation_task(data_dir=get_tmpdir(),
                                                        url=local_url,
                                                        verbose=0)
    assert_true(isinstance(dataset.ext_vars, np.recarray))
    assert_true(isinstance(dataset.cmaps[0], _basestring))
    assert_equal(dataset.ext_vars.size, 94)
    assert_equal(len(dataset.cmaps), 94)

    # 20 subjects
    dataset = datasets.fetch_localizer_calculation_task(n_subjects=20,
                                                        data_dir=get_tmpdir(),
                                                        url=local_url,
                                                        verbose=0)
    assert_true(isinstance(dataset.ext_vars, np.recarray))
    assert_true(isinstance(dataset.cmaps[0], _basestring))
    assert_equal(dataset.ext_vars.size, 20)
    assert_equal(len(dataset.cmaps), 20)
Example #5
def test_fetch_haxby_simple():
    local_url = "file://" + os.path.join(get_datadir(), "pymvpa-exampledata.tar.bz2")
    haxby = datasets.fetch_haxby_simple(data_dir=get_tmpdir(), url=local_url,
                                        verbose=0)
    datasetdir = os.path.join(get_tmpdir(), 'haxby2001_simple', 'pymvpa-exampledata')
    for key, file in [
            ('session_target', 'attributes.txt'),
            ('func', 'bold.nii.gz'),
            ('mask', 'mask.nii.gz'),
            ('conditions_target', 'attributes_literal.txt')]:
        assert_equal(haxby[key], os.path.join(datasetdir, file))
        assert_true(os.path.exists(os.path.join(datasetdir, file)))
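The keys checked above map one-to-one to files on disk; assuming the fetch succeeds as in this test, loading them needs nothing beyond numpy and nibabel. A hedged sketch:

# Sketch only: load the files whose paths the loop above verifies.
haxby = datasets.fetch_haxby_simple(data_dir=get_tmpdir(), url=local_url,
                                    verbose=0)
bold_img = nibabel.load(haxby['func'])             # 4D BOLD time series
mask_img = nibabel.load(haxby['mask'])             # brain mask
targets = np.recfromcsv(haxby['session_target'],
                        delimiter=" ")             # assumed space-separated table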
Example #6
def test_fetch_craddock_2012_atlas():
    bunch = datasets.fetch_craddock_2012_atlas(data_dir=get_tmpdir(), verbose=0)

    keys = ("scorr_mean", "tcorr_mean",
            "scorr_2level", "tcorr_2level",
            "random")
    filenames = [
            "scorr05_mean_all.nii.gz",
            "tcorr05_mean_all.nii.gz",
            "scorr05_2level_all.nii.gz",
            "tcorr05_2level_all.nii.gz",
            "random_all.nii.gz",
    ]
    assert_equal(len(get_url_request().urls), 1)
    for key, fn in zip(keys, filenames):
        assert_equal(bunch[key], os.path.join(get_tmpdir(), 'craddock_2012', fn))
Example #7
def test_miyawaki2008():
    dataset = datasets.fetch_miyawaki2008(data_dir=get_tmpdir(), verbose=0)
    assert_equal(len(dataset.func), 32)
    assert_equal(len(dataset.label), 32)
    assert_true(isinstance(dataset.mask, _basestring))
    assert_equal(len(dataset.mask_roi), 38)
    assert_equal(len(get_url_request().urls), 1)
Example #8
def test_fetch_smith_2009_atlas():
    bunch = datasets.fetch_smith_2009(data_dir=get_tmpdir(), verbose=0)

    keys = ("rsn20", "rsn10", "rsn70",
            "bm20", "bm10", "bm70")
    filenames = [
            "rsn20.nii.gz",
            "PNAS_Smith09_rsn10.nii.gz",
            "rsn70.nii.gz",
            "bm20.nii.gz",
            "PNAS_Smith09_bm10.nii.gz",
            "bm70.nii.gz",
    ]

    assert_equal(len(get_url_request().urls), 6)
    for key, fn in zip(keys, filenames):
        assert_equal(bunch[key], os.path.join(get_tmpdir(), 'smith_2009', fn))
Example #9
def test_fetch_yeo_2011_atlas():
    dataset = datasets.fetch_yeo_2011_atlas(data_dir=get_tmpdir(), verbose=0)
    assert_true(isinstance(dataset.anat, _basestring))
    assert_true(isinstance(dataset.colors_17, _basestring))
    assert_true(isinstance(dataset.colors_7, _basestring))
    assert_true(isinstance(dataset.thick_17, _basestring))
    assert_true(isinstance(dataset.thick_7, _basestring))
    assert_true(isinstance(dataset.thin_17, _basestring))
    assert_true(isinstance(dataset.thin_7, _basestring))
    assert_equal(len(get_url_request().urls), 1)
Example #10
def test_fetch_icbm152_2009():
    dataset = datasets.fetch_icbm152_2009(data_dir=get_tmpdir(), verbose=0)
    assert_true(isinstance(dataset.csf, _basestring))
    assert_true(isinstance(dataset.eye_mask, _basestring))
    assert_true(isinstance(dataset.face_mask, _basestring))
    assert_true(isinstance(dataset.gm, _basestring))
    assert_true(isinstance(dataset.mask, _basestring))
    assert_true(isinstance(dataset.pd, _basestring))
    assert_true(isinstance(dataset.t1, _basestring))
    assert_true(isinstance(dataset.t2, _basestring))
    assert_true(isinstance(dataset.t2_relax, _basestring))
    assert_true(isinstance(dataset.wm, _basestring))
    assert_equal(len(get_url_request().urls), 1)
Example #11
def test_fetch_haxby():
    for i in range(1, 6):
        haxby = datasets.fetch_haxby(data_dir=get_tmpdir(), n_subjects=i,
                                     verbose=0)
        assert_equal(len(get_url_request().urls), 1 + (i == 1))  # subject_data + md5
        assert_equal(len(haxby.func), i)
        assert_equal(len(haxby.anat), i)
        assert_equal(len(haxby.session_target), i)
        assert_equal(len(haxby.mask_vt), i)
        assert_equal(len(haxby.mask_face), i)
        assert_equal(len(haxby.mask_house), i)
        assert_equal(len(haxby.mask_face_little), i)
        assert_equal(len(haxby.mask_house_little), i)
        get_url_request().reset()
Example #12
def test_fetch_msdl_atlas():
    dataset = datasets.fetch_msdl_atlas(data_dir=get_tmpdir(), verbose=0)
    assert_true(isinstance(dataset.labels, _basestring))
    assert_true(isinstance(dataset.maps, _basestring))
    assert_equal(len(get_url_request().urls), 1)
Example #13
def test_fetch_localizer_contrasts():
    local_url = "file://" + get_datadir()
    ids = np.asarray([('S%2d' % i).encode() for i in range(94)])
    ids = ids.view(dtype=[('subject_id', 'S3')])
    get_file_mock().add_csv('cubicwebexport.csv', ids)
    get_file_mock().add_csv('cubicwebexport2.csv', ids)

    # Disabled: cannot be tested without actually fetching covariates CSV file
    # All subjects
    dataset = datasets.fetch_localizer_contrasts(["checkerboard"],
                                                 data_dir=get_tmpdir(),
                                                 url=local_url,
                                                 verbose=0)
    assert_true(dataset.anats is None)
    assert_true(dataset.tmaps is None)
    assert_true(dataset.masks is None)
    assert_true(isinstance(dataset.ext_vars, np.recarray))
    assert_true(isinstance(dataset.cmaps[0], _basestring))
    assert_equal(dataset.ext_vars.size, 94)
    assert_equal(len(dataset.cmaps), 94)

    # 20 subjects
    dataset = datasets.fetch_localizer_contrasts(["checkerboard"],
                                                 n_subjects=20,
                                                 data_dir=get_tmpdir(),
                                                 url=local_url,
                                                 verbose=0)
    assert_true(dataset.anats is None)
    assert_true(dataset.tmaps is None)
    assert_true(dataset.masks is None)
    assert_true(isinstance(dataset.cmaps[0], _basestring))
    assert_true(isinstance(dataset.ext_vars, np.recarray))
    assert_equal(len(dataset.cmaps), 20)
    assert_equal(dataset.ext_vars.size, 20)

    # Multiple contrasts
    dataset = datasets.fetch_localizer_contrasts(
        ["checkerboard", "horizontal checkerboard"],
        n_subjects=20, data_dir=get_tmpdir(),
        verbose=0)
    assert_true(dataset.anats is None)
    assert_true(dataset.tmaps is None)
    assert_true(dataset.masks is None)
    assert_true(isinstance(dataset.ext_vars, np.recarray))
    assert_true(isinstance(dataset.cmaps[0], _basestring))
    assert_equal(len(dataset.cmaps), 20 * 2)  # two contrasts are fetched
    assert_equal(dataset.ext_vars.size, 20)

    # get_anats=True
    dataset = datasets.fetch_localizer_contrasts(["checkerboard"],
                                                 data_dir=get_tmpdir(),
                                                 url=local_url,
                                                 get_anats=True,
                                                 verbose=0)
    assert_true(dataset.masks is None)
    assert_true(dataset.tmaps is None)
    assert_true(isinstance(dataset.ext_vars, np.recarray))
    assert_true(isinstance(dataset.anats[0], _basestring))
    assert_true(isinstance(dataset.cmaps[0], _basestring))
    assert_equal(dataset.ext_vars.size, 94)
    assert_equal(len(dataset.anats), 94)
    assert_equal(len(dataset.cmaps), 94)

    # get_masks=True
    dataset = datasets.fetch_localizer_contrasts(["checkerboard"],
                                                 data_dir=get_tmpdir(),
                                                 url=local_url,
                                                 get_masks=True,
                                                 verbose=0)
    assert_true(dataset.anats is None)
    assert_true(dataset.tmaps is None)
    assert_true(isinstance(dataset.ext_vars, np.recarray))
    assert_true(isinstance(dataset.cmaps[0], _basestring))
    assert_true(isinstance(dataset.masks[0], _basestring))
    assert_equal(dataset.ext_vars.size, 94)
    assert_equal(len(dataset.cmaps), 94)
    assert_equal(len(dataset.masks), 94)

    # get_tmaps=True
    dataset = datasets.fetch_localizer_contrasts(["checkerboard"],
                                                 data_dir=get_tmpdir(),
                                                 url=local_url,
                                                 get_tmaps=True,
                                                 verbose=0)
    assert_true(dataset.anats is None)
    assert_true(dataset.masks is None)
    assert_true(isinstance(dataset.ext_vars, np.recarray))
    assert_true(isinstance(dataset.cmaps[0], _basestring))
    assert_true(isinstance(dataset.tmaps[0], _basestring))
    assert_equal(dataset.ext_vars.size, 94)
    assert_equal(len(dataset.cmaps), 94)
    assert_equal(len(dataset.tmaps), 94)

    # all get_*=True
    dataset = datasets.fetch_localizer_contrasts(["checkerboard"],
                                                 data_dir=get_tmpdir(),
                                                 url=local_url,
                                                 get_anats=True,
                                                 get_masks=True,
                                                 get_tmaps=True,
                                                 verbose=0)

    assert_true(isinstance(dataset.ext_vars, np.recarray))
    assert_true(isinstance(dataset.anats[0], _basestring))
    assert_true(isinstance(dataset.cmaps[0], _basestring))
    assert_true(isinstance(dataset.masks[0], _basestring))
    assert_true(isinstance(dataset.tmaps[0], _basestring))
    assert_equal(dataset.ext_vars.size, 94)
    assert_equal(len(dataset.anats), 94)
    assert_equal(len(dataset.cmaps), 94)
    assert_equal(len(dataset.masks), 94)
    assert_equal(len(dataset.tmaps), 94)
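The equal lengths asserted above suggest the per-subject outputs are parallel lists; assuming that, a hedged sketch of pairing them up after a call with all get_*=True:

# Sketch only: assumes cmaps / tmaps / anats / masks are parallel per-subject
# lists, which is what the matching lengths in the test above suggest.
dataset = datasets.fetch_localizer_contrasts(["checkerboard"],
                                             data_dir=get_tmpdir(),
                                             url=local_url,
                                             get_anats=True, get_masks=True,
                                             get_tmaps=True, verbose=0)
for cmap, tmap, anat, mask in zip(dataset.cmaps, dataset.tmaps,
                                  dataset.anats, dataset.masks):
    pass  # one subject's contrast map, t-map, anatomical image and mask paths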