def test_fetch_oasis_vbm():
    """Smoke-test fetch_oasis_vbm against a mocked local URL.

    Checks both the default (DARTEL) and the non-DARTEL variants:
    number of maps returned, types of the returned fields, and the
    cumulative count of URLs requested.
    """
    url = "file://" + get_datadir()
    # Fake the OASIS covariates CSV: 457 subject IDs as a structured array.
    subject_ids = np.asarray(['OAS1_%4d' % num for num in range(457)])
    subject_ids = subject_ids.view(dtype=[('ID', 'S9')])
    get_file_mock().add_csv('oasis_cross-sectional.csv', subject_ids)

    # Disabled: cannot be tested without actually fetching covariates CSV file
    dataset = datasets.fetch_oasis_vbm(data_dir=get_tmpdir(), url=url,
                                       verbose=0)
    # DARTEL release: 403 usable subjects.
    for maps_attr in ('gray_matter_maps', 'white_matter_maps'):
        assert_equal(len(getattr(dataset, maps_attr)), 403)
        assert_true(isinstance(getattr(dataset, maps_attr)[0], _basestring))
    assert_true(isinstance(dataset.ext_vars, np.recarray))
    assert_true(isinstance(dataset.data_usage_agreement, _basestring))
    assert_equal(len(get_url_request().urls), 3)

    dataset = datasets.fetch_oasis_vbm(data_dir=get_tmpdir(), url=url,
                                       dartel_version=False, verbose=0)
    # Non-DARTEL release: 415 usable subjects.
    for maps_attr in ('gray_matter_maps', 'white_matter_maps'):
        assert_equal(len(getattr(dataset, maps_attr)), 415)
        assert_true(isinstance(getattr(dataset, maps_attr)[0], _basestring))
    assert_true(isinstance(dataset.ext_vars, np.recarray))
    assert_true(isinstance(dataset.data_usage_agreement, _basestring))
    assert_equal(len(get_url_request().urls), 4)
def test_fetch_haxby():
    """Smoke-test fetch_haxby for 1 through 5 subjects.

    Verifies the per-subject field lengths and the number of URLs
    requested for each call (the md5 file is only fetched once, on the
    first call).
    """
    per_subject_fields = ('func', 'anat', 'session_target', 'mask_vt',
                          'mask_face', 'mask_house', 'mask_face_little',
                          'mask_house_little')
    for n_sub in range(1, 6):
        haxby = datasets.fetch_haxby(data_dir=get_tmpdir(),
                                     n_subjects=n_sub, verbose=0)
        # subject_data + md5 (md5 is requested only on the first call)
        assert_equal(len(get_url_request().urls), 1 + (n_sub == 1))
        for field in per_subject_fields:
            assert_equal(len(getattr(haxby, field)), n_sub)
        get_url_request().reset()
def test_miyawaki2008():
    """Smoke-test fetch_miyawaki2008: field sizes, types, URL count."""
    bunch = datasets.fetch_miyawaki2008(data_dir=get_tmpdir(), verbose=0)
    # 32 runs, each with a functional image and a label file.
    for field, expected in (('func', 32), ('label', 32), ('mask_roi', 38)):
        assert_equal(len(getattr(bunch, field)), expected)
    assert_true(isinstance(bunch.mask, _basestring))
    assert_equal(len(get_url_request().urls), 1)
def test_fetch_yeo_2011_atlas():
    """Smoke-test fetch_yeo_2011_atlas: every field is a path string."""
    atlas = datasets.fetch_yeo_2011_atlas(data_dir=get_tmpdir(), verbose=0)
    string_fields = ('anat', 'colors_17', 'colors_7', 'thick_17', 'thick_7',
                     'thin_17', 'thin_7')
    for field in string_fields:
        assert_true(isinstance(getattr(atlas, field), _basestring))
    assert_equal(len(get_url_request().urls), 1)
def test_fetch_icbm152_2009():
    """Smoke-test fetch_icbm152_2009: every template field is a path string."""
    template = datasets.fetch_icbm152_2009(data_dir=get_tmpdir(), verbose=0)
    string_fields = ('csf', 'eye_mask', 'face_mask', 'gm', 'mask', 'pd',
                     't1', 't2', 't2_relax', 'wm')
    for field in string_fields:
        assert_true(isinstance(getattr(template, field), _basestring))
    assert_equal(len(get_url_request().urls), 1)
def test_fetch_craddock_2012_atlas():
    """Smoke-test fetch_craddock_2012_atlas: one download, expected paths."""
    bunch = datasets.fetch_craddock_2012_atlas(data_dir=get_tmpdir(),
                                               verbose=0)
    # Map each bunch key to the filename it should resolve to.
    expected_files = {
        "scorr_mean": "scorr05_mean_all.nii.gz",
        "tcorr_mean": "tcorr05_mean_all.nii.gz",
        "scorr_2level": "scorr05_2level_all.nii.gz",
        "tcorr_2level": "tcorr05_2level_all.nii.gz",
        "random": "random_all.nii.gz",
    }
    assert_equal(len(get_url_request().urls), 1)
    for key, filename in expected_files.items():
        assert_equal(bunch[key],
                     os.path.join(get_tmpdir(), 'craddock_2012', filename))
def test_fetch_smith_2009_atlas():
    """Smoke-test fetch_smith_2009: six downloads, expected paths."""
    bunch = datasets.fetch_smith_2009(data_dir=get_tmpdir(), verbose=0)
    # Map each bunch key to the filename it should resolve to.
    expected_files = {
        "rsn20": "rsn20.nii.gz",
        "rsn10": "PNAS_Smith09_rsn10.nii.gz",
        "rsn70": "rsn70.nii.gz",
        "bm20": "bm20.nii.gz",
        "bm10": "PNAS_Smith09_bm10.nii.gz",
        "bm70": "bm70.nii.gz",
    }
    assert_equal(len(get_url_request().urls), 6)
    for key, filename in expected_files.items():
        assert_equal(bunch[key],
                     os.path.join(get_tmpdir(), 'smith_2009', filename))
def test_fetch_msdl_atlas():
    """Smoke-test fetch_msdl_atlas: labels/maps are paths, one URL fetched."""
    atlas = datasets.fetch_msdl_atlas(data_dir=get_tmpdir(), verbose=0)
    for field in ('labels', 'maps'):
        assert_true(isinstance(getattr(atlas, field), _basestring))
    assert_equal(len(get_url_request().urls), 1)