def test_make_fetcher():
    """Check that ``fetcher._make_fetcher`` downloads a file over HTTP and
    that the fetched copy's md5 matches the source file's md5.

    Serves the directory containing the ``symmetric362`` sphere file with a
    local ``SimpleHTTPRequestHandler`` server, builds a fetcher pointing at
    it, runs the fetcher into a temporary directory, and compares checksums.
    """
    symmetric362 = SPHERE_FILES['symmetric362']
    with TemporaryDirectory() as tmpdir:
        stored_md5 = fetcher._get_file_md5(symmetric362)
        # create local HTTP Server
        testfile_folder = op.split(symmetric362)[0] + os.sep
        test_server_url = "http://127.0.0.1:8000/"
        print(testfile_folder)
        print(symmetric362)
        current_dir = os.getcwd()
        # SimpleHTTPRequestHandler serves the *current* directory, so change
        # pwd to the directory containing the test file.
        os.chdir(testfile_folder)
        server = HTTPServer(('localhost', 8000), SimpleHTTPRequestHandler)
        server_thread = Thread(target=server.serve_forever)
        # BUG FIX: the attribute is ``daemon``, not ``deamon`` — the typo
        # created a harmless new attribute and left the thread non-daemonic,
        # which can hang interpreter shutdown if the server is not stopped.
        server_thread.daemon = True
        server_thread.start()
        try:
            # test make_fetcher
            sphere_fetcher = fetcher._make_fetcher(
                "sphere_fetcher", tmpdir, test_server_url,
                [op.split(symmetric362)[-1]], ["sphere_name"],
                md5_list=[stored_md5])
            sphere_fetcher()
            assert op.isfile(op.join(tmpdir, "sphere_name"))
            npt.assert_equal(
                fetcher._get_file_md5(op.join(tmpdir, "sphere_name")),
                stored_md5)
        finally:
            # Stop the local HTTP server and release its socket even when an
            # assertion above fails, then restore the working directory.
            server.shutdown()
            server.server_close()
            os.chdir(current_dir)
def test_make_fetcher():
    """Check that ``fetcher._make_fetcher`` retrieves a file and that the
    downloaded copy's md5 equals the source file's md5.

    A ``file:`` URL is used for the base URL, so no HTTP server is needed.
    """
    symmetric362 = SPHERE_FILES['symmetric362']
    with TemporaryDirectory() as tmpdir:
        stored_md5 = fetcher._get_file_md5(symmetric362)
        # Expose the folder holding the test file as a file:// URL.
        source_folder = op.split(symmetric362)[0] + op.sep
        testfile_url = urljoin("file:", pathname2url(source_folder))
        print(testfile_url)
        print(symmetric362)
        sphere_fetcher = fetcher._make_fetcher(
            "sphere_fetcher", tmpdir, testfile_url,
            [op.split(symmetric362)[-1]], ["sphere_name"],
            md5_list=[stored_md5])
        sphere_fetcher()
        fetched_path = op.join(tmpdir, "sphere_name")
        assert op.isfile(fetched_path)
        npt.assert_equal(fetcher._get_file_md5(fetched_path), stored_md5)
# md5 checksums for the callosum template files; entries pair positionally
# with callosum_remote_fnames / callosum_fnames (defined elsewhere in this
# file — TODO confirm ordering against those lists).
callosum_md5_hashes = [
    "709fa90baadeacd64f1d62b5049a4125",
    "987c6169de807c4e93dc2cbd7a25d506",
    "0da114123d0b0097b96fe450a459550b",
    "6d845bd10504f67f1dc17f9000076d7e",
    "e16c7873ef4b08d26b77ef746dab8237",
    "47193fd4df1ea17367817466de798b90",
    "7e78bf9671e6945f4b2f5e7c30595a3c",
    "8adbb947377ff7b484c88d8c0ffc2125",
    "0fd981a4d0847e0642ff96e84fe44e47",
    "87c4855efa406d8fb004cffb8259180e",
    "c7969bcf5f2343fd9ce9c49b336cf14c",
    "bb4372b88991932150205ffb22aa6cb7",
    "d198d4e7db18ddc7236cf143ecb8342e",
    "d0f6edef64b0c710c92e634496085dda",
    "85eaee44665f244db5adae2e259833f6",
    "25f24eb22879a05d12bda007c81ea55a",
    "2664e0b8c2d9c59f13649a89bfcce399"
]

# Fetcher that downloads the AFQ callosum template files into
# <afq_home>/callosum_templates, verifying each against its md5 above.
fetch_callosum_templates = _make_fetcher("fetch_callosum_templates",
                                         op.join(afq_home,
                                                 'callosum_templates'),
                                         baseurl, callosum_remote_fnames,
                                         callosum_fnames,
                                         md5_list=callosum_md5_hashes,
                                         doc="Download AFQ callosum templates")


def read_callosum_templates(resample_to=False):
    """Load AFQ callosum templates from file

    Parameters
    ----------
    resample_to : bool, optional
        Not used in the portion of the function visible here — presumably a
        target image to resample the templates to; confirm in the full file.

    Returns
    -------
    dict with: keys: names of template ROIs and values: nibabel Nifti1Image
    objects from each of the ROI nifti files.
    """
    # Trigger the download (no-op if files are cached) and get their folder.
    files, folder = fetch_callosum_templates()
    template_dict = {}
    # NOTE(review): the function body appears truncated at this point in the
    # visible source — the loop filling template_dict presumably follows.
# md5 checksums for the callosum template files; entries pair positionally
# with callosum_remote_fnames / callosum_fnames (defined elsewhere in this
# file — TODO confirm ordering against those lists).
callosum_md5_hashes = [
    "709fa90baadeacd64f1d62b5049a4125",
    "987c6169de807c4e93dc2cbd7a25d506",
    "0da114123d0b0097b96fe450a459550b",
    "6d845bd10504f67f1dc17f9000076d7e",
    "e16c7873ef4b08d26b77ef746dab8237",
    "47193fd4df1ea17367817466de798b90",
    "7e78bf9671e6945f4b2f5e7c30595a3c",
    "8adbb947377ff7b484c88d8c0ffc2125",
    "0fd981a4d0847e0642ff96e84fe44e47",
    "87c4855efa406d8fb004cffb8259180e",
    "c7969bcf5f2343fd9ce9c49b336cf14c",
    "bb4372b88991932150205ffb22aa6cb7",
    "d198d4e7db18ddc7236cf143ecb8342e",
    "d0f6edef64b0c710c92e634496085dda",
    "85eaee44665f244db5adae2e259833f6",
    "25f24eb22879a05d12bda007c81ea55a",
    "2664e0b8c2d9c59f13649a89bfcce399"
]

# Fetcher that downloads the AFQ callosum template files into
# <afq_home>/callosum_templates, verifying each against its md5 above.
fetch_callosum_templates = _make_fetcher("fetch_callosum_templates",
                                         op.join(afq_home,
                                                 'callosum_templates'),
                                         baseurl, callosum_remote_fnames,
                                         callosum_fnames,
                                         md5_list=callosum_md5_hashes,
                                         doc="Download AFQ callosum templates")


def read_callosum_templates():
    """Load AFQ callosum templates from file

    Returns
    -------
    dict with: keys: names of template ROIs and values: nibabel Nifti1Image
    objects from each of the ROI nifti files.
    """
    # Trigger the download (no-op if files are cached) and get their folder.
    files, folder = fetch_callosum_templates()
    template_dict = {}
    # NOTE(review): the function body appears truncated at this point in the
    # visible source — the loop filling template_dict presumably follows.
    # NOTE(review): this span continues an md5-hash list whose opening
    # bracket is on an earlier line not visible here; entries pair
    # positionally with the callosum template filenames.
    "7e78bf9671e6945f4b2f5e7c30595a3c",
    "8adbb947377ff7b484c88d8c0ffc2125",
    "0fd981a4d0847e0642ff96e84fe44e47",
    "87c4855efa406d8fb004cffb8259180e",
    "c7969bcf5f2343fd9ce9c49b336cf14c",
    "bb4372b88991932150205ffb22aa6cb7",
    "d198d4e7db18ddc7236cf143ecb8342e",
    "d0f6edef64b0c710c92e634496085dda",
    "85eaee44665f244db5adae2e259833f6",
    "25f24eb22879a05d12bda007c81ea55a",
    "2664e0b8c2d9c59f13649a89bfcce399"]

# Fetcher that downloads the AFQ callosum template files into
# <afq_home>/callosum_templates, verifying each against its md5 above.
fetch_callosum_templates = _make_fetcher("fetch_callosum_templates",
                                         op.join(afq_home,
                                                 'callosum_templates'),
                                         baseurl, callosum_remote_fnames,
                                         callosum_fnames,
                                         md5_list=callosum_md5_hashes,
                                         doc="Download AFQ callosum templates")


def read_callosum_templates():
    """Load AFQ callosum templates from file

    Returns
    -------
    dict with: keys: names of template ROIs and values: nibabel Nifti1Image
    objects from each of the ROI nifti files.
    """
    # Trigger the download (no-op if files are cached) and get their folder.
    files, folder = fetch_callosum_templates()
    template_dict = {}
    # NOTE(review): the function body appears truncated at this point in the
    # visible source — the loop filling template_dict presumably follows.
# BUG FIX: `os` is used below (os.getenv) but was not imported in the
# visible import block; without it this module raises NameError on import.
# (If `os` is imported elsewhere above this chunk, the extra import is a
# harmless no-op.)
import os
import tempfile
from pathlib import Path

import numpy as np
import nibabel as nb
import pytest

from dipy.data.fetcher import _make_fetcher, UW_RW_URL

# Root for the cached test data: honor DMRIPREP_TESTS_DATA when set,
# otherwise fall back to the user's home directory.
_dipy_datadir_root = os.getenv('DMRIPREP_TESTS_DATA') or Path.home()
dipy_datadir = Path(_dipy_datadir_root) / '.cache' / 'data'
dipy_datadir.mkdir(parents=True, exist_ok=True)

# Build the Sherbrooke 3-shell fetcher and invoke it immediately (note the
# trailing ``()``), so the data is downloaded at import time.
_make_fetcher(
    "fetch_sherbrooke_3shell",
    str(dipy_datadir),
    UW_RW_URL + "1773/38475/",
    ['HARDI193.nii.gz', 'HARDI193.bval', 'HARDI193.bvec'],
    ['HARDI193.nii.gz', 'HARDI193.bval', 'HARDI193.bvec'],
    [
        '0b735e8f16695a37bfbd66aab136eb66',
        'e9b9bb56252503ea49d31fb30a0ac637',
        '0c83f7e8b917cd677ad58a078658ebb7'
    ],
    doc="Download a 3shell HARDI dataset with 192 gradient direction")()

# Paths and gradient tables for the downloaded dataset, loaded eagerly so
# fixtures/doctests can reference them directly.
_sherbrooke_data = {
    'dwi_file': dipy_datadir / "HARDI193.nii.gz",
    'bvecs': np.loadtxt(dipy_datadir / "HARDI193.bvec").T,
    'bvals': np.loadtxt(dipy_datadir / "HARDI193.bval"),
}


@pytest.fixture(autouse=True)
def doctest_autoimport(doctest_namespace):
    """Make available some fundamental modules to doctest modules."""
from os.path import join as pjoin
from dipy.data.fetcher import _make_fetcher, dipy_home

# Destination root for the downloaded data; a relative path is used here.
dname = '../data/'  # or dipy_home

# Fetcher for the "Sherby" example dataset: downloads Sherby.zip
# (DWI and T1 for 2 subjects) into <dname>/sherby, verifies its md5,
# and unzips it (unzip=True).
fetch_sherby = _make_fetcher(
    "fetch_sherby",
    pjoin(dname, 'sherby'),
    'https://dl.dropboxusercontent.com/u/2481924/',
    ['Sherby.zip'],
    ['Sherby.zip'],
    ['2979482087f5e37846e802ea19542d52'],
    doc="Download 2 subjects with DWI and T1 datasets",
    data_size="200MB",
    unzip=True)

# Trigger the download immediately — a module-import side effect.
fetch_sherby()
# 64 directions # 50x50x50 voxels fetch_isbi2013_challenge = _make_fetcher( "fetch_isbi2013_challenge", os.path.join(dipy_home, 'isbi2013_challenge'), 'http://hardi.epfl.ch/static/events/2013_ISBI/_downloads/', [ 'testing-data_DWIS_hardi-scheme_SNR-10.nii.gz', 'testing-data_DWIS_hardi-scheme_SNR-20.nii.gz', 'testing-data_DWIS_hardi-scheme_SNR-30.nii.gz', 'hardi-scheme.bval', 'hardi-scheme.bvec', 'ground-truth-peaks.nii.gz', ], [ 'hardi-scheme_SNR-10.nii.gz', 'hardi-scheme_SNR-20.nii.gz', 'hardi-scheme_SNR-30.nii.gz', 'hardi-scheme.bval', 'hardi-scheme.bvec', 'ground-truth-peaks.nii.gz', ], [ 'c3d97559f418358bb69467a0b5809630', '33640b1297c8b498e0328fe268dbd5c1', 'a508716c5eec555a77a34817acafb0ca', '92811d6e800a6a56d7498b0c4b5ed0c2', 'c8f5025b9d91037edb6cd00af9bd3e41', 'fc3ecd9636d6130b0f0488812b3a341c', ]) def read_isbi2013_challenge(snr=30): """ Load ISBI 2013's HARDI reconstruction challenge dataset