Example #1
0
def _make_haxby_subject_data(match, response):
    """Return a mock archive listing one Haxby subject's files.

    The filenames follow the standard per-subject Haxby layout and are
    rooted at the directory captured by ``match.group(1)``. The
    ``response`` argument is accepted for the mocker's callback
    signature but not used.
    """
    filenames = (
        'bold.nii.gz',
        'labels.txt',
        'mask4_vt.nii.gz',
        'mask8b_face_vt.nii.gz',
        'mask8b_house_vt.nii.gz',
        'mask8_face_vt.nii.gz',
        'mask8_house_vt.nii.gz',
        'anat.nii.gz',
    )
    subject_dir = match.group(1)
    return list_to_archive(Path(subject_dir, name) for name in filenames)
Example #2
0
def _adhd_example_subject(match, request):
    """Return a mock archive holding one ADHD subject's two files.

    Both entries live under ``data/<subject>/`` where ``<subject>`` is
    the id captured by ``match.group(1)``. The ``request`` argument is
    accepted for the mocker's callback signature but not used.
    """
    subject_id = match.group(1)
    regressors = match.expand(r"\1_regressors.csv")
    func_image = match.expand(r"\1_rest_tshift_RPI_voreg_mni.nii.gz")
    return list_to_archive([
        Path("data", subject_id, regressors),
        Path("data", subject_id, func_image),
    ])
Example #3
0
def test_fetch_surf_fsaverage(mesh, tmp_path, request_mocker):
    """Fetched fsaverage meshes expose every expected surface attribute."""
    # Attributes the returned dataset should contain; each one maps to
    # a "<attribute>.gii.gz" file inside the mocked archive.
    sides = ("left", "right")
    parts = ("area", "curv", "infl", "pial", "sphere", "sulc", "thick",
             "white")
    mesh_attributes = set()
    for part in parts:
        for side in sides:
            mesh_attributes.add("{}_{}".format(part, side))

    # Mock the fsaverage3, 4, 6, 7 downloads (actual OSF urls). The
    # archive content is identical for every url, so build it once.
    archive = list_to_archive(
        ["{}.gii.gz".format(name) for name in mesh_attributes])
    for fs_url in ("https://osf.io/asvjk/download",
                   "https://osf.io/x2j49/download",
                   "https://osf.io/um5ag/download",
                   "https://osf.io/q7a5k/download"):
        request_mocker.url_mapping[fs_url] = archive

    dataset = struct.fetch_surf_fsaverage(mesh, data_dir=str(tmp_path))
    assert mesh_attributes.issubset(set(dataset.keys()))
    assert dataset.description != ''
Example #4
0
def test_fetch_surf_fsaverage5_sphere(tmp_path, request_mocker):
    """fetch_surf_fsaverage('fsaverage5_sphere') yields both hemispheres.

    The original looped ``for mesh in ['fsaverage5_sphere']:`` over a
    hard-coded single-element list; the pointless loop is removed.
    """
    # Mock the download of the sphere archive (url matched by pattern).
    request_mocker.url_mapping["*b79fy*"] = list_to_archive(
        ["sphere_right.gii", "sphere_left.gii"])

    dataset = struct.fetch_surf_fsaverage('fsaverage5_sphere',
                                          data_dir=str(tmp_path))

    # Both hemisphere meshes must be exposed on the returned dataset.
    keys = {'sphere_left', 'sphere_right'}
    assert keys.issubset(set(dataset.keys()))
    assert dataset.description != ''
Example #5
0
def test_fetch_surf_fsaverage(mesh, tmp_path, request_mocker):
    """Fetched fsaverage dataset exposes the expected surface keys."""
    expected_keys = {
        'pial_left', 'pial_right',
        'infl_left', 'infl_right',
        'sulc_left', 'sulc_right',
        'white_left', 'white_right',
    }
    # The archive stores "inflated_*" files even though the dataset
    # exposes them under the shorter "infl_*" keys.
    archive_contents = []
    for key in expected_keys:
        filename = "{}.gii".format(key.replace("infl", "inflated"))
        archive_contents.append(Path("fsaverage") / filename)
    request_mocker.url_mapping["*fsaverage.tar.gz"] = list_to_archive(
        archive_contents)

    dataset = struct.fetch_surf_fsaverage(mesh, data_dir=str(tmp_path))
    assert expected_keys.issubset(set(dataset.keys()))
    assert dataset.description != ''
Example #6
0
def test_dict_to_archive(tmp_path):
    """dict_to_archive / list_to_archive round-trip through tar and zip."""
    # An on-disk directory whose contents get embedded into the archive.
    source_dir = tmp_path / "tmp"
    source_dir.mkdir()
    (source_dir / "labels.csv").touch()
    (source_dir / "img.nii.gz").touch()

    # Spec mixes every supported value type: a directory, a path string,
    # plain text, an in-memory image, and raw bytes.
    spec = {
        "empty_data": source_dir,
        "empty_data_path.txt": str(source_dir),
        Path("data", "labels.csv"): "a,b,c",
        Path("data", "img.nii.gz"): generate_fake_fmri()[0],
        Path("a", "b", "c"):
            (100).to_bytes(length=1, byteorder="big", signed=False),
    }
    targz = _testing.dict_to_archive(spec)

    # Unpack the tar.gz and check each entry survived the round trip.
    extract_dir = tmp_path / "extract"
    extract_dir.mkdir()
    archive_path = tmp_path / "archive"
    archive_path.write_bytes(targz)
    with tarfile.open(str(archive_path)) as tar:
        tar.extractall(str(extract_dir))
    img = image.load_img(str(extract_dir / "data" / "img.nii.gz"))
    assert img.shape == (10, 11, 12, 17)
    with (extract_dir / "a" / "b" / "c").open("rb") as stream:
        assert int.from_bytes(
            stream.read(), byteorder="big", signed=False) == 100
    with open(str(extract_dir / "empty_data" / "labels.csv")) as stream:
        assert stream.read() == ""

    # A zip archive can carry the tar.gz bytes through unchanged.
    zip_bytes = _testing.dict_to_archive(
        {"readme.txt": "hello", "archive": targz}, "zip")
    archive_path.write_bytes(zip_bytes)
    with zipfile.ZipFile(str(archive_path)) as zf:
        with zf.open("archive", "r") as stream:
            assert stream.read() == targz

    # list_to_archive over the same keys yields the same tree, plus the
    # implicit parent directories.
    from_list = _testing.list_to_archive(spec.keys())
    archive_path.write_bytes(from_list)
    with tarfile.open(str(archive_path)) as tar:
        expected = list(map(Path, spec.keys())) + [
            Path("."), Path("a"), Path("a", "b"), Path("data")]
        assert sorted(map(Path, tar.getnames())) == sorted(expected)