def get_distmat(hemi, parcellation, scale, fn=None):
    if hemi not in ('lh', 'rh'):
        raise ValueError(f'Invalid hemisphere designation {hemi}')

    if USE_CACHED and fn is not None:
        # the cached path is re-derived from the parameters; `fn` effectively
        # just signals that a pre-computed distance matrix should be loaded
        fn = DISTDIR / parcellation / 'nomedial' / f'{scale}_{hemi}_dist.npy'
        dist = np.load(fn, allow_pickle=False, mmap_mode='c').astype('float32')
    else:
        surf = nndata.fetch_fsaverage('fsaverage5', data_dir=ROIDIR)['pial']
        subj, spath = nnsurf.check_fs_subjid('fsaverage5')
        medial = Path(spath) / subj / 'label'
        medial_labels = [
            'unknown', 'corpuscallosum', '???',
            'Background+FreeSurfer_Defined_Medial_Wall'
        ]
        if parcellation == 'vertex':
            medial_path = medial / f'{hemi}.Medial_wall.label'
            dist = surface.get_surface_distance(getattr(surf, hemi),
                                                medial=medial_path,
                                                use_wb=False,
                                                verbose=True)
        else:
            annot = _get_annot(parcellation, scale)
            dist = surface.get_surface_distance(getattr(surf, hemi),
                                                getattr(annot, hemi),
                                                medial_labels=medial_labels,
                                                use_wb=False,
                                                verbose=True)
    return dist
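
# A brief usage sketch (not part of the original snippet): it assumes the
# surrounding script's module-level names (USE_CACHED, DISTDIR, nndata, nnsurf,
# surface, _get_annot) are in scope; the parcellation/scale names below are
# placeholders.
lh_dist = get_distmat('lh', 'schaefer', 'scale100')
rh_dist = get_distmat('rh', 'schaefer', 'scale100')
print(lh_dist.shape, rh_dist.shape)  # one square distance matrix per hemisphere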
Example #2
def test_fetch_fsaverage(tmpdir):
    fsaverage = datasets.fetch_fsaverage(data_dir=tmpdir, verbose=0)
    for key in ['orig', 'white', 'smoothwm', 'pial', 'inflated', 'sphere']:
        assert hasattr(fsaverage, key)
        assert len(fsaverage[key]) == 2
        assert all(os.path.isfile(hemi) for hemi in fsaverage[key])
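
# The returned object is a Bunch whose keys are surface names and whose values
# are (lh, rh) pairs of file paths, as the assertions above imply. A minimal
# sketch of inspecting it directly (paths will differ by machine):
from netneurotools import datasets

fsaverage = datasets.fetch_fsaverage(verbose=0)
lh_pial, rh_pial = fsaverage['pial']
print(lh_pial, rh_pial)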
Example #3
def _get_fs_subjid(subject_id, subjects_dir=None):
    """
    Gets fsaverage version `subject_id`, fetching if required

    Parameters
    ----------
    subject_id : str
        FreeSurfer subject ID
    subjects_dir : str, optional
        Path to FreeSurfer subjects directory. If not set, will inherit from
        the environment variable $SUBJECTS_DIR. Default: None

    Returns
    -------
    subject_id : str
        FreeSurfer subject ID
    subjects_dir : str
        Path to the subjects directory containing `subject_id`
    """

    from netneurotools.utils import check_fs_subjid

    # check for FreeSurfer install w/fsaverage; otherwise, fetch required
    try:
        subject_id, subjects_dir = check_fs_subjid(subject_id, subjects_dir)
    except FileNotFoundError:
        if 'fsaverage' not in subject_id:
            raise ValueError('Provided subject {} does not exist in provided '
                             'subjects_dir {}'.format(subject_id,
                                                      subjects_dir))
        from netneurotools.datasets import fetch_fsaverage
        from netneurotools.datasets.utils import _get_data_dir
        fetch_fsaverage(subject_id)
        subjects_dir = os.path.join(_get_data_dir(), 'tpl-fsaverage')
        subject_id, subjects_dir = check_fs_subjid(subject_id, subjects_dir)

    return subject_id, subjects_dir
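
# Example call (not part of the original helper): if fsaverage5 cannot be found
# via $SUBJECTS_DIR, it is fetched to the netneurotools data directory first.
import os

subject_id, subjects_dir = _get_fs_subjid('fsaverage5')
assert os.path.isdir(os.path.join(subjects_dir, subject_id))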
Example #4
def make_surf_plot(data, surf='inflated', version='fsaverage5', **kwargs):
    """
    Generates 2 x 2 surface plot of `data`

    Parameters
    ----------
    data : array_like
        Data to be plotted; should be left hemisphere first
    surf : {'orig', 'white', 'smoothwm', 'pial', 'inflated', 'sphere'}
        Which surface the data should be displayed on. Default: 'inflated'
    version : {'fsaverage', 'fsaverage5', 'fsaverage6'}
        Which fsaverage surface to use for plots. The dimensionality of `data`
        should match the resolution of the surface. Default: 'fsaverage5'

    Returns
    -------
    fig : matplotlib.figure.Figure
        Figure object
    """

    # get plotting kwargs, dropping any that are set explicitly per subplot
    data = np.nan_to_num(data)
    opts = dict(colorbar=False, vmax=data.max())
    opts.update(kwargs)
    for key in ['hemi', 'view', 'axes']:
        opts.pop(key, None)

    data = np.split(data, 2)
    fs = fetch_fsaverage(version)[surf]

    fig, axes = plt.subplots(2, 2, subplot_kw={'projection': '3d'})
    for row, view in zip(axes, ['lateral', 'medial']):
        for n, (col, hemi) in enumerate(zip(row, ['lh', 'rh'])):
            fn = getattr(fs, hemi)
            hemi = 'left' if hemi == 'lh' else 'right'

            plot_surf_stat_map(fn,
                               data[n],
                               hemi=hemi,
                               view=view,
                               axes=col,
                               **opts)

    fig.tight_layout()

    return fig
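
# Usage sketch: fsaverage5 has 10,242 vertices per hemisphere, so `data` should
# hold 20,484 values, left hemisphere first. The random data here is purely
# illustrative.
import numpy as np

data = np.random.rand(20484)
fig = make_surf_plot(data, surf='inflated', version='fsaverage5')
fig.savefig('surf_plot.png', dpi=150)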
Example #5
def _fetch_template_surface_files(
    template: str,
    data_dir: Union[str, Path],
    layer: Optional[str] = None,
) -> Tuple[str, str]:
    """Fetches surface files.

    Parameters
    ----------
    template : str
        Name of the surface template. Valid values are "fslr32k", "fsaverage",
        "fsaverage3", "fsaverage4", "fsaverage5", "fsaverage6", "civet41k",
        "civet164k".
    data_dir : str or Path
        Directory in which to save the data.
    layer : str, optional
        Name of the cortical surface of interest. Valid values are "white",
        "smoothwm", "pial", "inflated", "sphere" for fsaverage surfaces;
        "midthickness", "inflated", "vinflated" for "fslr32k"; "mid", "white"
        for civet surfaces; and "sphere" for "civet41k". If None, defaults to
        "pial", "midthickness", or "mid" depending on the template.
        Default: None

    Returns
    -------
    Tuple of str
        Paths to the left and right hemisphere surface files.
    """

    if template == "fslr32k":
        layer = layer if layer else "midthickness"
        bunch = nnt_datasets.fetch_conte69(data_dir=str(data_dir))
    elif template in ("civet41k", "civet164k"):
        layer = layer if layer else "mid"
        if layer == "sphere":
            return _fetch_civet_spheres(template, data_dir=Path(data_dir))
        else:
            bunch = nnt_datasets.fetch_civet(density=template[5:],
                                             version="v2",
                                             data_dir=str(data_dir))
    else:
        layer = layer if layer else "pial"
        bunch = nnt_datasets.fetch_fsaverage(version=template,
                                             data_dir=str(data_dir))
    return bunch[layer]
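
# Possible invocation (illustrative; the data_dir below is a placeholder):
from pathlib import Path

lh_surf, rh_surf = _fetch_template_surface_files(
    "fsaverage5", data_dir=Path.home() / "nnt-data", layer="pial"
)
print(lh_surf, rh_surf)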
Example #6
        if annot is not None:
            mgh = putils.parcellate(mgh, getattr(annot, hemi))
        else:
            mgh = np.squeeze(nib.load(mgh).dataobj)
        cdata.append(mgh)
    data = pd.DataFrame(dict(myelin=np.hstack(cdata)), index=index)

    fname.parent.mkdir(parents=True, exist_ok=True)
    data.to_csv(fname, sep=',')

    return fname


if __name__ == "__main__":
    stds = nndata.fetch_hcp_standards(data_dir=ROIDIR)
    fsaverage = nndata.fetch_fsaverage('fsaverage5', data_dir=ROIDIR)['sphere']

    # separate cifti into hemispheres (and convert to gifti)
    cifti = HCPDIR / 'S1200.MyelinMap_BC_MSMAll.32k_fs_LR.dscalar.nii'
    lhout = HCPDIR / 'S1200.L.MyelinMap_BC_MSMSAll.32k_fs_LR.func.gii'
    rhout = HCPDIR / 'S1200.R.MyelinMap_BC_MSMSAll.32k_fs_LR.func.gii'
    run(CIFTISEP.format(cifti=cifti, lhout=lhout, rhout=rhout), quiet=True)

    # for each hemisphere, resample to FreeSurfer fsaverage5 space and convert
    # the resulting GII file to MGH (for consistency with NeuroSynth data)
    for gii, hemi, surf in zip((lhout, rhout), ('L', 'R'), fsaverage):
        out = HCPDIR / f'fsaverage5.MyelinMap.{hemi}.10k_fsavg_{hemi}.func.gii'
        mgh = HCPDIR / f'{hemi.lower()}h.myelin.mgh'
        run(HCP2FS.format(gii=gii, path=stds, hemi=hemi, out=out), quiet=True)
        run(GIITOMGH.format(gii=out, surf=surf, out=mgh), quiet=True)
Example #7
# However, there are instances when two vertices may be assigned the same value
# because their closest rotated vertex is identical. When working with surfaces
# that are sampled at a sufficiently high resolution this will occur less
# frequently, but it does still happen. To demonstrate, we can
# grab the coordinates of the `fsaverage6` surface and generate a few
# rotations.
#
# First, we'll grab the spherical projections of the `fsaverage6` surface and
# extract the vertex coordinates:

import nibabel as nib

# if you have FreeSurfer installed on your computer this will simply grab the
# relevant files from the $SUBJECTS_DIR directory; otherwise, it will download
# them to the $HOME/nnt-data/tpl-fsaverage directory
lhsphere, rhsphere = nndata.fetch_fsaverage('fsaverage6', verbose=0)['sphere']

lhvert, lhface = nib.freesurfer.read_geometry(lhsphere)
rhvert, rhface = nib.freesurfer.read_geometry(rhsphere)

###############################################################################
# Then, we'll provide these to the function for generating the spin-based
# resampling array. We also need an indicator array designating which
# coordinates belong to which hemisphere so we'll create that first:

from netneurotools import stats as nnstats
import numpy as np

coords = np.vstack([lhvert, rhvert])
hemi = [0] * len(lhvert) + [1] * len(rhvert)
spins, cost = nnstats.gen_spinsamples(coords, hemi, n_rotate=10, seed=1234)
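
###############################################################################
# Each column of ``spins`` is a permutation of the vertex indices, so a "spun"
# null of any vertex-level map can be generated with fancy indexing. A minimal
# sketch using random data in place of a real brain map:

data = np.random.rand(len(coords))

# apply the first rotation; spins[:, i] reorders the vertices for the i-th spin
spun = data[spins[:, 0]]
print(spun.shape)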
Example #8
            for annot, hemi in [(lh, 'lh'), (rh, 'rh')]:
                tval = annot.replace('space-fsaverage', 'space-{}'.format(trg))
                tval = tval.replace('/fsaverage/', '/{}/'.format(trg))
                msg = f'Generating annotation file: {tval}'
                print(msg, end='\r', flush=True)

                run(SURFCMD.format(trgsubject=trg,
                                   annot=annot,
                                   tval=tval,
                                   hemi=hemi),
                    quiet=True)

    print(' ' * len(msg) + '\b' * len(msg), end='', flush=True)

    hcp = datasets.fetch_hcp_standards()
    fsaverage = datasets.fetch_fsaverage()
    for scale, (lh, rh) in annotations.items():
        for annot, hemi in [(lh, 'lh'), (rh, 'rh')]:
            outdir = op.join(op.dirname(op.dirname(annot)), 'fslr32k')
            gii = annot.replace('.annot', '.label.gii')
            white = fsaverage['white'][0 if hemi == 'lh' else 1]
            fname = op.basename(gii).replace('fsaverage', 'fslr32k')
            msg = f'Generating fslr32k file: {fname}'
            print(msg, end='\r', flush=True)
            run(GIICMD.format(annot=annot, white=white, gii=gii), quiet=True)
            run(HCPCMD.format(annot=gii,
                              path=hcp,
                              vers='',
                              hemi=hemi[0].capitalize(),
                              ires='164',
                              ores='32',
Generated matrices are saved in `data/derivatives/geodesic`.
"""

from pathlib import Path

from netneurotools import datasets as nndata, freesurfer as nnsurf
from parspin import surface, utils as putils

ROIDIR = Path('./data/raw/rois').resolve()
DISTDIR = Path('./data/derivatives/geodesic').resolve()
N_PROC = 24  # parallelization of distance calculation
SURFACE = 'pial'  # surface on which to calculate distance

if __name__ == '__main__':
    parcellations = putils.get_cammoun_schaefer(data_dir=ROIDIR)
    surf = nndata.fetch_fsaverage('fsaverage5', data_dir=ROIDIR)[SURFACE]
    subj, spath = nnsurf.check_fs_subjid('fsaverage5')
    medial = Path(spath) / subj / 'label'
    medial_labels = [
        'unknown', 'corpuscallosum', '???',
        'Background+FreeSurfer_Defined_Medial_Wall'
    ]

    # get parcel distance matrices with this horrible nested for-loop :scream:
    for name, annotations in parcellations.items():
        for scale, annot in annotations.items():
            for hemi in ['lh', 'rh']:
                for allow_med in [True, False]:
                    med = 'medial' if allow_med else 'nomedial'
                    out = DISTDIR / name / med / f'{scale}_{hemi}_dist.csv'
                    if out.exists():