Code Example #1
File: variables.py Project: steelec/pipelines
# "surf" is assumed to be nibabel's FreeSurfer IO module
# (e.g. "import nibabel.freesurfer as surf", which provides read_annot).
def getvertices(hemi, freesurferdir):
    # Destrieux (a2009s) colortable rows of the regions of interest
    labellist = [1, 5, 13, 14, 15, 16, 24, 31, 32, 39, 40, 53, 54, 55, 63, 64, 65, 71]
    [vertices, colortable, names] = surf.read_annot(
        freesurferdir + '/fsaverage4/label/' + hemi[-2:] + '.aparc.a2009s.annot',
        orig_ids=True)
    chosenvertices = list()
    for j, value in enumerate(vertices):
        for i, index in enumerate(labellist):
            # with orig_ids=True each vertex carries the packed annotation id,
            # which is stored in column 4 of the colortable
            if colortable[index][4] == value:
                chosenvertices.append(j)
    return chosenvertices
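
A minimal usage sketch; the nibabel import bound to "surf" and the FreeSurfer directory below are assumptions, not taken from the original project:

import nibabel.freesurfer as surf  # assumed binding for "surf" (provides read_annot)

freesurferdir = "/path/to/freesurfer/subjects"  # placeholder path
lh_vertices = getvertices("lh", freesurferdir)  # hemi is sliced with hemi[-2:], so "lh" / "rh" work
print(len(lh_vertices), "fsaverage4 vertices fall inside the chosen labels")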
Code Example #2
File: parse.py Project: JanisReinelt/pipelines
# Module-level names assumed here: surf (nibabel's FreeSurfer IO), np (numpy),
# nb (nibabel), plus the marker strings flag / rowflag and the path freesurferdir,
# all defined elsewhere in parse.py.
def createSurface(inputFile, hemi):
    [vertices, colortable, names] = surf.read_annot(
        freesurferdir + "/fsaverage4/label/" + hemi + ".aparc.a2009s.annot", orig_ids=True
    )

    # keep only the lines of the input file that start with the cluster marker
    clusters = []
    with open(inputFile, "rb") as openfile:
        for line in openfile:
            if line[: len(flag)] == flag:
                clusters.append(line)

    for index, cluster in enumerate(clusters):
        # one value per fsaverage4 vertex
        surface = np.zeros_like(vertices)
        startIndex = cluster.find(rowflag) + len(rowflag)
        stopIndex = cluster.find("]", startIndex) - 1

        # parse the comma-separated vertex ids listed between rowflag and "]"
        for x in cluster[startIndex:stopIndex].split(","):
            vertex = x.lstrip()
            if vertex.isdigit():
                surface[int(x) % 2562] = +1  # fsaverage4 has 2562 vertices per hemisphere
        savefile = inputFile.split(".")[0] + "_cluster" + str(index)
        newImage = nb.nifti1.Nifti1Image(surface, None)
        nb.save(newImage, savefile)
Code Example #3
File: parse.py Project: stymy/pipelines
def createSurface(inputFile, hemi):
    [vertices, colortable, names] = surf.read_annot(
        freesurferdir + '/fsaverage4/label/' + hemi + '.aparc.a2009s.annot',
        orig_ids=True)

    clusters = []
    with open(inputFile, 'rb') as openfile:
        for line in openfile:
            if line[:len(flag)] == flag:
                clusters.append(line)

    for index, cluster in enumerate(clusters):
        surface = np.zeros_like(vertices)
        startIndex = cluster.find(rowflag) + len(rowflag)
        stopIndex = cluster.find(']', startIndex) - 1

        for x in cluster[startIndex:stopIndex].split(','):
            vertex = x.lstrip()
            if vertex.isdigit():
                surface[int(x) % 2562] = +1
        savefile = inputFile.split('.')[0] + '_cluster' + str(index)
        newImage = nb.nifti1.Nifti1Image(surface, None)
        nb.save(newImage, savefile)
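
The "% 2562" wrap in both versions above relies on fsaverage4 having 2562 vertices per hemisphere; a quick way to check that assumption (paths are placeholders):

import nibabel.freesurfer as surf  # assumed binding for "surf"

freesurferdir = "/path/to/freesurfer/subjects"  # placeholder path
labels, ctab, names = surf.read_annot(
    freesurferdir + "/fsaverage4/label/lh.aparc.a2009s.annot", orig_ids=True)
print(labels.shape)  # expected: (2562,), one annotation value per fsaverage4 vertex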
Code Example #4
File: plot_parc_values.py Project: bthirion/PySurfer
surface = "inflated"

"""
Bring up the visualization
"""
brain = Brain(subject_id, hemi, surface,
              config_opts=dict(background="lightslategray",
                               cortex="high_contrast"))

"""
Read in the aparc annotation file
"""
aparc_file = op.join(os.environ["SUBJECTS_DIR"],
                     subject_id, "label",
                     hemi + ".aparc.a2009s.annot")
labels, ctab, names = io.read_annot(aparc_file)

"""
Make a random vector of scalar data corresponding to
a value for each region in the parcellation.
"""
roi_data = np.random.random(len(names))

"""
Make a vector containing the data point at each vertex.
"""
vtx_data = np.zeros(len(labels))
for i, data in enumerate(roi_data):
    vtx_data[labels == i] = data

"""
Code Example #5
File: plot_foci.py Project: hanke/PySurfer
"""
brain.add_foci(coords, map_surface="white", color=rgb)

"""
You can also plot foci with a set of surface vertex ids.
For instance, you might want to plot the peak activation
within an ROI for each of your individual subjects over
the group activation map.

Here, we will just demonstrate with a set of randomly
chosen vertices from within the superior temporal sulcus.

First, we load in the Destrieux parcellation annotation file.
"""
annot_path = op.join(subjects_dir, subject_id, "label/lh.aparc.a2009s.annot")
ids, ctab, names = io.read_annot(annot_path)

"""
Then, find 10 random vertices within the STS.
"""
verts = arange(0, len(ids))
coords = permutation(verts[ids == 74])[:10]

"""
You can also control the size of the spheroids.
We'll make these a little bit bigger than our
other foci.
"""
scale_factor = 1.3

"""
Code Example #6
File: plot_parc_values.py Project: sburns/PySurfer
hemi = "lh"
surface = "inflated"
"""
Bring up the visualization
"""
brain = Brain(subject_id,
              hemi,
              surface,
              config_opts=dict(background="lightslategray",
                               cortex="high_contrast"))
"""
Read in the aparc annotation file
"""
aparc_file = op.join(os.environ["SUBJECTS_DIR"], subject_id, "label",
                     hemi + ".aparc.a2009s.annot")
labels, ctab, names = io.read_annot(aparc_file)
"""
Make a random vector of scalar data corresponding to
a value for each region in the parcellation.
"""
roi_data = np.random.random(len(names))
"""
Make a vector containing the data point at each vertex.
"""
vtx_data = np.zeros(len(labels))
for i, data in enumerate(roi_data):
    vtx_data[labels == i] = data
"""
Display these values on the brain.
Use the hot colormap and add an alpha channel
so the underlying anatomy is visible.
"""
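
The example stops before that display step; in PySurfer it is typically done with Brain.add_data, roughly as below (exact arguments are an assumption):

brain.add_data(vtx_data, 0, 1, colormap="hot", alpha=.7)  # min=0, max=1 to match the random ROI values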
Code Example #7
    # Now we can take these peak coordinates and get the surface vertex
    foci_surf = io.Surface("fsaverage_copy",
                           hemi,
                           "white",
                           subjects_dir=subjects_dir)
    foci_surf.load_geometry()
    foci_vtxs = utils.find_closest_vertices(foci_surf.coords, coords)

    # Load the geometry
    curv_file = op.join(surf_dir, "%s.curv" % hemi)
    curv = io.read_morph_data(curv_file)  # < 0 = gyrus & > 0 = sulcus

    # Load the parcellations
    aparc_file = op.join(label_dir, "%s.aparcDKT40JT.annot" % hemi)
    aparc9_file = op.join(label_dir, "%s.aparc.a2009s.annot" % hemi)
    ba_file = op.join(label_dir, "%s.PALS_B12_Brodmann.annot" % hemi)
    yeo_file = op.join(label_dir, "%s.Yeo2011_7Networks_N1000.annot" % hemi)
    aparc = io.read_annot(aparc_file)
    ba = io.read_annot(ba_file)
    yeo = io.read_annot(yeo_file)
    aparc9 = io.read_annot(aparc9_file)

    yeo_names = [
        "Medial_Wall", "Visual", "Somatomotor", "Dorsal Attention",
        "Ventral Attention", "Limbic", "Frontoparietal", "Default"
    ]

    aparc_names = [
        'Unknown', 'Banks Superior Temporal', 'Caudal Anterior Cingulate',
        'Caudal Middle Frontal', 'Corpus Callosum', 'Cuneus', 'Entorhinal',
        'Fusiform', 'Inferior Parietal', 'Inferior Temporal',
        'Isthmus Cingulate', 'Lateral Occipital', 'Lateral Orbital Frontal',
        'Lingual', 'Medial Orbital Frontal', 'Middle Temporal',
Code Example #8
    # Now we can take these peak coordinates and get the surface vertex
    foci_surf   = io.Surface("fsaverage_copy", hemi, "white", subjects_dir=subjects_dir)
    foci_surf.load_geometry()
    foci_vtxs   = utils.find_closest_vertices(foci_surf.coords, coords)

    # Load the geometry
    curv_file   = op.join(surf_dir, "%s.curv" % hemi)
    curv        = io.read_morph_data(curv_file) # < 0 = gyrus & > 0 = sulcus

    # Load the parcellations
    aparc_file  = op.join(label_dir, "%s.aparcDKT40JT.annot" % hemi)
    aparc9_file = op.join(label_dir, "%s.aparc.a2009s.annot" % hemi)
    ba_file     = op.join(label_dir, "%s.PALS_B12_Brodmann.annot" % hemi)
    yeo_file    = op.join(label_dir, "%s.Yeo2011_7Networks_N1000.annot" % hemi)
    aparc       = io.read_annot(aparc_file)
    ba          = io.read_annot(ba_file)
    yeo         = io.read_annot(yeo_file)
    aparc9      = io.read_annot(aparc9_file)

    yeo_names   = ["Medial_Wall", "Visual", "Somatomotor", "Dorsal Attention", 
                   "Ventral Attention", "Limbic", "Frontoparietal", "Default"]
    
    aparc_names = ['Unknown',
     'Banks Superior Temporal',
     'Caudal Anterior Cingulate',
     'Caudal Middle Frontal',
     'Corpus Callosum',
     'Cuneus',
     'Entorhinal',
     'Fusiform',
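
With the annotations loaded, each focus vertex can be looked up in a parcellation. A sketch, assuming the default read_annot output in which the per-vertex labels index the names array, and that the eight yeo_names above line up with label values 0 through 7:

labels, ctab, names = yeo                  # read_annot returns (labels, ctab, names)
for vtx in foci_vtxs:
    net = labels[vtx]                      # integer network label at this vertex
    print(vtx, yeo_names[net], curv[vtx])  # network name and curvature at the focus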
Code Example #9
scan = "short"

easydir = easydirs[scan]
surf_files = {
    "lh": op.join(easydir, "surf_lh_thresh_zstat_FSIQ.nii.gz"), 
    "rh": op.join(easydir, "surf_rh_thresh_zstat_FSIQ.nii.gz")
}
ba_files = {
    "lh": "/home2/data/PublicProgram/freesurfer/fsaverage_copy/label/lh.PALS_B12_Brodmann.annot", 
    "rh": "/home2/data/PublicProgram/freesurfer/fsaverage_copy/label/rh.PALS_B12_Brodmann.annot", 
}


hemi    = "lh"

ba      = io.read_annot(ba_files[hemi])
cwas    = io.read_scalar_data(surf_files[hemi])

rois    = ba[0]
urois   = np.unique(rois); urois.sort()
labels  = np.array(ba[2])[urois]

cols    = ["index", "roi", "ba", "summary_wt", "summary_uwt", "meta_analysis"]
dict_df = { k : [] for k in cols }

for i,label in enumerate(labels):
    if label.find("Brodmann") == -1:
        continue
    
    ba          = int(label[9:])
    roi         = urois[i]
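
The loop is cut off above; once the per-ROI lists in dict_df are filled, a typical next step is to assemble them into a table, for example with pandas (a sketch, not part of the original script):

import pandas as pd

df = pd.DataFrame(dict_df, columns=cols)            # one row per Brodmann ROI
df.to_csv("ba_summary_%s.csv" % hemi, index=False)  # output filename is a placeholder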
Code Example #10
File: plot_foci.py Project: sburns/PySurfer
"""
brain.add_foci(coords, map_surface="white", color="gold")
"""
You can also plot foci with a set of surface vertex ids.
For instance, you might want to plot the peak activation
within an ROI for each of your individual subjects over
the group activation map.

Here, we will just demonstrate with a set of randomly
chosen vertices from within the superior temporal sulcus.

First, we load in the Destrieux parcellation annotation file
and find 10 random vertices within the STS.
"""
annot_path = op.join(subjects_dir, subject_id, "label/lh.aparc.a2009s.annot")
ids, ctab, names = io.read_annot(annot_path)
verts = arange(0, len(ids))
coords = permutation(verts[ids == 74])[:10]
"""
You can also control the size of the focus glyphs.
We'll make these a little bit smaller than our
other foci.
"""
scale_factor = 0.7
"""
Finally, plot the foci using the coords_as_verts option to
center each spheroid at its vertex id.
"""
brain.add_foci(coords,
               coords_as_verts=True,
               scale_factor=scale_factor,
Code Example #11
File: plot_foci.py Project: satra/PySurfer
"""
brain.add_foci(coords, map_surface="white", color=rgb)

"""
You can also plot foci with a set of surface vertex ids.
For instance, you might want to plot the peak activation
within an ROI for each of your individual subjects over
the group activation map.

Here, we will just demonstrate with a set of randomly
chosen vertices from within the superior temporal sulcus.

First, we load in the Destrieux parcellation annotation file.
"""
annot_path = op.join(subjects_dir, subject_id, "label/lh.aparc.a2009s.annot")
ids, ctab, names = io.read_annot(annot_path)

"""
Then, find 10 random vertices within the STS.
"""
verts = arange(0, len(ids))
coords = permutation(verts[ids == 74])[:10]

"""
You can also control the size of the spheroids.
We'll make these a little bit bigger than our
other foci.
"""
scale_factor = 1.3

"""