Example #1
    def __init__(self, lh_loc=None, rh_loc=None, lh=None, rh=None):

        geo = np.zeros(100)
        n_parcels_l, n_parcels_u = 5, 10

        self.parcels_lh = Parcels(geo, n_parcels_l, n_parcels_u)
        self.parcels_rh = Parcels(geo, n_parcels_l, n_parcels_u)

        self.score = None
        self.age = 0

        if lh_loc is not None:
            if '.annot' in lh_loc:
                lh = io.read_annot(lh_loc)[0]
            else:
                lh = surface.load_surf_data(lh_loc)

        if rh_loc is not None:
            if '.annot' in rh_loc:
                rh = io.read_annot(rh_loc)[0]
            else:
                rh = surface.load_surf_data(rh_loc)

        self.parcels_lh.mask = lh
        self.parcels_lh.n_parcels = np.max(lh)

        self.parcels_rh.mask = rh
        self.parcels_rh.n_parcels = np.max(rh)
Example #2
    def parse(self, annot_path):
        [region_mapping, color_table, region_names] = read_annot(annot_path)

        color_mapping_list = [[0 for _ in range(4)]
                              for _ in range(len(region_mapping))]
        for i in range(0, len(region_mapping)):
            for j in range(0, 4):
                color_mapping_list[i][j] = round(
                    (color_table[region_mapping[i]][j] / 255.0), 2)

        return Annotation(region_mapping, color_mapping_list, region_names)
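The nested loops above can be collapsed into one vectorized NumPy expression. A minimal sketch, assuming color_table and region_mapping are the arrays returned by read_annot, exactly as in parse():

import numpy as np

# Take the RGBT columns of each vertex's colortable row and normalize
# from 0-255 to 0-1, rounded to two decimals (same result as the loops).
color_mapping = np.round(color_table[region_mapping, :4] / 255.0, 2)
color_mapping_list = color_mapping.tolist()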
Example #3
def _fetch_cammoun_parcellation(template: str, n_regions: int,
                                data_dir: Path) -> List[np.ndarray]:
    """Fetches Cammoun parcellations."""
    key = f"scale{n_regions:03}"
    bunch = nnt_datasets.fetch_cammoun2012(version=template,
                                           data_dir=str(data_dir))
    if template == "fslr32k":
        gifti = [nib_load(file) for file in bunch[key]]
        parcellations = [x.darrays[0].data for x in gifti]
    else:
        parcellations = [read_annot(file)[0] for file in bunch[key]]
    return parcellations
Example #4
def get_label_data(subject_dir, annot_type):

    # dictionary turning an annotation type into a filename
    annot_dict = {
        'aparc': 'aparc.annot',
        'aparc2009': 'aparc.a2009s.annot',
        'DK': 'aparc.DKTatlas.annot',
        'HCP': 'HCP-MMP1.annot',
        'BN': 'BN_Atlas.annot',
        'Schaefer_100': 'Schaefer2018_100Parcels_7Networks_order.annot',
        'Schaefer_200': 'Schaefer2018_200Parcels_7Networks_order.annot',
        'Schaefer_300': 'Schaefer2018_300Parcels_7Networks_order.annot',
        'Schaefer_400': 'Schaefer2018_400Parcels_7Networks_order.annot',
        'Schaefer_500': 'Schaefer2018_500Parcels_7Networks_order.annot',
        'Schaefer_600': 'Schaefer2018_600Parcels_7Networks_order.annot',
        'Schaefer_800': 'Schaefer2018_800Parcels_7Networks_order.annot',
        'Schaefer_1000': 'Schaefer2018_1000Parcels_7Networks_order.annot',
        'Power': 'power.annot',
        'hcp-mmp': 'hcp-mmp.annot',
        'gordon': 'gordon333dil.annot',
        'arslan': 'arslan_res347.annot',
        'shen': 'shen.annot',
        'Cambridge': '500.aparc.annot'
    }

    # FS_dir_structure is assumed to be a module-level flag marking a
    # standard FreeSurfer subject layout (annotations under label/)
    if FS_dir_structure:

        lh_annot = read_annot(subject_dir + '/label/lh.' +
                              annot_dict[annot_type])
        rh_annot = read_annot(subject_dir + '/label/rh.' +
                              annot_dict[annot_type])

    else:

        lh_annot = read_annot(subject_dir + '/lh.' + annot_dict[annot_type])
        rh_annot = read_annot(subject_dir + '/rh.' + annot_dict[annot_type])

    return lh_annot[0], rh_annot[0]
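A hypothetical call, assuming FS_dir_structure is set and the subject directory contains the requested .annot files:

# Hypothetical usage: load left/right Desikan-Killiany labels for a subject
# (the path is a placeholder).
lh_labels, rh_labels = get_label_data('/path/to/subjects/sub-01', 'aparc')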
Example #5
def project_results_annot(annot, results_file, file_like):
    results = pd.read_csv(results_file, sep=',')
    print(results)
    annot_info = read_annot(annot)
    values = annot_info[0]
    output = values.astype(np.float32) * 0
    labels = np.array(annot_info[2])
    for index, row in results.iterrows():
        # label names are stored as bytes in the annotation file
        w = np.where(labels == row.label.encode('utf-8'))[0]
        if len(w) > 0:
            # w holds at most one index because label names are unique
            output[values == w[0]] = row.value
            print(row.label, w, row.value)

    write_morph_data(file_like=file_like, values=output)
Example #6
def _fetch_schaefer_parcellation(template: str, n_regions: int,
                                 seven_networks: bool,
                                 data_dir: Path) -> List[np.ndarray]:
    """Fetches Schaefer parcellations."""
    n_networks = 7 if seven_networks else 17
    key = f"{n_regions}Parcels{n_networks}Networks"
    bunch = nnt_datasets.fetch_schaefer2018(version=template,
                                            data_dir=str(data_dir))
    if template == "fslr32k":
        cifti = nib_load(bunch[key])
        parcellation_full = np.squeeze(cifti.get_fdata())
        parcellations = [x for x in np.reshape(parcellation_full, (2, -1))]
    else:
        parcellations = [read_annot(file)[0] for file in bunch[key]]
        # offset right-hemisphere labels so ids stay unique across hemispheres
        parcellations[1][parcellations[1] != 0] += n_regions // 2
    return parcellations
Example #7
def plot_roi(
    hemi,
    labels,
    color,
    annotation="HCPMMP1",
    view="parietal",
    fs_dir=os.environ["SUBJECTS_DIR"],
    subject_id="S04",
    surf="inflated",
):
    import matplotlib
    import os
    import glob
    from surfer import Brain
    from mne import Label

    color = np.array(matplotlib.colors.to_rgba(color))

    brain = Brain(subject_id, hemi, surf, offscreen=False)
    labels = [label.replace("-rh", "").replace("-lh", "") for label in labels]
    # First select all label files

    label_names = glob.glob(
        os.path.join(fs_dir, subject_id, "label", "lh*.label"))
    label_names = [
        label for label in label_names if any([l in label for l in labels])
    ]

    for label in label_names:
        brain.add_label(label, color=color)

    # Now go for annotations
    from nibabel.freesurfer import io

    ids, colors, annot_names = io.read_annot(
        os.path.join(fs_dir, subject_id, "label", "lh.%s.annot" % annotation),
        orig_ids=True,
    )

    for i, alabel in enumerate(annot_names):
        if any([label in alabel.decode("utf-8") for label in labels]):
            label_id = colors[i, -1]
            vertices = np.where(ids == label_id)[0]
            l = Label(np.sort(vertices), hemi="lh")
            brain.add_label(l, color=color)
    brain.show_view(view)
    return brain.screenshot()
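A hypothetical invocation of plot_roi, assuming SUBJECTS_DIR is set and subject S04 has HCPMMP1 annotation files:

# Hypothetical usage (label name is illustrative): paint V1 red on the
# left inflated surface and return a screenshot array.
img = plot_roi(hemi='lh', labels=['V1'], color='red')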
Example #8
    def __init__(self, fsaverage_dir, hemisphere, atlas):

        # Load patient labels for vertex based on <atlas> parcels (MACRO)
        (self.vertices, self.colortable, self.labels) = fio.read_annot(
            f"{fsaverage_dir}/label/{hemisphere}.{atlas}.annot")

        # Load geometry of the brain
        (self.coords, self.faces) = fio.read_geometry(f"{fsaverage_dir}/surf/{hemisphere}.sphere.reg")

        # an array of lists: entry i holds the indices of the faces that touch vertex i
        self.vertex_to_faces = np.empty((len(self.vertices),), dtype=object)
        for i in range(len(self.vertex_to_faces)):
            self.vertex_to_faces[i] = []
        # for each face i, append i to the face list of each of its three vertices
        for i, f in enumerate(self.faces):
            for j in range(3):
                self.vertex_to_faces[f[j]].append(i)
Example #9
    def write_overlays(self):

        # Set directories to reference
        labelDir = os.path.join(self.subdir, 'label')
        outputDir = os.path.join(self.scDir, 'data')

        # List of dictionaries for parcellations
        parc_list = [{
            'source': 'lh.aparc.annot',
            'output': 'lh_dk_atlas.overlay',
            'name': 'DK Atlas'
        }, {
            'source': 'rh.aparc.annot',
            'output': 'rh_dk_atlas.overlay',
            'name': 'DK Atlas'
        }, {
            'source': 'lh.aparc.a2009s.annot',
            'output': 'lh_d_atlas.overlay',
            'name': 'D Atlas'
        }, {
            'source': 'rh.aparc.a2009s.annot',
            'output': 'rh_d_atlas.overlay',
            'name': 'D Atlas'
        }]

        # Write out the parcellations
        for d in parc_list:
            labels, ctab, names = read_annot(
                os.path.join(labelDir, d['source']))
            overlay_struct = {
                'type': 'atlas',
                'mesh':
                'lh.pial' if d['source'].startswith('lh') else 'rh.pial',
                'data': [0 if val == -1 else val for val in labels.tolist()],
                'ctable': ctab.tolist(),
                'labels': [n.astype(str) for n in names],
                'name': d['name']
            }

            outputTxt = json.dumps(overlay_struct)
            outputName = os.path.join(outputDir, d['output'])
            with open(outputName, 'w') as fo:
                fo.write(outputTxt)
Example #10
def main():
    parser = _build_args_parser()
    args = parser.parse_args()
    logging.basicConfig(level=logging.INFO)

    # make sure the surface files exist
    if not isfile(args.lh_surface):
        parser.error('The file "{0}" must exist.'.format(args.lh_surface))

    if not isfile(args.rh_surface):
        parser.error('The file "{0}" must exist.'.format(args.rh_surface))

    # make sure the files exist
    if not isfile(args.lh_annot):
        parser.error('The file "{0}" must exist.'.format(args.lh_annot))

    if not isfile(args.rh_annot):
        parser.error('The file "{0}" must exist.'.format(args.rh_annot))

    # generate output filenames
    lh_filename = args.lh_surface.rsplit('.vtk')[0] + args.suffix + '.vtk'
    rh_filename = args.rh_surface.rsplit('.vtk')[0] + args.suffix + '.vtk'

    filenames = [lh_filename, rh_filename]

    # warn if they already exist
    for i in range(2):
        if isfile(filenames[i]):
            if args.overwrite:
                logging.info('Overwriting "{0}".'.format(filenames[i]))
            else:
                parser.error('The file "{0}" already exists. Use -f to overwrite it.'.format(filenames[i]))
    
    # load annotation files
    labels = dict()
    labels[0] = read_annot(args.lh_annot)
    labels[1] = read_annot(args.rh_annot)

    # load the surfaces, downsampling as needed
    logging.info('Loading and setting scalars on .vtk surfaces.')

    surfaces = dict()
    surfaces[0] = load_vtk(args.lh_surface)
    surfaces[1] = load_vtk(args.rh_surface)

    writer = vtk.vtkPolyDataWriter()

    for j in range(2):
        data_source = labels[j][0]
        data_scaler = vtk.vtkFloatArray()
        data_scaler.SetNumberOfComponents(1)
        data_scaler.SetName('Labels')
        
        for k in range(surfaces[j].GetNumberOfPoints()):
            data_scaler.InsertNextValue(data_source[k])

        surfaces[j].GetPointData().SetScalars(data_scaler)

        filename = filenames[j]
        writer.SetInputData(surfaces[j])
        writer.SetFileName(filename)
        writer.Update()
Example #11
import os
import tqdm
import nibabel.freesurfer.io as free
from HMM_settings import *  # assumed to provide np, glob, nbinseq, roidir, path, nTR, ageeq

tasks = ['DM']
bins = np.arange(nbinseq)

for seed in np.arange(5):
    savepath = roidir + str(seed) + '/'
    if not os.path.exists(savepath):
        os.makedirs(savepath)
        for hemi in glob.glob(path + 'ROIs/annot/*'):
            print(seed, hemi)
            lab = free.read_annot(hemi)
            for ri, roi_tmp in tqdm.tqdm(enumerate(lab[2])):
                roi = roi_tmp.decode("utf-8")
                roi_short = roi_tmp.decode("utf-8")[11:]
                roidict = {}
                vall = np.where(lab[0] == ri)[0]
                roidict['hemi'] = (hemi.split('/')[-1][0]).upper()
                for ti, task in enumerate(tasks):
                    roidict[task] = {}
                    nTR_ = nTR[ti]
                    for b in bins:
                        if len(vall) > 0:
                            roidict[task]['bin_' + str(b)] = {}
                            np.random.seed(seed)
                            subl = [
                                ageeq[i][1][b][idx] for i in [0, 1]
Example #12
    options = options_parse()

    print()
    print("Rotate Sphere Parameters:")
    print()
    print("- src sphere {}".format(options.srcsphere))
    print("- src aparc: {}".format(options.srcaparc))
    print("- trg sphere {}".format(options.trgsphere))
    print("- trg aparc: {}".format(options.trgaparc))
    print("- out txt {}".format(options.out))

    # read the source and target spheres and their annotations
    print("\nreading {}".format(options.srcsphere))
    srcsphere = fs.read_geometry(options.srcsphere, read_metadata=True)
    print("reading annotation: {} ...".format(options.srcaparc))
    srcaparc = fs.read_annot(options.srcaparc)
    print("reading {}".format(options.trgsphere))
    trgsphere = fs.read_geometry(options.trgsphere, read_metadata=True)
    print("reading annotation: {} ...".format(options.trgaparc))
    trgaparc = fs.read_annot(options.trgaparc)

    R = align_aparc_centroids(srcsphere[0], srcaparc[0], trgsphere[0],
                              trgaparc[0])
    alpha, beta, gamma = align.rmat2angles(R)
    print("\nalpha {:.1f}   beta {:.1f}   gamma {:.1f}\n".format(
        alpha, beta, gamma))

    # write angles
    print("writing: {}".format(options.out))
    with open(options.out, "w") as f:
        f.write("{:.1f} {:.1f} {:.1f}\n".format(alpha, beta, gamma))
Example #13
def connectivity_2_tvb_fs(subID,
                          subFolder,
                          SC_matrix,
                          reconallFolder='recon_all'):

    # Create the results folder
    if not os.path.exists(subFolder + 'results/'):
        os.mkdir(subFolder + 'results/')

    # Load the SC matrix
    SC = io.loadmat(subFolder + '/mrtrix_68/tracks_68/' + SC_matrix)
    weights = SC['SC_cap_agg_bwflav2']
    delay = SC['SC_dist_agg_mean']

    # Load the required things computed previously by FreeSurfer
    lh_vert, lh_faces = fs.read_geometry(subFolder + '/' + reconallFolder +
                                         '/surf/lh.pial')
    rh_vert, rh_faces = fs.read_geometry(subFolder + '/' + reconallFolder +
                                         '/surf/rh.pial')
    cortexMesh = {
        'vertices': np.vstack((lh_vert, rh_vert)),
        'faces': np.vstack((lh_faces, rh_faces + np.shape(lh_vert)[0]))
    }

    # Calculate vertex-normals
    cortexMesh['vertexNormals'] = calcVertNormals(cortexMesh['vertices'],
                                                  cortexMesh['faces'])

    # Load annotation tables
    lh_labels, lh_ctab, lh_names = fs.read_annot(subFolder + '/' +
                                                 reconallFolder +
                                                 '/label/lh.aparc.annot')
    rh_labels, rh_ctab, rh_names = fs.read_annot(subFolder + '/' +
                                                 reconallFolder +
                                                 '/label/rh.aparc.annot')
    # Remove the corpus callosum (CC), i.e. correct the labeling
    lh_labels[lh_labels > 3] -= 1
    rh_labels[rh_labels > 3] -= 1
    # Combine into single vectors
    rh_labels += np.max(lh_labels)
    rh_labels[rh_labels == np.min(rh_labels)] = -1
    cortexMesh['labels'] = np.hstack((lh_labels, rh_labels))
    # Store label name-strings
    tmp = lh_names[1:4] + lh_names[5:]
    tmp_lh = ['lh_' + s for s in tmp]
    tmp_rh = ['rh_' + s for s in tmp]
    cortexMesh['labelNames'] = tmp_lh + tmp_rh

    # Do the TVB mesh clean
    cortexMesh['vertices'], cortexMesh['faces'], cortexMesh[
        'vertexNormals'], cortexMesh['labels'] = removeFB(
            cortexMesh['vertices'], cortexMesh['faces'],
            cortexMesh['vertexNormals'], cortexMesh['labels'])

    # Now finally start storing things....
    # ############

    # Define the filenames
    filenames = [
        'weights.txt', 'centres.txt', 'tract.txt', 'orientation.txt',
        'area.txt', 'cortical.txt', 'hemisphere.txt'
    ]

    # 1.) Weights
    np.savetxt(subFolder + 'results/' + filenames[0],
               weights,
               delimiter=' ',
               fmt='%1i')

    # 2.) Position
    # Calc region centers
    # centers = np.zeros((weights.shape[0], 3))
    with open(subFolder + 'results/' + filenames[1], 'w') as f:
        for i in range(weights.shape[0]):
            # First get all vertices corresponding to a certain region
            regionVertices = cortexMesh['vertices'][cortexMesh['labels'] == i +
                                                    1]
            # Compute the mean of each region
            tmp = np.mean(regionVertices, axis=0)
            # Now look for the nearest neighbors
            idx = np.sum(np.abs(cortexMesh['vertices'] - tmp), axis=1).argmin()
            # Define the nearest vertex as center
            # centers[i, :] = cortexMesh['vertices'][idx, :]
            center = cortexMesh['vertices'][idx, :]
            # Write file
            f.write('{0} {1} {2} {3}\n'.format(cortexMesh['labelNames'][i],
                                               str(center[0]), str(center[1]),
                                               str(center[2])))

    # 3.) Tract
    np.savetxt(subFolder + 'results/' + filenames[2],
               delay,
               delimiter=' ',
               fmt='%1i')

    # 4.) Orientation
    with open(subFolder + 'results/' + filenames[3], 'w') as f:
        for i in range(weights.shape[0]):
            # Get all vertex-normals corresponding to the vertices of the current region
            regionVertexNormals = cortexMesh['vertexNormals'][
                cortexMesh['labels'] == i + 1]
            # Compute mean vector
            orientation = np.mean(regionVertexNormals, axis=0)
            # Normalize it
            orientation /= np.sqrt(orientation[0]**2 + orientation[1]**2 +
                                   orientation[2]**2)
            # Write to file
            f.write('{0} {1} {2}\n'.format(str(orientation[0]),
                                           str(orientation[1]),
                                           str(orientation[2])))

    # 5.) Area
    # I'm not quite sure how to get the exact surface area in mm^2,
    # so for now I just count the surface vertices corresponding to each region.
    # EDIT: According to the TVB documentation, this attribute is not
    # mandatory for the input!
    with open(subFolder + 'results/' + filenames[4], 'w') as f:
        for i in range(weights.shape[0]):
            # match the 1-based region labels used in the loops above
            area = np.count_nonzero(cortexMesh['labels'] == i + 1)
            f.write('{0}\n'.format(str(area)))

    # 6.) Cortical
    # Since in the default atlas all areas are cortical
    cortical = np.ones((68, 1))
    np.savetxt(subFolder + 'results/' + filenames[5],
               cortical,
               delimiter=' ',
               fmt='%1i')

    # 7.) Hemisphere
    # Again hard coded for the Desikan-Killiany mask!
    # TODO: Make this flexible!
    hemisphere = np.vstack((np.zeros((34, 1)), np.ones((34, 1))))
    np.savetxt(subFolder + 'results/' + filenames[6],
               hemisphere,
               delimiter=' ',
               fmt='%1i')

    # Assemble the Zip-File
    zf = zipfile.ZipFile(subFolder + 'results/' + subID + '_Connectivity.zip',
                         mode='w')
    for fname in filenames:
        zf.write(subFolder + 'results/' + fname)
        os.remove(subFolder + 'results/' + fname)
    zf.close()
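A hypothetical invocation, assuming the directory layout the function expects (a mrtrix_68/tracks_68 subfolder holding the SC .mat file next to a finished recon-all):

# Hypothetical usage (paths are placeholders): bundle subject C0001's
# structural connectivity into a TVB-ready zip archive.
connectivity_2_tvb_fs(subID='C0001',
                      subFolder='/data/subjects/C0001/',
                      SC_matrix='SC_matrix.mat')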
Example #14
import scipy as sp
import nibabel.freesurfer.io as fsio
from surfproc import view_patch_vtk, patch_color_labels, smooth_patch
from dfsio import writedfs, readdfs
from nibabel.gifti.giftiio import read as gread

labels, _, _ = fsio.read_annot(
    '/big_disk/ajoshi/freesurfer/subjects/sub06880/label/rh.HCP-MMP1.annot')
vert, faces = fsio.read_geometry(
    '/big_disk/ajoshi/freesurfer/subjects/sub06880/surf/rh.pial')


class fs:
    pass


fs.vertices = vert
fs.faces = faces
fs.labels = labels
fs = patch_color_labels(fs)
view_patch_vtk(fs, outfile='fs1.png')  #, azimuth=-90, roll=90)


class bs:
    pass


bs1 = readdfs('/home/ajoshi/Desktop/test/T1.right.pial.cortex.svreg.dfs')
bs = readdfs(
    '/home/ajoshi/Desktop/test/multiparc/T1.right.mid.cortex.svreg.HCP-MMP1.dfs'
)
Example #15
def load_freesurfer_annot(filename, orig_ids=False):
    '''
    load_freesurfer_annot(filename) is equivalent to nibabel.freesurfer.io.read_annot(filename).
    '''
    return fsio.read_annot(filename, orig_ids=orig_ids)
Example #16
myplot = sns.boxplot(x='network assignment', y='hctsa node score',
                     data=network_score,
                     width=0.4, fliersize=3, showcaps=False,
                     order=plot_order, showfliers=False)

sns.despine(ax=myplot, offset=5, trim=True)
myplot.axes.set_title('PC %s' % (ncomp+1))
myplot.figure.set_figwidth(10)
myplot.figure.set_figheight(6)

####################################
# Evolutionary expansion
####################################
# load data and parcellate
schaefer_fsaverage6 = read_annot('../data/schaefer/FreeSurfer5.3/' +
                                 'fsaverage6/label/' +
                                 'rh.Schaefer2018_400Parcels_7Networks' +
                                 '_order.annot')
evolutionExpData = np.loadtxt('../data/evolutionaryExpansion/' +
                              'Hill2010_evo_fsaverage6.txt')

parcelIDs = np.unique(schaefer_fsaverage6[0])
parcelIDs = np.delete(parcelIDs, 0)

parcellatedData = np.zeros((len(parcelIDs), 1))

for IDnum in parcelIDs:
    idx = np.where(schaefer_fsaverage6[0] == IDnum)[0]
    parcellatedData[IDnum-1, 0] = np.nanmean(evolutionExpData[idx])


# plot on brain surface
Example #17
        fshemi = 'rh'

    outfile = 'BCI-DNI_Brainnetome' + '.' + hemi + '.mid.cortex.dfs'

    ''' BCI to FS processed BCI '''
    bci_bsti = readdfs(
        '/home/ajoshi/BrainSuite19b/svreg/BCI-DNI_brain_atlas/BCI-DNI_brain.' + hemi + '.inner.cortex.dfs')
    bci_bst_mid = readdfs(
        '/home/ajoshi/BrainSuite19b/svreg/BCI-DNI_brain_atlas/BCI-DNI_brain.' + hemi + '.mid.cortex.dfs')

    bci_bsti.vertices[:, 0] -= 96*0.8
    bci_bsti.vertices[:, 1] -= 192*0.546875
    bci_bsti.vertices[:, 2] -= 192*0.546875
    bci.vertices, bci.faces = fsio.read_geometry(
        '/big_disk/ajoshi/data/BCI_DNI_Atlas/surf/' + fshemi + '.white')
    bci.labels = np.zeros(bci.vertices.shape[0])
    # read_annot returns (labels, ctab, names); name the results accordingly
    fslabels, ctab, names = fsio.read_annot(
        '/big_disk/ajoshi/freesurfer/subjects/BCI_DNI_Atlas/label/' + fshemi + '.BN_Atlas.annot')
    fslabels[fslabels < 0] = 0
    bci.labels = fslabels

    bci = patch_color_labels(bci)
    view_patch_vtk(bci)

    bci_bsti = interpolate_labels(bci, bci_bsti)
    bci_bst_mid.labels = bci_bsti.labels
    bci_bst_mid = smooth_patch(bci_bst_mid, iterations=3000, relaxation=.5)
    bci_bst_labels = patch_color_labels(bci_bst_mid)
    view_patch_vtk(bci_bst_labels)
    writedfs(outfile, bci_bst_labels)
Example #18
def runfreesurfer2(subjectid,extraflags=''):
    '''
    def runfreesurfer2(subjectid,extraflags):

    <subjectid> is like 'C0001'
    <extraflags> (optional) is a string with extra flags to pass to recon-all.
     Default: ''

    This is part 2/2 for pushing anatomical data through FreeSurfer.
    see code for assumptions.
    '''
    from RZutilpy.cvnpy import cvnpath, writemgz, fstoint
    from RZutilpy.system import makedirs,Path
    from RZutilpy.rzio import savepkl, loadpkl
    import nibabel.freesurfer.io as fsio
    import nibabel as nib
    import re
    from numpy import stack
    from sklearn.neighbors import NearestNeighbors


    # calc
    dir0 = (Path(cvnpath('anatomicals')) / subjectid).str
    fsdir = (Path(cvnpath('freesurfer')) / subjectid).str

    # make subject anatomical directory
    makedirs(dir0)

    # convert some miscellaneous files

    # convert .thickness files to ASCII
    # no need for python since nibabel can directly read the file
    # unix_wrapper('mris_convert -c {0}/surf/lh.thickness {0}/surf/lh.white {0}/surf/lh.thickness.asc'.format(str(fsdir)))
    # unix_wrapper('mris_convert -c {0}/surf/rh.thickness {0}/surf/rh.white {0}/surf/rh.thickness.asc'.format(str(fsdir)))

    # # convert .curv files to ASCII
    # unix_wrapper('mris_convert -c {0}/surf/lh.curv {0}/surf/lh.white {0}/surf/lh.curv.asc'.format(str(fsdir)))
    # unix_wrapper('mris_convert -c {0}/surf/rh.curv {0}/surf/rh.white {0}/surf/rh.curv.asc'.format(str(fsdir)))

    #### make mid-gray surface

    unix_wrapper('mris_expand -thickness {0}/surf/lh.white 0.5 {0}/surf/lh.graymid'.format(fsdir))
    unix_wrapper('mris_expand -thickness {0}/surf/rh.white 0.5 {0}/surf/rh.graymid'.format(fsdir))

    #### consolidate mid-gray surface stuff into a .mat file
    for hemi in ['lh', 'rh']:
        # read .graymid surface
        vertices,faces = fsio.read_geometry((Path(fsdir) / 'surf'/ f'{hemi}.graymid').str)

        # construct vertices (4 x V); be careful here, numpy and matlab indexing might differ!!!
        vertices = vertices.T + np.array([128, 129, 128]).reshape(-1,1)
        vertices = np.vstack((vertices, np.ones(vertices.shape[1]).reshape(1,-1)))

        # construct faces (F x 3)
        faces = faces[:,[0, 2, 1]]  # necessary to convert freesurfer to matlab

        # load auxiliary info (V x 1)
        thickness = fsio.read_morph_data((Path(fsdir) / 'surf' / f'{hemi}.thickness').str)
        curvature = fsio.read_morph_data((Path(fsdir) / 'surf' / f'{hemi}.curv').str)

        # get freesurfer labels (fslabels is V x 1)
        fslabels, _, _ = fsio.read_annot((Path(fsdir) / 'label' / f'{hemi}.aparc.annot').str)

        # save
        savepkl((Path(cvnpath('anatomicals')) / subjectid / f'{hemi}midgray.pkl').str,
          {'vertices':vertices, 'faces':faces, 'thickness':thickness, \
          'curvature':curvature, 'fslabels': fslabels})

    #### calculate gray-matter information

    if re.search('hires', extraflags) is None:
        # load ribbon
        ribmgz = nib.load((Path(fsdir)/ 'mri' / 'ribbon.mgz').str)
        rib = fstoint(ribmgz.get_data())

        # load coordinates of surface vertices
        coord0 = stack(
            (loadpkl((Path(cvnpath('anatomicals')) / subjectid / 'lhmidgray.pkl').str)['vertices'],
             loadpkl((Path(cvnpath('anatomicals')) / subjectid / 'rhmidgray.pkl').str)['vertices']),
            axis=1)

        #### use nearestNeighour, need to double check this
        nbrs = NearestNeighbors(n_neighbors=1, metric='l2')
        nbrs.fit(coord0.T)
        dist, mnix = nbrs.kneighbors(rib, 1) # do I need to reshape dist and mnix?


        # compute distances to vertices [i.e. create a volume where gray matter voxels have certain informative values]
        #[dist,mnix] = surfaceslice2(ismember(rib,[3 42]),coord0, 3, 4)  # NOTICE HARD-CODED VALUES HERE
        ####

        # save
          # 1-mm volume with, for each gray matter voxel, distance to closest vertex (of mid-gray surface)
        nib.save(inttofs(dist), (Path(fsdir) / 'mri'/ 'ribbonsurfdist.mgz').str, ribmgz)
          # 1-mm volume with, for each gray matter voxel, index of closest vertex (of mid-gray surface)
        nib.save(inttofs(mnix),(Path(fsdir) / 'mri'/ 'ribbonsurfindex.mgz').str, ribmgz)


    #### calculate transfer functions

    # calc
    [tfunFSSSlh,tfunFSSSrh,tfunSSFSlh,tfunSSFSrh] = \
      calctransferfunctions((Path(cvnpath('freesurfer')).joinpath('fsaverage', 'surf','lh.sphere.reg')).str, \
                            (Path(cvnpath('freesurfer')).joinpath('fsaverage', 'surf','rh.sphere.reg')).str, \
                               (Path(cvnpath('freesurfer')).joinpath(subjectid, 'surf','lh.sphere.reg')).str, \
                               (Path(cvnpath('freesurfer')).joinpath(subjectid, 'surf','rh.sphere.reg')).str)

    # save
    savepkl((Path(cvnpath('anatomicals')) / subjectid / 'tfun.mat').str,
            {'tfunFSSSlh': tfunFSSSlh,
             'tfunFSSSrh': tfunFSSSrh,
             'tfunSSFSlh': tfunSSFSlh,
             'tfunSSFSrh': tfunSSFSrh})
Example #19
def main(argv):
    try:
        opts, args = getopt.getopt(argv, "hf:a:", ["ifile=", "ofile="])
    except getopt.GetoptError:
        print('python freesurfer_label_USCBrain.py -f <freesurfer_sub>\
-a <USCBrain>')
        sys.exit(2)
    for opt, arg in opts:
        if opt == '-h':
            print('python freesurfer_label_USCBrain.py -f \
<freesurfer_sub> -a <USCBrain>')
            sys.exit()
        elif opt in ("-f", "--ffile"):
            subbasename = arg
        elif opt in ("-a", "--afile"):
            USCBrainpath = arg
    print('FreeSurfer subid is :' + subbasename)
    print('USCBrain dir is :' + USCBrainpath)
    hemi = 'right'
    fshemi = 'rh'

    class s:
        pass

    class bci:
        pass

    for hi in range(2):
        if hi == 0:
            hemi = 'right'
            fshemi = 'rh'
        else:
            hemi = 'left'
            fshemi = 'lh'

        ''' USCBrain to FS processed BCI '''
        bci_bsti = readdfs(USCBrainpath +
                           '/BCI-DNI_brain.' + hemi + '.mid.cortex.dfs')
        bci_bst = readdfs(USCBrainpath + '/BCI-DNI_brain.' +
                          hemi + '.inner.cortex.dfs')
        bci_bst.labels = bci_bsti.labels
        bci_bst.vertices[:, 0] -= 96*0.8
        bci_bst.vertices[:, 1] -= 192*0.546875
        bci_bst.vertices[:, 2] -= 192*0.546875
        bci.vertices, bci.faces = fsio.read_geometry(fshemi + '.white')
        bci = interpolate_labels_colors(bci_bst, bci)

        ''' FS_BCI to FS BCI Sphere'''
        bci.vertices, bci.faces = fsio.read_geometry(fshemi + '.sphere.reg')

        ''' FS BCI Sphere to SUB FS Sphere'''
        s.vertices, s.faces = fsio.read_geometry(subbasename +
                                                 '/surf/' + fshemi +
                                                 '.sphere.reg')
        s = interpolate_labels_colors(bci, s)
        fslabels, _, _ = fsio.read_annot(subbasename +
                                         '/label/' + fshemi + '.aparc.annot')
        s.labels = s.labels * sp.int16(fslabels > 0)
        s.vColor[fslabels <= 0, :] = 0.5
        s.vertices, _ = fsio.read_geometry(subbasename + '/surf/' +
                                           fshemi + '.pial')
        so, _ = fsio.read_geometry(subbasename + '/surf/' + fshemi + '.white')
        s.vertices = (s.vertices + so)/2.0
        s.faces = s.faces[:, (0, 2, 1)]
        outfilename = subbasename + '/' + hemi + '.mid.cortex.fs.dfs'
        writedfs(outfilename, s)
        print('output file is : ' + outfilename)
Example #20

hemi = 'left'
fshemi = 'lh'
''' BCI to FS processed BCI '''
bci_bsti = readdfs('/home/ajoshi/BrainSuite19b/svreg/BCI-DNI_brain_atlas/'
                   'BCI-DNI_brain.' + hemi + '.mid.cortex.dfs')
bci_bst = readdfs('/home/ajoshi/BrainSuite19b/svreg/BCI-DNI_brain_atlas/'
                  'BCI-DNI_brain.' + hemi + '.inner.cortex.dfs')
bci_bst.labels = bci_bsti.labels
bci_bst.vertices[:, 0] -= 96 * 0.8
bci_bst.vertices[:, 1] -= 192 * 0.546875
bci_bst.vertices[:, 2] -= 192 * 0.546875


class bci_fs:
    pass


bci_fs.vertices, bci_fs.faces = fsio.read_geometry(
    '/big_disk/ajoshi/data/BCI_DNI_Atlas/surf/' + fshemi + '.white')

fslabels, _, _ = fsio.read_annot(
    '/big_disk/ajoshi/freesurfer/subjects/BCI_DNI_Atlas/label/' + fshemi +
    '.economo.annot')
bci_fs.labels = fslabels

bci_bst = interpolate_labels(bci_fs, bci_bst)
bci_bsti.labels = bci_bst.labels
bci_bsti = patch_color_labels(bci_bsti)
view_patch_vtk(bci_bsti)

bci_bsti = smooth_patch(bci_bsti, iterations=10000)
view_patch_vtk(bci_bsti)

writedfs('BCI_DNI_economo_' + hemi + '.dfs', bci_bsti)
Example #21
class g32k:
    pass


''' Right Hemisphere '''

g32ktmp = gread('/big_disk/ajoshi/data/standard_mesh_atlases/resample'
                '_fsaverage/fs_LR-deformed_to-fsaverage.R.sphere.32k_fs_LR.surf.gii')
g32k.vertices = g32ktmp.get_arrays_from_intent('NIFTI_INTENT_POINTSET')[0].data
g32k.faces = g32ktmp.get_arrays_from_intent('NIFTI_INTENT_TRIANGLE')[0].data
g32k.vColor = sp.ones(g32k.vertices.shape)


class bci:
    pass


bci.vertices, bci.faces = fsio.read_geometry(
    '/big_disk/ajoshi/data/BCI_DNI_Atlas/surf/rh.sphere.reg')
bci.labels = fsio.read_annot(
    '/big_disk/ajoshi/data/BCI_DNI_Atlas/label/rh.BA.thresh.annot')[0]

g32k = interpolate_labels(fromsurf=bci, tosurf=g32k)
g32ktmp = gread('/big_disk/ajoshi/HCP_data/32k_ConteAtlas_v2/'
                'Conte69.R.very_inflated.32k_fs_LR.surf.gii')
g32k.vertices = g32ktmp.get_arrays_from_intent('NIFTI_INTENT_POINTSET')[0].data
g32k = patch_color_labels(g32k)
view_patch_vtk(g32k)

writedfs('Brodmann_32k_right.dfs', g32k)
''' Left Hemisphere '''

g32ktmp = gread('/big_disk/ajoshi/data/standard_mesh_atlases/resample'
                '_fsaverage/fs_LR-deformed_to-fsaverage.L.sphere.32k_fs_LR.surf.gii')
g32k.vertices = g32ktmp.get_arrays_from_intent('NIFTI_INTENT_POINTSET')[0].data
g32k.faces = g32ktmp.get_arrays_from_intent('NIFTI_INTENT_TRIANGLE')[0].data
Example #22
def main():
    parser = _build_args_parser()
    args = parser.parse_args()
    logging.basicConfig(level=logging.INFO)

    # make sure the files exist
    if not isfile(args.lh_annot):
        parser.error('The file "{0}" must exist.'.format(args.lh_annot))

    if not isfile(args.rh_annot):
        parser.error('The file "{0}" must exist.'.format(args.rh_annot))

    # make sure files are not accidentally overwritten
    if isfile(args.output):
        if args.overwrite:
            logging.info('Overwriting "{0}".'.format(args.output))
        else:
            parser.error(
                'The file "{0}" already exists. Use -f to overwrite it.'.
                format(args.output))

    # load annotation files
    lh_annot = read_annot(args.lh_annot)
    rh_annot = read_annot(args.rh_annot)

    lh_labels = np.array(lh_annot[0])
    rh_labels = np.array(rh_annot[0])

    # create the ROI name arrays, skipping those that are not included on the mesh
    idx = np.unique(lh_labels)
    lh_names = np.concatenate((np.array(lh_annot[2]), ['unknown']))[idx]

    idx = np.unique(rh_labels)
    rh_names = np.concatenate((np.array(rh_annot[2]), ['unknown']))[idx]

    # make sure the label ids correspond to the array indices of the names
    lh_label_map = dict(
        zip(np.unique(lh_labels), range(len(np.unique(lh_labels)))))
    lh_labels = np.array([lh_label_map[i] for i in lh_labels])

    rh_label_map = dict(
        zip(np.unique(rh_labels), range(len(np.unique(rh_labels)))))
    rh_labels = np.array([rh_label_map[i] for i in rh_labels])

    # create dictionary and calculate sizes for loop
    rois = dict()

    rois[0] = lh_labels
    rois[1] = rh_labels

    lh_orig_n = len(rois[0])
    rh_orig_n = len(rois[1])
    orig_n = lh_orig_n + rh_orig_n

    offset = 0
    snapped = defaultdict(list)

    logging.info('Merging vertices into parcellation')

    # loop through all vertices on full mesh and assign them to the closest vertex on resampled mesh
    for surface_id in range(2):
        n = len(rois[surface_id])

        for i in range(n):
            index = rois[surface_id][i] + (surface_id * 10000)

            snapped[index].append(i + offset)

        offset = offset + n

    # sort and save mapping
    snapped = OrderedDict(sorted(snapped.items(), key=lambda t: t[0]))

    # get the shape of the original mesh, and the new mesh
    lh_roi_n = (len(np.unique(rois[0])))
    rh_roi_n = (len(np.unique(rois[1])))
    roi_n = lh_roi_n + rh_roi_n

    logging.info('Number of ROIs: {0}'.format(str(roi_n)))

    # save the shape of the original and new mesh
    shape = np.array([roi_n, lh_roi_n, rh_roi_n, orig_n, lh_orig_n, rh_orig_n])

    lh_names = ['LH_' + name for name in lh_names]
    rh_names = ['RH_' + name for name in rh_names]

    # save the results: mapping contains arrays of vertex numbers from original mesh that have been assigned to each
    # of the vertices of the downsampled mesh; shape has the number of vertices for both high and low resolution meshes.
    np.savez_compressed(args.output,
                        mapping=np.array(list(snapped.values()), dtype=object),
                        shape=shape,
                        labels=list(snapped.keys()),
                        roi_names=np.concatenate((lh_names, rh_names)))
Example #24
    ''' BCI to FS processed BCI '''
    bci_bsti = readdfs(
        '/home/ajoshi/BrainSuite19b/svreg/BCI-DNI_brain_atlas/BCI-DNI_brain.' +
        hemi + '.inner.cortex.dfs')
    bci_bst_mid = readdfs(
        '/home/ajoshi/BrainSuite19b/svreg/BCI-DNI_brain_atlas/BCI-DNI_brain.' +
        hemi + '.mid.cortex.dfs')

    bci_bsti.vertices[:, 0] -= 96 * 0.8
    bci_bsti.vertices[:, 1] -= 192 * 0.546875
    bci_bsti.vertices[:, 2] -= 192 * 0.546875
    bci.vertices, bci.faces = fsio.read_geometry(
        '/big_disk/ajoshi/data/BCI_DNI_Atlas/surf/' + fshemi + '.white')
    bci.labels = np.zeros(bci.vertices.shape[0])
    fslabels, label1, colortable1 = fsio.read_annot(
        '/big_disk/ajoshi/data/BCI_DNI_Atlas/label/' + fshemi +
        '.aparc.a2009s.annot')
    fslabels[fslabels < 0] = 0

    bci.labels = fslabels

    bci = patch_color_labels(bci)
    view_patch_vtk(bci)

    bci_bsti = interpolate_labels(bci, bci_bsti)
    bci_bst_mid.labels = bci_bsti.labels
    bci_bst_mid = smooth_patch(bci_bst_mid, iterations=3000, relaxation=.5)
    bci_bst_labels = patch_color_labels(bci_bst_mid)
    view_patch_vtk(bci_bst_labels)
    writedfs(outfile, bci_bst_labels)
Example #25
parser.add_argument('-index', type=int, nargs='+', default=None, help='color only the selected label')

parser.add_argument('-out_surface', type=str, default=None, help='output surface (.vtk)')
parser.add_argument('-out_vts_mask', type=str, default=None, help='output mask (npy array) from vts index')
parser.add_argument('--inverse_mask', action='store_true', default=False, help='inverse output mask')

parser.add_argument('--v', action='store_true', default=False, help='view surface')
parser.add_argument('--white', action='store_true', default=False, help='color all labels white')
parser.add_argument('--info', action='store_true', default=False, help='print label indices, colors and names')

args = parser.parse_args()
print(args.annot)

mesh = TriMesh_Vtk(args.surface, None)
[vts_label, label_color, label_name] = read_annot(args.annot)

colors = label_color[:, :3]
vts_color = colors[vts_label]

if args.info:
    for index in range(len(label_name)):
        print(index, ": ", label_color[index], label_name[index])
    
# filter
if args.index is not None:
    print(args.index)
    mask = np.zeros([len(vts_label)], dtype=bool)
    for index in args.index:
        if index == -1:
            print("selected region :", index, "None")
Example #26
def read_annotation_md(annotation_file,
                       hemisphere_label,
                       meta_data=None,
                       encoding="utf-8",
                       orig_ids=False):
    """
    Read annotation file and record meta data for it.

    For details on the first three return values, see http://nipy.org/nibabel/reference/nibabel.freesurfer.html#nibabel.freesurfer.io.read_annot as they are the output of that function. An exception is the last return value (label_names in this function), for which the nibabel function returns a different data type depending on the Python version; this function always returns strings, independent of the Python version.

    Parameters
    ----------
    annotation_file: string
        A string representing a path to a FreeSurfer vertex annotation file (e.g., the path to 'lh.aparc.annot').

    hemisphere_label: {'lh' or 'rh'}
        A string representing the hemisphere this file belongs to. This is used to write the correct meta data.

    meta_data: dictionary | None, optional
        Meta data to merge into the output `meta_data`. Defaults to the empty dictionary.

    encoding: string describing an encoding, optional
        The encoding to use when decoding the label strings from binary. Only used in Python 3. Defaults to 'utf-8'.

    orig_ids: boolean, optional
        Passed on to nibabel.freesurfer.io.read_annot function. From the documentation of that function: 'Whether to return the vertex ids as stored in the annotation file or the positional colortable ids. With orig_ids=False vertices with no id have an id set to -1.' Defaults to False.

    Returns
    -------
    vertex_label_colors: ndarray, shape (n_vertices,)
        Contains an annotation color id for each vertex listed in the annotation file. If orig_ids is False (the default), and some vertex has no annotation, -1 is returned for it. IMPORTANT: The annotation value in here is NOT the label id. It is the color for the vertex, encoded in a weird way! Yes, this is ugly. See https://surfer.nmr.mgh.harvard.edu/fswiki/LabelsClutsAnnotationFiles#Annotation for details, especially the section 'Annotation file design surprise'. The color is encoded as a single number. Quoting the linked document, the number is the 'RGB value combined into a single 32-bit integer: annotation value = (B * 256^2) + (G * 256) + (R)'. From this it follows that, quoting the doc once more, 'Code that loads an annotation file ... has to compare annotation values to the color values in the ColorLUT part of the annotation file to discover what parcellation label code (ie: structure code) corresponds.'

    label_colors: ndarray, shape (n_labels, 5)
        RGBT + label id colortable array. The first 4 values encode the label color: RGB is red, green, blue as usual, from 0 to 255 per value. T is the transparency, which is defined as 255 - alpha. The number of labels (n_labels) cannot be known in advance.

    label_names: list of strings
       The names of the labels. The length of the list is n_labels. Note that, contrary to the respective nibabel function, this function will always return this as a list of strings, no matter the Python version used.

    meta_data: dictionary
        Contains detailed information on the data that was loaded. The following keys are available (replace `?h` with the value of the argument `hemisphere_label`, which must be 'lh' or 'rh').
            - `?h.annotation_file` : the file that was loaded
    """
    if hemisphere_label not in ('lh', 'rh'):
        raise ValueError(
            "ERROR: hemisphere_label must be one of {'lh', 'rh'} but is '%s'."
            % hemisphere_label)

    if meta_data is None:
        meta_data = {}

    logging.debug("Reading annotation file '%s'." % (annotation_file))
    vertex_label_colors, label_colors, label_names = fsio.read_annot(
        annotation_file, orig_ids=orig_ids)

    label_file = hemisphere_label + '.annotation_file'
    meta_data[label_file] = annotation_file

    # The nibabel read_annot function returns string under Python 2 and bytes under Python 3, see http://nipy.org/nibabel/reference/nibabel.freesurfer.html#nibabel.freesurfer.io.read_annot.
    # We convert this to strings here so we always return strings.
    try:
        label_names_decoded = [name.decode(encoding) for name in label_names]
        label_names = label_names_decoded
    except AttributeError:
        pass

    return vertex_label_colors, label_colors, label_names, meta_data
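The packed 'annotation value' described in the docstring can be unpacked with plain integer arithmetic. A minimal sketch, not part of the original module, assuming the annotation was read with orig_ids=True so each vertex carries the packed color value:

def decode_annot_value(value):
    """Unpack value = (B * 256**2) + (G * 256) + R into an (R, G, B) tuple."""
    r = value % 256
    g = (value // 256) % 256
    b = (value // 256 ** 2) % 256
    return r, g, b

# The fifth colortable column holds the same packed id, so a vertex's
# structure code is the row whose packed value matches, e.g.:
# row = np.where(label_colors[:, 4] == vertex_label_colors[0])[0]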
Example #27
"""
import scipy as sp
import nibabel.freesurfer.io as fsio
from surfproc import view_patch
from dfsio import writedfs
from nibabel.gifti.giftiio import read as gread

def multidim_intersect(arr1, arr2):
    arr1_view = arr1.view([('',arr1.dtype)]*arr1.shape[1])
    arr2_view = arr2.view([('',arr2.dtype)]*arr2.shape[1])
    intersected = sp.intersect1d(arr1_view, arr2_view)
    return intersected.view(arr1.dtype).reshape(-1, arr1.shape[1])

inputfile = '/home/ajoshi/data/Yeo_JNeurophysiol11_FreeSurfer/fsaverage/label/lh.Yeo2011_17Networks_N1000.annot'
fsavesurf = '/home/ajoshi/data/Yeo_JNeurophysiol11_FreeSurfer/fsaverage/surf/lh.sphere.reg.avg'
yeomap, _, _ = fsio.read_annot(inputfile)
vert, faces = fsio.read_geometry(fsavesurf)


class s:
    pass


s.vertices = vert
s.faces = faces
s.labels = yeomap
view_patch(s, yeomap)

#g_surf = gread('/home/ajoshi/data/HCP_data/reference/100307/MNINonLinear/fsaverage_LR32k/100307.L.sphere.32k_fs_LR.surf.gii')
g_surf = gread('/home/ajoshi/data/HCP_data/reference/100307/MNINonLinear/Native/100307.L.sphere.reg.native.surf.gii')
vert = g_surf.darrays[0].data
face = g_surf.darrays[1].data

class hcp32k:
Example #28
def load(what, parameters, ieeg_file, opts=None):
    """
    WHAT:
      - 'continuous' returns: ChanTime, event_names, events_onsets
      - 'data' returns: ChanTime, event_names
      - 'events' returns: ndarray
      - 'dataglove' returns: ndarray
      - 'electrodes'
      - 'freesurfer'
      - 'pial' returns original freesurfer pial mesh
      - 'surface' returns full mesh which should better reflect brain size
      - 'aparc'
      - 'aparc.a2009s'
      - 'aparc.DKTatlas'
      - 'BA_exvivo'

    EVENT_TYPE:
      - cues : all cues (to open and close)
      - open : cues to open fingers
      - close : cues to close fingers
      - movements : all actual movements (from dataglove)
      - extension : actual extension of all fingers
      - flexion : actual flexion of all fingers
      - realigned : realigned movement
    """
    # validate the event type when loading events (it is passed via opts)
    if what == 'events':
        if opts is None:
            raise ValueError('You need to specify event_type')
        if opts not in ['cues', 'open', 'close', 'movements', 'extension', 'flexion', 'realigned']:
            raise ValueError(f'"{opts}" is not one of the possible event types')

    if what in ('data', 'continuous'):
        if opts is None:
            raise ValueError('You need to specify opts which is a dict with event_type, pre, post')

    ieeg = Task(ieeg_file)

    if what in ('continuous', 'data'):
        events_tsv = load('events', parameters, ieeg_file, opts['event_type'])
        events = events_tsv['trial_type']
        onsets = events_tsv['onset']

        if what == 'continuous':
            data = read_data(parameters, ieeg_file, event_onsets=onsets, opts=opts, continuous=True)
            return data, events, onsets
        elif what == 'data':
            data = read_data(parameters, ieeg_file, event_onsets=onsets, opts=opts, continuous=False)
            return data, events

    if what == 'electrodes':
        pattern = f'sub-{ieeg.subject}_*_acq-{ieeg.acquisition}_electrodes.tsv'
        folder = parameters['paths']['input']

    elif what == 'events':
        if opts in ('cues', 'open', 'close'):
            pattern = f'sub-{ieeg.subject}_*_run-{ieeg.run}_events.tsv'
            folder = parameters['paths']['input']
        elif opts in ('movements', 'extension', 'flexion'):
            pattern = f'sub-{ieeg.subject}_*_run-{ieeg.run}_dataglove.tsv'
            folder = parameters['paths']['movements']
        elif opts in ('realigned', ):
            event_path = name(parameters, 'realign_tsv', ieeg_file)
            pattern = event_path.name
            folder = event_path.parent

    elif what == 'dataglove':
        pattern = f'sub-{ieeg.subject}_*_run-{ieeg.run}_recording-dataglove_physio.tsv.gz'
        folder = parameters['paths']['input']

    elif what in ['pial', 'freesurfer', ] + FS_LABELS:
        pattern = 'sub-' + ieeg.subject
        folder = parameters['paths']['freesurfer_subjects_dir']

    elif what == 'surface':

        elec = load('electrodes', parameters, ieeg_file)
        right_or_left = (elec['x'] > 0).sum() / elec.shape[0]
        if right_or_left > 0.5:
            pattern = 'rh.pial'
        else:
            pattern = 'lh.pial'

        folder = name(parameters, 'surface_dir', ieeg_file)

    else:
        raise ValueError(f'Unrecognized "{what}" selection')

    found = list(folder.rglob(pattern))
    if len(found) == 0:
        raise FileNotFoundError(f'Could not find any file matching {pattern} in {folder}')
    elif len(found) > 1:
        raise ValueError('You need to specify more parameters')
    filename = found[0]

    if what == 'electrodes':
        elec = Electrodes(filename)
        return elec.electrodes.tsv[['name', 'x', 'y', 'z']]

    elif what == 'events':
        with filename.open() as f:
            x = f.readline()
        n_columns = x.count('\t') + 1
        dtypes = [
            ('onset', 'float'),
            ('duration', 'float'),
            ('trial_type', 'U4096'),
            ]
        if n_columns >= 4:
            dtypes.insert(3, ('value', 'int'))
        if n_columns == 5:
            dtypes.insert(3, ('response_time', 'float'))  # if -1, it means that we can reject trial

        events = genfromtxt(filename, delimiter='\t', skip_header=1, dtype=dtypes)

        if n_columns == 4:
            x = empty(len(events), dtype='float')
            x.fill(NaN)
            events = append_fields(events, 'response_time', x, usemask=False)

        return select_events(events, opts)

    elif what == 'dataglove':
        return read_physio(filename)

    elif what == 'pial':
        elec = load('electrodes', parameters, ieeg_file)
        right_or_left = (elec['x'] > 0).sum() / elec.shape[0]
        return read_surf(filename, right_or_left)

    elif what == 'surface':
        return Surf(filename)

    elif what in FS_LABELS:
        fs = load('freesurfer', parameters, ieeg_file)
        pial = load('pial', parameters, ieeg_file)
        hemi = pial.surf_file.stem

        aparc_file = fs.dir / 'label' / f'{hemi}.{what}.annot'
        region_values, region_ctab, region_names = read_annot(aparc_file)

        out = {
            'aparc': what,
            'ras_shift': fs.surface_ras_shift,
            'vert': pial.vert,
            'regions': {
                'values': region_values,
                'names': [x.decode() for x in region_names],
                'colors': read_brainregion_colors(region_names, region_ctab),
                'colorscale': read_brainregion_colorscale(region_ctab),
                }
            }

        return out

    elif what == 'freesurfer':
        return Freesurfer(filename)
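A hypothetical call pattern for the dispatcher above; parameters and ieeg_file are placeholders, and the opts keys follow the docstring:

# Hypothetical usage sketch: load cue events, then epoched data around them.
events = load('events', parameters, ieeg_file, opts='cues')
data, event_names = load('data', parameters, ieeg_file,
                         opts={'event_type': 'cues', 'pre': 0.5, 'post': 1.0})
aparc = load('aparc', parameters, ieeg_file)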
Example #29
def main():
    parser = _build_args_parser()
    args = parser.parse_args()
    logging.basicConfig(level=logging.INFO)

    # make sure the files exist
    if not isfile(args.lh_annot):
        parser.error('The file "{0}" must exist.'.format(args.lh_annot))

    if not isfile(args.rh_annot):
        parser.error('The file "{0}" must exist.'.format(args.rh_annot))

    if not isfile(args.mesh):
        parser.error('The file "{0}" must exist.'.format(args.mesh))

    # make sure files are not accidentally overwritten
    if isfile(args.output):
        if args.overwrite:
            logging.info('Overwriting "{0}".'.format(args.output))
        else:
            parser.error(
                'The file "{0}" already exists. Use -f to overwrite it.'.
                format(args.output))

    if isfile(args.matlab):
        if args.overwrite:
            logging.info('Overwriting "{0}".'.format(args.matlab))
        else:
            parser.error(
                'The file "{0}" already exists. Use -f to overwrite it.'.
                format(args.matlab))

    # load annotation files
    lh_annot = read_annot(args.lh_annot)
    rh_annot = read_annot(args.rh_annot)

    lh_labels = np.array(lh_annot[0])
    rh_labels = np.array(rh_annot[0])

    # apply any given masks
    if args.lh_mask is not None:
        logging.info('Adding mask to LH.')

        mask = np.load(args.lh_mask, allow_pickle=True)['mask']
        lh_labels[mask == 1] = -1

    if args.rh_mask is not None:
        logging.info('Adding mask to RH.')

        mask = np.load(args.rh_mask, allow_pickle=True)['mask']
        rh_labels[mask == 1] = -1

    # load mapping
    mesh = np.load(args.mesh, allow_pickle=True)

    mapping = mesh['mapping']
    shape = mesh['shape']

    logging.info('Number of LH vertices: {0}'.format(shape[1]))
    logging.info('Number of RH vertices: {0}'.format(shape[2]))

    # set the label to the most frequent found in each mapping
    lh_vertices = np.array(
        [stats.mode(lh_labels[mapping[i]])[0][0] for i in range(shape[1])])
    rh_vertices = np.array([
        stats.mode(rh_labels[mapping[i] - shape[4]])[0][0]
        for i in range(shape[1], shape[0])
    ])

    # create the ROI name arrays, skipping regions not present in the mesh
    # (annotation names are byte strings in Python 3, so decode them first)
    idx = np.unique(lh_vertices)
    lh_names = np.concatenate(([n.decode() for n in lh_annot[2]], ['missing']))[idx]

    idx = np.unique(rh_vertices)
    rh_names = np.concatenate(([n.decode() for n in rh_annot[2]], ['missing']))[idx]

    # make sure the label ids correspond to the array indices of the names
    lh_label_map = dict(
        zip(np.unique(lh_vertices), range(len(np.unique(lh_vertices)))))
    new_lh_labels = np.array([lh_label_map[i] for i in lh_vertices])

    rh_label_map = dict(
        zip(np.unique(rh_vertices), range(len(np.unique(rh_vertices)))))
    new_rh_labels = np.array([rh_label_map[i] for i in rh_vertices
                              ]) + np.max(new_lh_labels) + 1

    logging.info('Number of LH ROIs: {0}'.format(len(np.unique(lh_vertices))))
    logging.info('Number of RH ROIs: {0}'.format(len(np.unique(rh_vertices))))

    # group vertices by ROI
    new_order = np.concatenate(
        [np.argsort(lh_vertices),
         np.argsort(rh_vertices) + shape[1]])

    lh_names = ['LH_' + name for name in lh_names]
    rh_names = ['RH_' + name for name in rh_names]

    # save the results
    np.savez_compressed(args.output,
                        sorted_idx=new_order,
                        fs_labels=np.concatenate((lh_vertices, rh_vertices)),
                        sbci_labels=np.concatenate(
                            (new_lh_labels, new_rh_labels)),
                        names=np.concatenate((lh_names, rh_names)),
                        colors=None)

    scio.savemat(
        args.matlab, {
            'sorted_idx': new_order,
            'labels': np.concatenate((new_lh_labels, new_rh_labels)),
            'names': np.concatenate((lh_names, rh_names))
        })
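
The resampling above assigns every mesh vertex the most frequent FreeSurfer label among the original vertices it maps to. A toy sketch of that majority vote (array contents are illustrative; keepdims=True requires SciPy >= 1.9):

import numpy as np
from scipy import stats

dense_labels = np.array([3, 3, 5, 5, 5, 7])                # labels on the dense surface
mapping = [np.array([0, 1, 2]), np.array([2, 3, 4, 5])]    # dense vertices per mesh vertex

coarse_labels = np.array([stats.mode(dense_labels[m], keepdims=True)[0][0]
                          for m in mapping])
print(coarse_labels)  # -> [3 5]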
Example #30
0
# process left hemisphere (input names assumed to mirror the right-hemisphere block below)
yeomapL, _, _ = fsio.read_annot(inputfile_L)
vert, faces = fsio.read_geometry(fsAve_sph_L)
gL = nib.load(fsAve_sph_32k_L)
vert32k = gL.darrays[0].data
faces32k = gL.darrays[1].data
lh_sph.vertices = vert
lh_sph.faces = faces
lh_sph.labels = yeomapL
lh32k.vertices = vert32k
lh32k.faces = faces32k
lh32k = interpolate_labels(lh_sph, lh32k)
gL = nib.load(fsAve_32k_L)
lh32k.vertices = gL.darrays[0].data
lh32k.faces = gL.darrays[1].data
lh32k = patch_color_labels(lh32k)
view_patch_vtk(lh32k)

# process right hemisphere
yeomapR, _, _ = fsio.read_annot(inputfile_R)
vert, faces = fsio.read_geometry(fsAve_sph_R)
gR = nib.load(fsAve_sph_32k_R)
vert32k = gR.darrays[0].data
faces32k = gR.darrays[1].data
rh_sph.vertices = vert
rh_sph.faces = faces
rh_sph.labels = yeomapR
rh32k.vertices = vert32k
rh32k.faces = faces32k
rh32k = interpolate_labels(rh_sph, rh32k)
gR = nib.load(fsAve_32k_R)
rh32k.vertices = gR.darrays[0].data
rh32k.faces = gR.darrays[1].data
rh32k = patch_color_labels(rh32k)
view_patch_vtk(rh32k)
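
interpolate_labels is imported from elsewhere; a common way to transfer labels between two registered spherical surfaces is a nearest-neighbour lookup, sketched here with SciPy as an assumption about what it does, not the actual implementation:

import numpy as np
from scipy.spatial import cKDTree

def nn_transfer_labels(src_vertices, src_labels, dst_vertices):
    # give each destination vertex the label of its nearest source vertex
    tree = cKDTree(src_vertices)
    _, nearest = tree.query(dst_vertices)
    return np.asarray(src_labels)[nearest]

# e.g. rh32k.labels = nn_transfer_labels(rh_sph.vertices, rh_sph.labels, rh32k.vertices)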
Example #31
0
def smooth_aparc(insurfname, inaparcname, incortexname, outaparcname):
    """ (string) -> None
    smoothes aparc
    """
    # read input files
    print("Reading in surface: {} ...".format(insurfname))
    surf = read_geometry(insurfname, read_metadata=True)
    print("Reading in annotation: {} ...".format(inaparcname))
    aparc = fs.read_annot(inaparcname)
    print("Reading in cortex label: {} ...".format(incortexname))
    cortex = fs.read_label(incortexname)
    # set labels (n) and triangles (n x 3)
    labels = aparc[0]
    faces  = surf[1]
    nvert = labels.size
    if labels.size != surf[0].shape[0]:
        sys.exit("ERROR smooth_aparc: vertec count "+format(surf[0].shape[0])+" does not match label length "+format(labels.size))

    # Compute Cortex Mask
    mask = np.zeros(labels.shape, dtype=bool)
    mask[cortex] = True
    # check if we have places where non-cortex has some labels
    noncortnum = np.where(~mask & (labels != -1))
    print("Non-cortex vertices with labels: "+str(noncortnum[0].size))  # count of non-cortex vertices that carry real labels
    # here we need to decide how to deal with them
    # either we set everything outside cortex to -1 (the FS way)
    # or we keep these real labels and allow them to vote, maybe even shrink cortex label? Probably not.

    # get non-cortex ids (here we could subtract the ids that have a real label)
    # for now we remove everything outside cortex
    noncortids = np.where(~mask)
    
    # remove triangles with a non-cortex vertex so those edges cannot vote on neighbors later
    rr = np.in1d(faces, noncortids)
    rr = np.reshape(rr, faces.shape)
    rr = np.amax(rr, 1)
    faces = faces[~rr, :]
    
    # get Edge matrix (adjacency)
    adjM = get_adjM(faces, nvert)
    
    # add identity so that each vertex votes in the mode filter below
    adjM = adjM + sparse.eye(adjM.shape[0])
    
    #print("adj shape: {}".format(adjM.shape))
    #print("v shape: {}".format(surf[0].shape))
    #print("labels shape: {}".format(labels.size))
    #print("labels: {}".format(labels))
    #print("minlab: "+str(np.min(labels))+" maxlab: "+str(np.max(labels)))
    
    # set all labels inside cortex that are -1 or 0 to fill label
    fillonlylabel = np.max(labels)+1
    labels[mask & (labels == -1)] = fillonlylabel
    labels[mask & (labels == 0)]  = fillonlylabel
    # now we do not have any -1 or 0 (except 0 outside of cortex)
    # FILL HOLES
    ids = np.where(labels == fillonlylabel)[0]
    counter = 1
    idssize = ids.size
    while idssize != 0:
        print("Fill Round: "+str(counter))
        labels_new = mode_filter(adjM, labels, fillonlylabel, np.array([fillonlylabel]))
        labels = labels_new
        ids = np.where(labels == fillonlylabel)[0]
        if ids.size == idssize:
            # no more improvement; there may be an island in the cortex label that cannot be filled
            print("Warning: cannot improve, but holes remain. There may be an island in the cortex label that cannot be filled with real labels.")
            fillids = np.where(labels == fillonlylabel)[0]
            labels[fillids] = 0
            rr = np.in1d(faces, fillids)
            rr = np.reshape(rr, faces.shape)
            rr = np.amax(rr, 1)
            faces = faces[~rr, :]
            # get Edge matrix (adjacency)
            adjM = get_adjM(faces, nvert)
            # add identity so that each vertex votes in the mode filter below
            adjM = adjM + sparse.eye(adjM.shape[0])
            break
        idssize = ids.size
        counter += 1
    # SMOOTH the remaining labels (first with a wider kernel, then fine-tune):
    labels = mode_filter(adjM*adjM, labels)
    labels = mode_filter(adjM, labels)
    # set labels outside cortex to -1
    labels[~mask] = -1
    print ("Outputing fixed annot: {}".format(outaparcname))
    fs.write_annot(outaparcname, labels, aparc[1], aparc[2])
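
get_adjM and mode_filter are helpers defined elsewhere in the same module; for orientation, a plausible sketch of the adjacency construction (an assumption, not the module's actual code) builds a sparse vertex-adjacency matrix from the triangle list:

import numpy as np
from scipy import sparse

def get_adjM(faces, nvert):
    # every triangle (a, b, c) contributes the directed edges a->b, b->c, c->a
    i = np.concatenate([faces[:, 0], faces[:, 1], faces[:, 2]])
    j = np.concatenate([faces[:, 1], faces[:, 2], faces[:, 0]])
    adjM = sparse.csr_matrix((np.ones(i.size), (i, j)), shape=(nvert, nvert))
    # symmetrize; only the sparsity pattern matters for the mode filter
    return adjM + adjM.T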
Example #32
0
    # view_patch_vtk(bci)

    bci_bst_mid_sm = smooth_patch(bci_bst_mid, iterations=3000, relaxation=.5)

    for parcels, nets in itertools.product([100, 200, 300, 400, 500, 600, 700, 800, 900, 1000], [7, 17]):
        cmd3 = (f'mri_surf2surf --hemi {fshemi} --srcsubject fsaverage --trgsubject BCI_DNI_Atlas '
                f'--sval-annot /ImagePTE1/ajoshi/code_farm/hybridatlas/Schaefer2018/FreeSurfer5.3/fsaverage/label/'
                f'{fshemi}.Schaefer2018_{parcels}Parcels_{nets}Networks_order.annot '
                f'--tval /big_disk/ajoshi/freesurfer/subjects/BCI_DNI_Atlas/label/'
                f'{fshemi}.Schaefer2018_{parcels}Parcels_{nets}Networks_order.annot')
        #os.system(cmd1 + ';' + cmd2 + ';' + cmd3)
        os.system(cmd3)
        outfile = f'BCI-DNI_Schaefer2018_{parcels}Parcels_{nets}Networks.{hemi}.mid.cortex.dfs'

        # BCI to FS-processed BCI
        bci.labels = np.zeros(bci.vertices.shape[0])
        fslabels, label1, colortable1 = fsio.read_annot(
            f'/big_disk/ajoshi/freesurfer/subjects/BCI_DNI_Atlas/label/'
            f'{fshemi}.Schaefer2018_{parcels}Parcels_{nets}Networks_order.annot')
        fslabels[fslabels < 0] = 0

        bci.labels = fslabels
        bci = patch_color_labels(bci)

        bci_bsti = interpolate_labels(bci, bci_bsti)
        bci_bst_mid.labels = bci_bsti.labels
        bci_bst_mid_sm.labels = bci_bst_mid.labels
        bci_bst_labels = patch_color_labels(bci_bst_mid_sm)
        #view_patch_vtk(bci_bst_labels)
        writedfs(outfile, bci_bst_labels)
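
As a design note, the mri_surf2surf call above is assembled by string concatenation and executed through os.system; passing an argument list to subprocess.run avoids quoting problems and surfaces failures. A sketch under the same assumptions (src_annot and trg_annot are hypothetical names for the two long .annot paths built above):

import subprocess

subprocess.run([
    'mri_surf2surf',
    '--hemi', fshemi,
    '--srcsubject', 'fsaverage',
    '--trgsubject', 'BCI_DNI_Atlas',
    '--sval-annot', src_annot,   # hypothetical name for the source .annot path
    '--tval', trg_annot,         # hypothetical name for the target .annot path
], check=True)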