Example #1
def adaptive_parcellation(fwd, inv, subject, subjects_dir, hemi='both'):
    """ 
    """
    # Magic happens
    # res = mne.minimum_norm.make_inverse_resolution_matrix(fwd, inv)
    # ...

    # However, here's a magicless example
    labels_aparc = mne.read_labels_from_annot(subject, parc='aparc', hemi='lh', 
                                              subjects_dir=subjects_dir)

    # add frontal pole label to lh (aparc labels are sorted by name; index 5 is frontalpole)
    vertices = labels_aparc[5].vertices
    label1 = mne.Label(vertices, hemi='lh', name='frontalpole',
                       subject=subject, color=(1, 0, 0, 1))

    # add label of rightmost vertices to rh
    vertices = inv['src'][1]['vertno']
    vertices = [vv for vv in vertices if inv['src'][1]['rr'][vv, 0] > 0.05]
    label2 = mne.Label(vertices, hemi='rh', name='lateral',
                       subject=subject, color=(0, 1, 0, 1))
    label2 = label2.fill(inv['src'])

    labels = [label1, label2]
    return labels
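
# A minimal usage sketch for the function above. The file names, the
# 'sample' subject, and the subjects_dir path are placeholders, not part
# of the original example.
import mne
fwd = mne.read_forward_solution('sample-fwd.fif')
inv = mne.minimum_norm.read_inverse_operator('sample-inv.fif')
labels = adaptive_parcellation(fwd, inv, 'sample', '/path/to/subjects')
for lab in labels:
    print(lab.name, lab.hemi, len(lab.vertices))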
Example #2
def merge_labels(subject, fmri_names):
    utils.delete_folder_files(op.join(MMVT_DIR, subject, 'fmri', 'labels'))
    vertices_labels_lookup = utils.load(
        op.join(MMVT_DIR, subject,
                'aparc.DKTatlas40_vertices_labels_lookup.pkl'))
    output_fol = utils.make_dir(op.join(MMVT_DIR, subject, 'fmri', 'labels'))
    for fmri_name in fmri_names:
        labels = []
        for hemi in utils.HEMIS:
            surf_fname = op.join(
                MMVT_DIR, subject, 'fmri', 'fmri_{}_{}.npy'.format(
                    fmri_name.replace('_insulaopercula', ''), hemi))
            surf_data = np.load(surf_fname)
            vertices_indices = np.where(surf_data >= 0.95)[0]
            if len(vertices_indices) == 0:
                continue
            insulaopercula_vertices = []
            vertices, _ = utils.read_pial(subject, MMVT_DIR, hemi)
            for vert_ind in tqdm(vertices_indices):
                vert_label = vertices_labels_lookup[hemi][vert_ind]
                # optionally also: vert_label.startswith('parsopercularis')
                if vert_label.startswith('insula'):
                    insulaopercula_vertices.append(vert_ind)
            label = mne.Label(insulaopercula_vertices,
                              vertices[insulaopercula_vertices],
                              hemi=hemi,
                              name=fmri_name,
                              subject=subject)
            labels.append(label)
            # include the hemi in the file name so lh/rh don't overwrite each other
            label.save(op.join(output_fol, '{}-{}.label'.format(fmri_name, hemi)))
        anat.labels_to_annot(subject, atlas=fmri_name, labels=labels)
        anat.calc_labeles_contours(subject, fmri_name)
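
# The core pattern above, in isolation: threshold a per-vertex stat map and
# wrap the surviving vertices in an mne.Label. All names here are
# illustrative, not files produced by the pipeline above.
import numpy as np
import mne
surf_data = np.load('fmri_contrast_lh.npy')  # hypothetical per-vertex map
verts = np.where(surf_data >= 0.95)[0]
label = mne.Label(verts, hemi='lh', name='fmri_roi', subject='sample')
label.save('fmri_roi-lh.label')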
Example #3
def get_volume_labels(volume):
    labels = []
    for val, vol in enumerate(volume):
        vertices = np.sort(vol['vertno'])

        pos = vol['rr'][vertices]

        values = np.full(len(vertices), 200 + val)

        if vol['seg_name'].endswith('lh'):
            hemi = 'lh'
        elif vol['seg_name'].endswith('rh'):
            hemi = 'rh'
        else:
            continue  # skip segments without an lh/rh suffix (hemi would be undefined)
        lab = mne.Label(vertices=vertices,
                        pos=pos,
                        values=values,
                        hemi=hemi,
                        comment=vol['seg_name'],
                        name=vol['seg_name'],
                        filename=None,
                        subject=vol['subject_his_id'],
                        verbose=None)
        labels.append(lab)

    return labels
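
# Hedged usage sketch: `volume` is a volume source space whose elements
# carry 'seg_name' and 'subject_his_id' (e.g. built with
# mne.setup_volume_source_space and a volume_label list). Note the function
# above infers hemisphere from an 'lh'/'rh' suffix on seg_name, so the
# segmentation names must follow that convention.
labels = get_volume_labels(vol_src)  # vol_src: mne.SourceSpaces (assumed)
for lab in labels:
    print(lab.name, lab.hemi, len(lab.vertices))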
Example #4
def loadannot_gifti(parcname,
                    subject,
                    subjects_dir,
                    labnam=None,
                    surf_type='pial',
                    surf_struct=None,
                    quiet=False):

    import numpy as np
    from nibabel import gifti

    fname = os.path.join(subjects_dir, subject, 'label',
                         'lh.%s.%sgii' % (parcname, '%s'))
    fname = match_gifti_intent(fname, 'label')

    annot_lh = gifti.read(parse.hemineutral(fname) % 'lh')
    annot_rh = gifti.read(parse.hemineutral(fname) % 'rh')

    # unpack the annotation data
    labdict_lh = parse.appendhemis(annot_lh.labeltable.get_labels_as_dict(),
                                   "lh_")
    # list() so the two hemispheres can be concatenated below (Python 3)
    labv_lh = list(map(labdict_lh.get, annot_lh.darrays[0].data))

    labdict_rh = parse.appendhemis(annot_rh.labeltable.get_labels_as_dict(),
                                   "rh_")
    labv_rh = list(map(labdict_rh.get, annot_rh.darrays[0].data))

    labv = labv_lh + labv_rh

    #return labv
    #The objective is now to create MNE label files for these on the fly

    vertices = np.vstack((surf_struct.lh_verts, surf_struct.rh_verts))
    mne_labels = []

    for lab in labnam:
        cur_lab_verts = np.flatnonzero(np.array(labv) == lab)
        cur_lab_pos = vertices[cur_lab_verts]

        cur_lab = mne.Label(cur_lab_verts,
                            pos=cur_lab_pos / 1000,
                            hemi=lab[:2],
                            name=parse.demangle_hemi(lab))
        mne_labels.append(cur_lab)

    return mne_labels
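
# For reference, the same left-hemisphere unpacking with the current
# nibabel API (gifti.read is deprecated); the path is illustrative.
import nibabel as nib
img = nib.load('lh.aparc.label.gii')
names = img.labeltable.get_labels_as_dict()  # {label key: region name}
labv_lh = ['lh_' + names[k] for k in img.darrays[0].data]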
Example #5
def label_from_annot(sss, subject, subjects_dir, parc=None, color=(0, 0, 0)):
    """Label for known regions of a source space

    Parameters
    ----------
    sss : mne.SourceSpaces
        Source space.
    subject : str
        MRI-subject.
    subjects_dir : str
        MRI subjects-directory.
    parc : str
        Parcellation name.
    color : matplotlib color
        Label color.

    Returns
    -------
    label : mne.Label
        Label encompassing known regions of ``parc`` in ``sss``.
    """
    fname = SourceSpace._ANNOT_PATH.format(subjects_dir=subjects_dir,
                                           subject=subject,
                                           hemi='%s',
                                           parc=parc)

    # find vertices for each hemisphere
    labels = []
    for hemi, ss in zip(('lh', 'rh'), sss):
        annotation, _, names = read_annot(fname % hemi)
        bad = [-1, names.index(b'unknown')]
        keep = ~np.in1d(annotation[ss['vertno']], bad)
        if np.any(keep):
            label = mne.Label(ss['vertno'][keep], hemi=hemi, color=color)
            labels.append(label)

    # combine hemispheres
    if len(labels) == 2:
        lh, rh = labels
        return lh + rh
    elif len(labels) == 1:
        return labels.pop(0)
    else:
        raise RuntimeError("No vertices left")
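
# A minimal usage sketch (paths and subject are placeholders; `SourceSpace`
# and `read_annot` come from the surrounding module):
import mne
sss = mne.read_source_spaces('sample-oct6-src.fif')
label = label_from_annot(sss, 'sample', '/path/to/subjects', parc='aparc')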
Example #6
def create_functional_rois(subject,
                           contrast_name,
                           clusters_labels_fname='',
                           func_rois_folder=''):
    if clusters_labels_fname == '':
        clusters_labels_fname = op.join(
            BLENDER_ROOT_DIR, subject, 'fmri',
            'clusters_labels_{}.npy'.format(contrast_name))
    # load the clusters from the (possibly default) file name
    clusters_labels = utils.load(clusters_labels_fname)
    if func_rois_folder == '':
        func_rois_folder = op.join(SUBJECTS_DIR, subject, 'mmvt', 'fmri',
                                   'functional_rois',
                                   '{}_labels'.format(contrast_name))
    utils.delete_folder_files(func_rois_folder)
    for cl in clusters_labels:
        cl_name = 'fmri_{}_{:.2f}'.format(cl['name'], cl['max'])
        new_label = mne.Label(cl['vertices'],
                              cl['coordinates'],
                              hemi=cl['hemi'],
                              name=cl_name,
                              filename=None,
                              subject=subject,
                              verbose=None)
        new_label.save(op.join(func_rois_folder, cl_name))
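
# The pattern in isolation: an mne.Label built from explicit vertices and
# per-vertex positions (all values here are illustrative).
import numpy as np
import mne
verts = np.array([10, 20, 30])
pos = np.zeros((3, 3))  # one (x, y, z) position in meters per vertex
lab = mne.Label(verts, pos, hemi='lh', name='demo_roi', subject='sample')
lab.save('/tmp/demo_roi')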
Example #7
# let's plot this matrix
fig, ax = plt.subplots(figsize=(4, 4))
ax.imshow(corr, cmap='viridis', clim=np.percentile(corr, [5, 95]))
fig.tight_layout()

##############################################################################
# Compute the degree and plot it
# ------------------------------

# sphinx_gallery_thumbnail_number = 2
threshold_prop = 0.15  # percentage of strongest edges to keep in the graph
degree = mne.connectivity.degree(corr, threshold_prop=threshold_prop)
stc = mne.labels_to_stc(labels, degree)
stc = stc.in_label(
    mne.Label(inv['src'][0]['vertno'], hemi='lh') +
    mne.Label(inv['src'][1]['vertno'], hemi='rh'))
brain = stc.plot(clim=dict(kind='percent', lims=[75, 85, 95]),
                 colormap='gnuplot',
                 subjects_dir=subjects_dir,
                 views='dorsal',
                 hemi='both',
                 smoothing_steps=25,
                 time_label='Beta band')

##############################################################################
# References
# ----------
# .. [1] Hipp JF, Hawellek DJ, Corbetta M, Siegel M, Engel AK (2012)
#        Large-scale cortical correlation structure of spontaneous
#        oscillatory activity. Nature Neuroscience 15:884–890
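
# For context: `labels`, `corr`, and `inv` are not defined in this excerpt.
# In MNE's envelope-correlation tutorial they come from a pipeline like the
# sketch below, where `epochs`, `inv`, and `subjects_dir` were built earlier
# (all assumed here).
from mne.minimum_norm import apply_inverse_epochs
stcs = apply_inverse_epochs(epochs, inv, lambda2=1. / 9., pick_ori='normal',
                            return_generator=True)
labels = mne.read_labels_from_annot('sample', parc='aparc_sub',
                                    subjects_dir=subjects_dir)
label_ts = mne.extract_label_time_course(stcs, labels, inv['src'],
                                         return_generator=True)
corr = mne.connectivity.envelope_correlation(label_ts, verbose=True)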
Example #8
labels = mne.labels_from_parc('fsaverage', parc='Yeo2011_7Networks_N1000')[0]
net_labels = labels[:-2]  # the last two are the medial wall
# fill them in so we can morph them later
for label in net_labels:
    label.values.fill(1.0)
labels = mne.labels_from_parc('fsaverage', parc='aparc')[0]
# Because each label represents a whole network, I'll need to compute
# connectivity among all sources within the label instead of extracting one
# time course per label. But that turned out to be a HUGE matrix, so let's
# identify the intersection of aparc labels and Yeo labels, and just use
# those. We need to compute the intersection on fsaverage to make sure every
# subject has the same number of labels.
avg_intersects = []
for net in net_labels:
    label_intersect = []
    for l in labels:
        verts_in_both = np.intersect1d(l.vertices, net.vertices)
        if len(verts_in_both) > 0:
            label_intersect.append(
                mne.Label(vertices=verts_in_both,
                          hemi=net.hemi,
                          subject=net.subject,
                          name=net.name))
    avg_intersects.append(label_intersect)
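
# The fill() above makes the network labels morphable; a hedged sketch of
# that later step for one subject ('subj01' and subjects_dir are assumed):
subj_labels = [lab.copy().morph(subject_from='fsaverage', subject_to='subj01',
                                subjects_dir=subjects_dir)
               for lab in net_labels]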

for subj in subjs[95:115]:
    er_fname = empty_room_dir + 'empty_room_' + closest_empty_room[
        subj] + '_raw.fif'
    raw_fname = data_dir + 'fifs/rest/%s_rest_LP100_CP3_DS300_raw.fif' % subj
    fwd_fname = data_dir + 'analysis/rest/%s_rest_LP100_CP3_DS300_raw-5-fwd.fif' % subj
    forward = mne.read_forward_solution(fwd_fname, surf_ori=True)
    raw = mne.fiff.Raw(raw_fname, preload=True, compensation=3)
    er_raw = mne.fiff.Raw(er_fname, preload=True, compensation=3)
    picks = mne.fiff.pick_channels_regexp(raw.info['ch_names'], 'M..-*')
    raw.filter(l_freq=1, h_freq=50, picks=picks)
    er_raw.filter(l_freq=1, h_freq=50, picks=picks)
Example #9

#%% find eye and arm vertices of primary motor cortex
eyelabels = {}
armlabels = {}
for hemi in ['lh', 'rh']:
    mlabels = mne.read_labels_from_annot('fsaverage', parc='HCPMMP1_motor',
                                         hemi=hemi)
    m1label, = mne.read_labels_from_annot('fsaverage', parc='HCPMMP1_5_8',
                                          hemi=hemi, regexp='_4_')
    
    get_label = lambda pat: [label for label in mlabels 
                             if label.name.find(pat) >= 0][0]
    
    eyelabels[hemi] = mne.Label(
            np.intersect1d(m1label.vertices, get_label('Ocular').vertices),
            hemi=hemi, name='M1_eye', subject=m1label.subject)
    armlabels[hemi] = mne.Label(
            np.intersect1d(m1label.vertices, 
                           get_label('UpperExtremity').vertices),
            hemi=hemi, name='M1_arm', subject=m1label.subject)
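
# A quick sanity check of the intersection labels above; a hedged sketch
# assuming PySurfer is installed and fsaverage is available.
from surfer import Brain
brain = Brain('fsaverage', 'lh', 'inflated')
brain.add_label(eyelabels['lh'], color='red')
brain.add_label(armlabels['lh'], color='blue')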


#%% load data
subjects = helpers.find_available_subjects(megdatadir=helpers.megdatadir)

srcfile = os.path.join(ss.bem_dir, srcfile)

epochs_mean = pd.read_hdf(srcfile, 'epochs_mean')
epochs_std = pd.read_hdf(srcfile, 'epochs_std')
Example #10
temp = temp3.in_label(V1_label_lh)
v1_vertices = temp.vertices[0]

###############################################################################
""" Just to visualize the new ROI """
mask = np.logical_and(times >= 0.08, times <= 0.12)

lh_label = temp3.in_label(V1_label_lh)
data = np.max(lh_label.data[:, mask], axis=1)
lh_label.data[data < 1.72] = 0.

temp_labels, _ = mne.stc_to_label(lh_label, src='fsaverage', smooth=False,
                                  subjects_dir=fs_dir, connected=False)
temp = temp3.in_label(temp_labels)
v1_vertices = temp.vertices[0]
new_label = mne.Label(v1_vertices, hemi='lh')
brain3_1.add_label(new_label, borders=True, color='k')
###############################################################################
mask = np.logical_and(times >= 0.38, times <= 0.42)
lh_label = temp3.in_label(TE2p_label_lh)
data = np.mean(lh_label.data[:, mask], axis=1)
lh_label.data[data < 1.72] = 0.

temp_labels, _ = mne.stc_to_label(lh_label, src='fsaverage', smooth=False,
                                  subjects_dir=fs_dir, connected=False)
temp = temp3.in_label(temp_labels)
ventral_vertices = temp.vertices[0]

lh_label = temp3.in_label(PH_label_lh)
data = np.mean(lh_label.data[:, mask], axis=1)
lh_label.data[data < 1.72] = 0.
Example #11
    def simulate_raw_data(self):
        """
        Simulates raw data
        """
        def expand_sim_stc_data(orig_times, t_index, data, interval):
            template = np.zeros((data.shape[0], len(orig_times)))
            template[:, t_index:t_index + interval] = data
            return template

        print("Generating simulated raw data...")

        if not self.data_template:
            self.data_template = self.__create_data_template()

        if self.n_dipoles == 1:
            time_steps = int(self.samples_per_dipole * self.spacing_t_steps *
                             (1 + self.empty_signal))
        else:
            time_steps = int(self.samples_per_dipole *
                             (self.n_simulations + self.spacing_t_steps) *
                             (1 + self.empty_signal))

        data_template = self.data_template.copy()
        data_template = data_template.crop(
            0.,
            data_template.times[time_steps])  # Crop a suitable piece of data
        t_delta = np.mean(np.gradient(data_template.times))
        times = np.arange(time_steps) * t_delta
        sim_stc = simulate_sparse_stc(
            self.src, n_dipoles=0, times=times,
            data_fun=self.data_func)  # Create empty time series

        s = 0
        lh_labels = mne.Label(self.lh_labels, hemi="lh")
        rh_labels = mne.Label(self.rh_labels, hemi="rh")

        lh_labels.values = np.zeros(lh_labels.values.shape)
        rh_labels.values = np.zeros(rh_labels.values.shape)

        # TODO: selection of hemisphere(s)
        self.times_per_dipole = int(self.samples_per_dipole *
                                    (1 + self.empty_signal))
        if self.n_dipoles == 1:
            print(
                "Simulating 1 dipole in each vertex of the right hemisphere.")

            for rh in range(0, len(rh_labels.values)):
                template = sim_stc.copy()
                #lh_labels2 = lh_labels
                #lh_labels2.values[rh] = 1.
                rh_labels2 = rh_labels.copy()  # copy so the weight resets each loop
                rh_labels2.values[rh] = 1.
                sim_stc2 = simulate_sparse_stc(
                    self.src,
                    n_dipoles=1,
                    times=times[s:s + self.times_per_dipole],
                    data_fun=self.data_func,
                    labels=[rh_labels2])
                # TODO: use labels for deterministically simulating all vertices
                # expand all estimates to a common set of vertices (in place)
                sim_stc.expand(sim_stc2.vertices)
                sim_stc2.expand(sim_stc.vertices)
                template.expand(sim_stc2.vertices)
                template.data[:, :] = expand_sim_stc_data(
                    times, s, sim_stc2.data, self.times_per_dipole)
                sim_stc += template
                s += self.times_per_dipole

        else:
            print("Simulating up to %s dipoles on the right hemisphere." %
                  self.n_dipoles)

            for t in range(0, len(times), self.times_per_dipole):
                template = sim_stc.copy()

                dipoles = random.randint(2, self.n_dipoles)

                label_indices = sorted(
                    random.sample(range(0, len(rh_labels)), dipoles))
                chosen_labels = []

                for l in label_indices:
                    # use a fresh copy so each chosen label keeps its own
                    # weight of 1; appending one shared, re-zeroed object
                    # would leave every entry identical and all-zero
                    chosen_label = rh_labels.copy()
                    chosen_label.values[l] = 1.
                    chosen_labels.append(chosen_label)

                # TODO there may be a bug in MNE-Python 0.14.1 where forward model dipoles are mapped to non-unique
                # source space vertices, e.g. in this case rh vertices 298 and 411 both map to source space vertex
                # 112071 which raises an error in source_estimate.py at line 434

                try:
                    sim_stc2 = simulate_sparse_stc(
                        self.src,
                        n_dipoles=dipoles,
                        times=times[t:t + self.times_per_dipole],
                        data_fun=self.data_func,
                        labels=chosen_labels)
                except ValueError:
                    s -= self.times_per_dipole
                    continue
                # expand all estimates to a common set of vertices (in place)
                sim_stc.expand(sim_stc2.vertices)
                sim_stc2.expand(sim_stc.vertices)
                template.expand(sim_stc2.vertices)
                template.data[:, :] = expand_sim_stc_data(
                    times, t, sim_stc2.data, self.times_per_dipole)
                sim_stc.data[:, :] = sim_stc.data + template.data
                s += self.times_per_dipole  # advance the sample cursor
                if s >= self.n_simulations:
                    break

        # Remove unnecessary zeros from series
        # sim_stc._data = sim_stc.data[:, ~np.all(abs(sim_stc.data) < 1e-20, axis=0)]  # TODO: add some zeroes to compensate?
        #sim_stc.times = sim_stc.times[:sim_stc.data.shape[1]]

        #template_times = np.arange(sim_stc.data.shape[1]) * (t_delta * (1 + self.empty_signal))
        #data_template = data_template.crop(0., template_times[-1])

        self.sim_data = simulate_raw(data_template,
                                     sim_stc,
                                     self.trans_path,
                                     self.src,
                                     self.bem_path,
                                     cov='simple',
                                     iir_filter=self.iir_filter,
                                     ecg=self.ecg,
                                     blink=self.blink,
                                     n_jobs=CPU_THREADS,
                                     verbose=self.verbose,
                                     use_cps=True)
        # self.cov = self.compute_covariance(self.simulate_evokeds(self.sim_data))
        self.sim_data = self.raw_preprocessing(self.sim_data)
        self.sim_stc = sim_stc
        self.cov = mne.compute_raw_covariance(self.sim_data,
                                              reject=self.reject,
                                              n_jobs=CPU_THREADS)
        return self.sim_data, self.sim_stc
Example #12
    w_vertices = PGi_label_lh.vertices

    temp = PGs_label_lh.vertices
    w_vertices = np.unique(np.append(w_vertices, temp))
    """ V1 """
    v1_vertices = V1_label_lh.vertices

    os.chdir(os.path.join(raw_dir, session1[n]))
    os.chdir('inverse')

    fn = 'Conditions_40-sss_eq_' + session1[n] + '-ave.fif'
    fn_inv = session1[n] + '-inv.fif'
    inv = mne.minimum_norm.read_inverse_operator(fn_inv, verbose=None)

    temp_frontal_label_lh = mne.Label(broca_vertices,
                                      hemi='lh',
                                      subject=subs[n])
    temp_temporal_label_lh = mne.Label(w_vertices, hemi='lh', subject=subs[n])
    temp_ventral_label_lh = mne.Label(ventral_vertices,
                                      hemi='lh',
                                      subject=subs[n])
    temp_v1_label_lh = mne.Label(v1_vertices, hemi='lh', subject=subs[n])

    frontal_label_lh = temp_frontal_label_lh.morph(subject_from='fsaverage',
                                                   subject_to=subs[n],
                                                   subjects_dir=fs_dir,
                                                   n_jobs=18)
    temporal_label_lh = temp_temporal_label_lh.morph(subject_from='fsaverage',
                                                     subject_to=subs[n],
                                                     subjects_dir=fs_dir,
                                                     n_jobs=18)
Example #13
        # compute ROI level envelope power
        aec = envelope_correlation(label_ts)
        assert aec.shape == (len(rois), len(rois))
        # compute ROI laplacian as per Ginset
        # TODO cite paper
        _, deg_lap = csgraph.laplacian(aec, return_diag=True)
        data[si, ix] = deg_lap

        # compute ROI degree
        degree = mne.connectivity.degree(aec, threshold_prop=0.2)
        # if not np.allclose(deg_lap, degree):
        #     warnings.warn("mne.connectivity.degree NOT equal to laplacian")
        stc = mne.labels_to_stc(rois, degree)
        stc = stc.in_label(
            mne.Label(inv["src"][0]["vertno"], hemi="lh")
            + mne.Label(inv["src"][1]["vertno"], hemi="rh")
        )
        morph = mne.compute_source_morph(
            stc,
            subject_from=None,
            subject_to="fsaverage",
            subjects_dir=defaults.subjects_dir,
        )
        a_lst[kk].append(morph.apply(stc))
    A_lst.append(a_lst)

arrayostcs = np.array([[v[ii] for ii in defaults.bands] for v in A_lst]).squeeze()
src = mne.read_source_spaces(
    op.join(defaults.subjects_dir, "fsaverage", "bem", "fsaverage-oct6-src.fif")
)
Example #14
    vertnos = used_nodes[used_vertnos]
    return vertnos


closest_vertnos = closest_nodes(stim_coords['surf'], surf[0], fwd['src'][0]['vertno'])
stim_vert = surf[0][closest_vertnos]

pulse = 5e-6
def data_fun(times):
    # biphasic stimulation pulse: 1000 zero samples, 4 pulse samples,
    # 997 zero samples (2001 samples total, matching `times` below)
    data = np.hstack((np.repeat(0, 1000), -pulse, -pulse, pulse, pulse,
                      np.repeat(0, 997)))
    return data

times = np.linspace(-0.5, 0.5, 2001)


lab = mne.Label(closest_vertnos, subject=subj,
                pos=stim_vert, hemi='lh', name='stim')


stc = simulate_sparse_stc(fwd1['src'], n_dipoles=1, times=times, location='center', subject=subj,
                          subjects_dir=subjects_dir, random_state=42, labels=[lab], data_fun=data_fun)


plot_sparse_source_estimates(fwd1['src'], stc, high_resolution=True)

fwd['src'].plot(trans=trans, subjects_dir=subjects_dir)

info['sfreq'] = 2000
#iir_filter = fit_iir_model_raw(raw, order=5, picks=picks, tmin=60, tmax=180)[1]
nave = 30  # simulate an average of 30 epochs
evoked_sim = simulate_evoked(fwd, stc, info, cov, nave=nave, use_cps=True,
                             iir_filter=None)
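
# If a realistic noise model is wanted, the commented-out line above can be
# enabled, following MNE's simulate-evoked example (`raw` and `picks` are
# assumed to exist):
from mne.time_frequency import fit_iir_model_raw
iir_filter = fit_iir_model_raw(raw, order=5, picks=picks, tmin=60, tmax=180)[1]
evoked_sim = simulate_evoked(fwd, stc, info, cov, nave=nave, use_cps=True,
                             iir_filter=iir_filter)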