Code Example #1
    def apply_cortical_parcellation_event_stcs(self,
                                               stcs,
                                               src,
                                               save=True,
                                               gen_mode=True):

        labels = mne.read_labels_from_annot(self.subject)
        self.labels = [lbl for lbl in labels if lbl.name != 'unknown-lh']
        stc_path = 'stcs/'
        self.stc_cp = dict()

        for key, event_stcs in stcs.items():
            stc_sub_path = stc_path + key + '/'
            event_stcs_cp = np.zeros((68, 500, 5))
            for event_id, event_stc in event_stcs.items():
                event_stc_path = stc_sub_path + event_id + '.csv'
                if gen_mode:
                    label_tc = mne.extract_label_time_course(event_stc,
                                                             self.labels,
                                                             src,
                                                             mode='pca_flip')
                else:
                    label_tc = np.genfromtxt(event_stc_path, delimiter=',')
                event_stcs_cp[:, :, int(event_id) - 1] = label_tc
                if save:
                    np.savetxt(event_stc_path, label_tc, delimiter=',')
            self.stc_cp[key] = event_stcs_cp
        return self.stc_cp
Code Example #2
def sources_to_labels(stcs,
                      age=None,
                      template=None,
                      parc='aparc',
                      mode='mean_flip',
                      allow_empty=True,
                      return_generator=False,
                      subjects_dir=None,
                      include_vol_src=True):
    template = __validate_template__(age, template, subjects_dir)
    montage, trans, bem_model, bem_solution, src = get_bem_artifacts(
        template, subjects_dir=subjects_dir, include_vol_src=include_vol_src)

    labels_parc = mne.read_labels_from_annot(template,
                                             subjects_dir=subjects_dir,
                                             parc=parc)
    labels_ts = mne.extract_label_time_course(
        stcs,
        labels_parc,
        src,
        mode=mode,
        allow_empty=allow_empty,
        return_generator=return_generator)

    if include_vol_src:
        labels_aseg = mne.get_volume_labels_from_src(src, template,
                                                     subjects_dir)
        labels = labels_parc + labels_aseg
    else:
        labels = labels_parc

    return labels_ts, labels
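
A minimal usage sketch for sources_to_labels, assuming stcs is a list of SourceEstimate objects in template space and that the helpers it calls (__validate_template__, get_bem_artifacts) are importable from the same module; all inputs below are hypothetical.

# Hypothetical call: one 'aparc' label time-course matrix per input STC,
# surface sources only (include_vol_src=False skips the volume labels).
labels_ts, labels = sources_to_labels(stcs,
                                      template='fsaverage',
                                      parc='aparc',
                                      mode='mean_flip',
                                      include_vol_src=False)
print(len(labels), labels_ts[0].shape)  # n_labels, then (n_labels, n_times)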
Code Example #3
def cal_labelts(stcs_path, fn_func_list, condition='LLst',
                min_subject='fsaverage', subjects_dir=None):
    '''
    Extract label time courses from specific (functional) ROIs, and store them
    for further causality analysis.

    Parameters
    ----------
    stcs_path : string
        The path of the stc epochs.
    fn_func_list : string
        The path of a file listing the paths of the functional labels.
    condition : string
        The condition of the experiment.
    min_subject : string
        The common (template) subject, e.g. 'fsaverage'.
    subjects_dir : string
        The FreeSurfer subjects directory.
    '''
    path_list = get_files_from_list(stcs_path)
    minpath = subjects_dir + '/%s' % (min_subject)
    srcpath = minpath + '/bem/fsaverage-ico-5-src.fif'
    src_inv = mne.read_source_spaces(srcpath)
    # loop across all filenames
    for stcs_path in path_list:
        caupath = stcs_path[:stcs_path.rfind('/%s' % condition)]
        fn_stcs_labels = caupath + '/%s_labels_ts.npy' % (condition)
        _, _, files = next(os.walk(stcs_path))
        trials = len(files) // 2  # each trial has a -lh.stc and a -rh.stc file
        # Get unfiltered and morphed stcs
        stcs = []
        for i in range(trials):
            fn_stc = stcs_path + 'trial%d_fsaverage' % i
            stc = mne.read_source_estimate(fn_stc + '-lh.stc',
                                           subject=min_subject)
            stcs.append(stc)
        # Get common labels
        list_file = fn_func_list
        with open(list_file, 'r') as fl:
            file_list = [line.rstrip('\n') for line in fl]
        rois = []
        labels = []
        for f in file_list:
            label = mne.read_label(f)
            labels.append(label)
            rois.append(label.name)
        # Extract stcs in common labels
        label_ts = mne.extract_label_time_course(stcs, labels, src_inv,
                                                 mode='pca_flip')
        # make label_ts's shape as (sources, samples, trials)
        label_ts = np.asarray(label_ts).transpose(1, 2, 0)
        np.save(fn_stcs_labels, label_ts)
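
A hedged invocation sketch for cal_labelts; the directory layout and file names below are placeholders, chosen to match what the loop above expects (trial%d_fsaverage-lh.stc/-rh.stc pairs inside each condition folder) and assuming get_files_from_list accepts a list of directories as well as a single path.

# Hypothetical call (all paths are placeholders).
stcs_dirs = ['/data/meg/subj01/LLst/', '/data/meg/subj02/LLst/']
cal_labelts(stcs_dirs,
            fn_func_list='/data/meg/functional_label_list.txt',
            condition='LLst',
            min_subject='fsaverage',
            subjects_dir='/data/freesurfer/subjects')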
Code Example #4
def epochs_to_labels_mne(epochs,
                         labels,
                         inv,
                         lambda2=1.0 / (3.0**2),
                         method='MNE',
                         mode='pca_flip'):
    src = inv['src']
    stcs = mne.minimum_norm.apply_inverse_epochs(epochs,
                                                 inv,
                                                 lambda2,
                                                 method,
                                                 return_generator=True)
    labels_data = mne.extract_label_time_course(stcs, labels, src, mode=mode)
    labels_data = np.array(labels_data)
    labels_data = np.transpose(labels_data, (1, 2, 0))
    return labels_data
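
A short usage sketch, assuming epochs, a list of labels, and an inverse operator inv already exist; the shape comment reflects the transpose performed inside the function.

# Hypothetical usage: per-epoch label time courses as a 3-D array.
labels_data = epochs_to_labels_mne(epochs, labels, inv,
                                   method='MNE', mode='pca_flip')
print(labels_data.shape)  # (n_labels, n_times, n_epochs)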
Code Example #5
def source_space_connectivity(sub, parcellation, target_labels, inverse_method, lambda2, con_methods,
                              con_fmin, con_fmax, n_jobs, enable_ica):
    info = sub.load_info()
    if enable_ica:
        all_epochs = sub.load_ica_epochs()
    else:
        all_epochs = sub.load_epochs()
    inverse_operator = sub.load_inverse_operator()
    src = inverse_operator['src']

    # Turn checked-dict into list
    selected_con_methods = [m for m in con_methods if con_methods[m]]

    con_dict = {}
    for trial in all_epochs.event_id:
        con_dict[trial] = {}
        epochs = all_epochs[trial]
        # Compute the inverse solution for each epoch. With return_generator=True,
        # stcs will be a generator object instead of a list.
        stcs = mne.minimum_norm.apply_inverse_epochs(epochs, inverse_operator, lambda2, inverse_method,
                                                     pick_ori="normal", return_generator=True)

        # Get labels for the chosen cortical parcellation (e.g. FreeSurfer
        # 'aparc' with 34 labels/hemi)
        labels = mne.read_labels_from_annot(sub.subtomri, parc=parcellation,
                                            subjects_dir=sub.subjects_dir)

        actual_labels = [lb for lb in labels if lb.name in target_labels]

        # Average the source estimates within each label using sign-flips to reduce
        # signal cancellations, also here we return a generator

        label_ts = mne.extract_label_time_course(stcs, actual_labels,
                                                 src, mode='mean_flip',
                                                 return_generator=True)

        sfreq = info['sfreq']  # the sampling frequency
        con, freqs, times, n_epochs, n_tapers = mne.connectivity.spectral_connectivity(
                label_ts, method=selected_con_methods, mode='multitaper', sfreq=sfreq, fmin=con_fmin,
                fmax=con_fmax, faverage=True, mt_adaptive=True, n_jobs=n_jobs)

        # con is a 3D array; get the connectivity for the first (and only)
        # frequency band for each con_method
        for con_method, c in zip(selected_con_methods, con):
            con_dict[trial][con_method] = c

    sub.save_connectivity(con_dict)
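
A hedged sketch of a call to source_space_connectivity; `sub` is assumed to be the pipeline's subject object providing the load_* and save_connectivity methods used above, and the "checked" dict maps connectivity measure names to booleans, as the list comprehension at the top of the function expects.

# Hypothetical call (sub and the label names are placeholders).
con_methods = {'coh': True, 'pli': True, 'wpli': False}  # "checked" dict
source_space_connectivity(sub,
                          parcellation='aparc',
                          target_labels=['precentral-lh', 'precentral-rh'],
                          inverse_method='dSPM',
                          lambda2=1.0 / 3.0 ** 2,
                          con_methods=con_methods,
                          con_fmin=8.,
                          con_fmax=13.,
                          n_jobs=1,
                          enable_ica=False)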
Code Example #6
def _compute_mean_ROIs(stc, sbj_id, subjects_dir, parc, inverse_operator,
                       forward, aseg, is_fixed):
    # these coordinates are in MRI space and have to be converted to MNI space
    labels_cortex = mne.read_labels_from_annot(sbj_id,
                                               parc=parc,
                                               subjects_dir=subjects_dir)

    print(('\n*** %d ***\n' % len(labels_cortex)))

    src = inverse_operator['src']

    # allow_empty : bool -> Instead of emitting an error, return all-zero time
    # courses for labels that do not have any vertices in the source estimate

    if is_fixed:
        mode = 'mean_flip'
    else:
        mode = 'mean'

    label_ts = mne.extract_label_time_course(stc,
                                             labels_cortex,
                                             src,
                                             mode=mode,
                                             allow_empty=True,
                                             return_generator=False)

    # save results in .npy file that will be the input for spectral node
    print('\n*** SAVE ROI TS ***\n')
    print((len(label_ts)))

    if aseg:
        print(sbj_id)
        labels_aseg = get_volume_labels_from_src(src, sbj_id, subjects_dir)
        labels = labels_cortex + labels_aseg
    else:
        labels = labels_cortex
        labels_aseg = None

    print((labels[0].pos))
    print((len(labels)))

    labels_file, label_names_file, label_coords_file = \
        _create_MNI_label_files(forward, labels_cortex, labels_aseg,
                                sbj_id, subjects_dir)

    return label_ts, labels_file, label_names_file, label_coords_file
Code Example #7
File: Avnielish.py  Project: gsudre/research_code
    picks = mne.fiff.pick_channels_regexp(raw.info["ch_names"], "M..-*")
    raw.filter(l_freq=1, h_freq=50, picks=picks)
    er_raw.filter(l_freq=1, h_freq=50, picks=picks)

    noise_cov = mne.compute_raw_data_covariance(er_raw)
    # note that MNE reads CTF data as magnetometers!
    noise_cov = mne.cov.regularize(noise_cov, raw.info, mag=noise_reg)
    inverse_operator = mne.minimum_norm.make_inverse_operator(raw.info, forward, noise_cov, loose=0.2, depth=0.8)
    data, time = raw[0, :]
    events = fg.get_good_events(markers[subj], time, window_length)

    epochs = mne.Epochs(raw, events, None, 0, window_length, preload=True, baseline=None, detrend=0, picks=picks)
    stcs = mne.minimum_norm.apply_inverse_epochs(epochs, inverse_operator, lambda2, "MNE", return_generator=False)

    labels, label_colors = mne.labels_from_parc(subj, parc="aparc")
    label_ts = mne.extract_label_time_course(stcs, labels, forward["src"], mode=label_mode)

    # label_data is nlabels by time, so here we can use whatever connectivity method we fancy
    con, freqs, times, n_epochs, n_tapers = mne.connectivity.spectral_connectivity(
        label_ts,
        method=method,
        mode="multitaper",
        sfreq=raw.info["sfreq"],
        fmin=[1, 4, 8, 13, 30],
        fmax=[4, 8, 13, 30, 50],
        faverage=True,
        n_jobs=3,
        mt_adaptive=False,
    )
    np.save(dir_out + subj + "-" + label_mode + "-" + "-".join(method), con)
Code Example #8
##############################################################################
# Compute label time series and do envelope correlation
# -----------------------------------------------------

labels = mne.read_labels_from_annot(subject,
                                    'aparc_sub',
                                    subjects_dir=subjects_dir)
epochs.apply_hilbert()  # faster to apply in sensor space
stcs = apply_inverse_epochs(epochs,
                            inv,
                            lambda2=1. / 9.,
                            pick_ori='normal',
                            return_generator=True)
label_ts = mne.extract_label_time_course(stcs,
                                         labels,
                                         inv['src'],
                                         return_generator=True)
corr = envelope_correlation(label_ts, verbose=True)

# let's plot this matrix
fig, ax = plt.subplots(figsize=(4, 4))
ax.imshow(corr, cmap='viridis', clim=np.percentile(corr, [5, 95]))
fig.tight_layout()

##############################################################################
# Compute the degree and plot it
# ------------------------------

# sphinx_gallery_thumbnail_number = 2
threshold_prop = 0.15  # percentage of strongest edges to keep in the graph
degree = mne.connectivity.degree(corr, threshold_prop=threshold_prop)
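
A possible follow-up (a sketch, not part of the original snippet): map the per-label degree values back onto the cortical surface with mne.labels_to_stc and plot them, reusing the labels, subject, and subjects_dir defined above.

# Sketch: turn the per-label degree values into a SourceEstimate and plot it.
stc_degree = mne.labels_to_stc(labels, degree)
brain = stc_degree.plot(subject=subject, subjects_dir=subjects_dir,
                        hemi='both', views='lateral',
                        clim=dict(kind='percent', lims=[75, 85, 95]))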
Code Example #9
snr = 1.
lambda2 = 1. / snr**2

labels = mne.read_labels_from_annot(
    subject=subject, parc="PALS_B12_Brodmann", regexp="Brodmann")

condition = "interupt"

inv = read_inverse_operator(mne_folder + "%s_%s-inv.fif" % (subject, condition))
epochs = mne.read_epochs(epochs_folder + "%s_%s-epo.fif" % (subject, condition))
# epochs.resample(500)

stcs = apply_inverse_epochs(
    epochs["press"], inv, lambda2, method=method, pick_ori=None)
ts = [
    mne.extract_label_time_course(
        stc, labels, inv["src"], mode="mean_flip") for stc in stcs
]

# for h, tc in enumerate(ts):
#     for j, t in enumerate(tc):
#         t *= np.sign(t[np.argmax(np.abs(t))])
#         tc[j, :] = t
#     ts[h] = tc

ts = np.asarray(ts)
# note: 'stc' is the comprehension variable from the loop above (it leaks only
# in Python 2), so this saves the estimate of the last epoch
stc.save(source_folder + "%s_%s_epo" % (subject, condition))
np.save(source_folder + "ave_ts/%s_%s_ts-epo.npy" % (subject, condition), ts)
Code Example #10
def compute_rois_inv_sol(raw_filename,
                         sbj_id,
                         sbj_dir,
                         fwd_filename,
                         cov_fname,
                         is_epoched=False,
                         events_id=[],
                         t_min=None,
                         t_max=None,
                         is_evoked=False,
                         snr=1.0,
                         inv_method='MNE',
                         parc='aparc',
                         aseg=False,
                         aseg_labels=[],
                         save_stc=False,
                         is_fixed=False):
    """Compute the inverse solution on raw/epoched data.

    This function returns the average time series computed in the N_r regions
    of the source space defined by the specified cortical parcellation.

    Parameters
    ----------
    raw_filename : str
        filename of the raw/epoched data
    sbj_id : str
        subject name
    sbj_dir : str
        Freesurfer directory
    fwd_filename : str
        filename of the forward operator
    cov_fname : str
        filename of the noise covariance matrix
    is_epoched : bool
        if True and events_id = None the input data are epoch data
        in the format -epo.fif
        if True and events_id is not None, the raw data are epoched
        according to events_id and t_min and t_max values
    events_id : dict
        the dict of events
    t_min, t_max : float
        define the time interval in which to epoch the raw data
    is_evoked : bool
        if True the raw data will be averaged according to the events
        contained in the dict events_id
    inv_method : str
        the inverse method to use; possible choices: MNE, dSPM, sLORETA
    snr : float
        the SNR value used to define the regularization parameter
    parc : str
        the parcellation defining the ROIs atlas in the source space
    aseg : bool
        if True a mixed source space will be created and the sub cortical
        regions defined in aseg_labels will be added to the source space
    aseg_labels : list
        list of substructures we want to include in the mixed source space
    save_stc : bool
        if True the stc will be saved
    is_fixed : bool
        if True the forward operator is converted to fixed orientation and
        'mean_flip' is used to extract the label time courses

    Returns
    -------
    ts_file : str
        filename of the file where the ROI time series are saved
    labels_file : str
        filename of the file where the ROIs of the parcellation are saved
    label_names_file : str
        filename of the file where the names of the ROIs of the parcellation
        are saved
    label_coords_file : str
        filename of the file where the coordinates of the centroids of the
        ROIs of the parcellation are saved
    """
    import os.path as op
    import numpy as np
    import mne

    from mne.io import read_raw_fif
    from mne import read_epochs
    from mne.minimum_norm import make_inverse_operator, apply_inverse_raw
    from mne.minimum_norm import apply_inverse_epochs, apply_inverse
    from mne import get_volume_labels_from_src

    from nipype.utils.filemanip import split_filename as split_f

    from ephypype.preproc import create_reject_dict
    from ephypype.source_space import create_mni_label_files

    try:
        traits.undefined(events_id)
    except NameError:
        events_id = None

    print(('\n*** READ raw filename %s ***\n' % raw_filename))
    if is_epoched and events_id is None:
        epochs = read_epochs(raw_filename)
        info = epochs.info
    else:
        raw = read_raw_fif(raw_filename, preload=True)
        #        raw.set_eeg_reference()
        info = raw.info

    subj_path, basename, ext = split_f(raw_filename)

    print(('\n*** READ noise covariance %s ***\n' % cov_fname))
    noise_cov = mne.read_cov(cov_fname)

    print(('\n*** READ FWD SOL %s ***\n' % fwd_filename))
    forward = mne.read_forward_solution(fwd_filename)

    if not aseg:
        print(('\n*** fixed orientation {} ***\n'.format(is_fixed)))
        forward = mne.convert_forward_solution(forward,
                                               surf_ori=True,
                                               force_fixed=is_fixed)

    lambda2 = 1.0 / snr**2

    # compute inverse operator
    print('\n*** COMPUTE INV OP ***\n')
    if is_fixed:
        loose = None
        depth = None
        pick_ori = None
    elif aseg:
        loose = 1
        depth = None
        pick_ori = None
    else:
        loose = 0.2
        depth = 0.8
        pick_ori = 'normal'

    print(('\n *** loose {}  depth {} ***\n'.format(loose, depth)))
    inverse_operator = make_inverse_operator(info,
                                             forward,
                                             noise_cov,
                                             loose=loose,
                                             depth=depth,
                                             fixed=is_fixed)

    # apply inverse operator to the time windows [t_start, t_stop]s
    print('\n*** APPLY INV OP ***\n')
    if is_epoched and events_id is not None:
        events = mne.find_events(raw)
        picks = mne.pick_types(info, meg=True, eog=True, exclude='bads')
        reject = create_reject_dict(info)

        if is_evoked:
            epochs = mne.Epochs(raw,
                                events,
                                events_id,
                                t_min,
                                t_max,
                                picks=picks,
                                baseline=(None, 0),
                                reject=reject)
            evoked = [epochs[k].average() for k in events_id]
            snr = 3.0
            lambda2 = 1.0 / snr**2

            ev_list = list(events_id.items())
            for k in range(len(events_id)):
                stc = apply_inverse(evoked[k],
                                    inverse_operator,
                                    lambda2,
                                    inv_method,
                                    pick_ori=pick_ori)

                print(('\n*** STC for event %s ***\n' % ev_list[k][0]))
                stc_file = op.abspath(basename + '_' + ev_list[k][0])

                print('***')
                print(('stc dim ' + str(stc.shape)))
                print('***')

                if not aseg:
                    stc.save(stc_file)

        else:
            epochs = mne.Epochs(raw,
                                events,
                                events_id,
                                t_min,
                                t_max,
                                picks=picks,
                                baseline=(None, 0),
                                reject=reject)
            stc = apply_inverse_epochs(epochs,
                                       inverse_operator,
                                       lambda2,
                                       inv_method,
                                       pick_ori=pick_ori)

            print('***')
            print(('len stc %d' % len(stc)))
            print('***')

    elif is_epoched and events_id is None:
        stc = apply_inverse_epochs(epochs,
                                   inverse_operator,
                                   lambda2,
                                   inv_method,
                                   pick_ori=pick_ori)
        print('***')
        print(('len stc %d' % len(stc)))
        print('***')
    else:
        stc = apply_inverse_raw(raw,
                                inverse_operator,
                                lambda2,
                                inv_method,
                                label=None,
                                start=None,
                                stop=None,
                                buffer_size=1000,
                                pick_ori=pick_ori)  # None 'normal'

        print('***')
        print(('stc dim ' + str(stc.shape)))
        print('***')

    if not isinstance(stc, list):
        stc = [stc]

    if save_stc:
        for i in range(len(stc)):
            stc_file = op.abspath(basename + '_stc_' + str(i) + '.npy')
            np.save(stc_file, stc[i].data)

    # these coordinates are in MRI space and have to be converted to MNI space
    labels_cortex = mne.read_labels_from_annot(sbj_id,
                                               parc=parc,
                                               subjects_dir=sbj_dir)

    print(('\n*** %d ***\n' % len(labels_cortex)))

    src = inverse_operator['src']

    # allow_empty : bool -> Instead of emitting an error, return all-zero time
    # courses for labels that do not have any vertices in the source estimate

    if is_fixed:
        mode = 'mean_flip'
    else:
        mode = 'mean'

    label_ts = mne.extract_label_time_course(stc,
                                             labels_cortex,
                                             src,
                                             mode=mode,
                                             allow_empty=True,
                                             return_generator=False)

    # save results in .npy file that will be the input for spectral node
    print('\n*** SAVE ROI TS ***\n')
    print((len(label_ts)))

    ts_file = op.abspath(basename + '_ROI_ts.npy')
    np.save(ts_file, label_ts)

    if aseg:
        print(sbj_id)
        labels_aseg = get_volume_labels_from_src(src, sbj_id, sbj_dir)
        labels = labels_cortex + labels_aseg
    else:
        labels = labels_cortex
        labels_aseg = None

    print((labels[0].pos))
    print((len(labels)))

    # labels_file, label_names_file, label_coords_file = \
    # create_label_files(labels)
    labels_file, label_names_file, label_coords_file = \
        create_mni_label_files(forward, labels_cortex, labels_aseg,
                               sbj_id, sbj_dir)

    return ts_file, labels_file, label_names_file, label_coords_file
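
A hedged invocation sketch for compute_rois_inv_sol with placeholder file names; the argument choices follow the docstring above (epoch the raw data around the events in events_id, apply the 'dSPM' inverse, and average within the 'aparc' ROIs).

# Hypothetical call (all paths are placeholders).
ts_file, labels_file, label_names_file, label_coords_file = \
    compute_rois_inv_sol('/data/meg/subj01_raw.fif',
                         sbj_id='subj01',
                         sbj_dir='/data/freesurfer/subjects',
                         fwd_filename='/data/meg/subj01-fwd.fif',
                         cov_fname='/data/meg/subj01-cov.fif',
                         is_epoched=True,
                         events_id={'stim': 1},
                         t_min=-0.2,
                         t_max=0.5,
                         inv_method='dSPM',
                         parc='aparc')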
Code Example #11
# labels = mne.read_labels_from_annot('subject_1', parc='aparc.DKTatlas40',
#                                     subjects_dir=subjects_dir)

for cond in epochs.event_id.keys():
    stcs = apply_inverse_epochs(epochs[cond], inverse_operator, lambda2,
                                method, pick_ori="normal")
    exec("stcs_%s = stcs" % cond)

labels_name = [label.name for label in labels_occ]

# Extract time series
ts_ctl_left = mne.extract_label_time_course(stcs_ctl_left,
                                            labels_occ,
                                            src=inverse_operator["src"],
                                            mode="mean_flip")

ts_ent_left = mne.extract_label_time_course(stcs_ent_left,
                                            labels_occ,
                                            src=inverse_operator["src"],
                                            mode="mean_flip")

stcs_all_left = stcs_ctl_left + stcs_ent_left
ts_all_left = np.asarray(
    mne.extract_label_time_course(stcs_all_left,
                                  labels_occ,
                                  src=inverse_operator["src"],
                                  mode="mean_flip"))

number_of_permutations = 2000
index = np.arange(0, len(ts_all_left))
Code Example #12
                        preload=True,
                        baseline=None,
                        detrend=0,
                        picks=picks)
    stcs = mne.beamformer.lcmv_epochs(epochs,
                                      forward,
                                      noise_cov.as_diag(),
                                      data_cov,
                                      reg=data_reg,
                                      pick_ori='max-power')

    for net in avg_intersects:
        subj_labels = [label.morph('fsaverage', subj) for label in net]
        label_ts = mne.extract_label_time_course(stcs,
                                                 subj_labels,
                                                 forward['src'],
                                                 mode=label_mode,
                                                 allow_empty=True)
        con, freqs, times, n_epochs, n_tapers = mne.connectivity.spectral_connectivity(
            label_ts,
            method=method,
            mode='multitaper',
            sfreq=raw.info['sfreq'],
            fmin=[1, 4, 8, 13, 30],
            fmax=[4, 8, 13, 30, 50],
            faverage=True,
            n_jobs=3,
            mt_adaptive=False)
        np.save(
            dir_out + subj + '-' + net[0].name + '-' + label_mode + '-' +
            '-'.join(method), con)
Code Example #13
    for first in range(start, stop, step):
        last = first + step
        if last >= stop:
            last = stop
        raw_segment = raw_data[:, first:last]
        mu += raw_segment.sum(axis=1)
        data += np.dot(raw_segment, raw_segment.T)
        n_samples += raw_segment.shape[1]
    mu /= n_samples
    data -= n_samples * mu[:, None] * mu[None, :]
    data /= (n_samples - 1.0)
    ch_names = [raw.info['ch_names'][k] for k in picks]
    data_cov = mne.Covariance(None)
    data_cov.update(kind=mne.fiff.FIFF.FIFFV_MNE_NOISE_COV, diag=False, 
        dim=len(data), names=ch_names, data=data, 
        projs=cp.deepcopy(raw.info['projs']), bads=raw.info['bads'], 
        nfree=n_samples, eig=None, eigvec=None)

    noise_cov = mne.compute_raw_data_covariance(er_raw)
    # note that MNE reads CTF data as magnetometers!
    noise_cov = mne.cov.regularize(noise_cov, raw.info, mag=noise_reg)
    events = fg.get_good_events(markers[subj], time, window_length)

    epochs = mne.Epochs(raw, events, None, 0, window_length, preload=True, baseline=None, detrend=0, picks=picks)
    stcs = mne.beamformer.lcmv_epochs(epochs, forward, noise_cov.as_diag(), data_cov, reg=data_reg, pick_ori='max-power')

    for net in avg_intersects:
        subj_labels = [label.morph('fsaverage',subj) for label in net]
        label_ts = mne.extract_label_time_course(stcs, subj_labels, forward['src'], mode=label_mode, allow_empty=True)
        con, freqs, times, n_epochs, n_tapers = mne.connectivity.spectral_connectivity(label_ts, method=method, mode='multitaper', sfreq=raw.info['sfreq'], fmin=[1,4,8,13,30], fmax=[4,8,13,30,50], faverage=True, n_jobs=3, mt_adaptive=False)
        np.save(dir_out + subj + '-' + net[0].name + '-' + label_mode +'-' + '-'.join(method), con)
Code Example #14
                                    subjects_dir=mri_dir)

    # HG and STC labels
    rois = ['transversetemporal', 'superiortemporal']
    hemis = ['lh', 'rh']

    lbs = []
    for roi in rois:
        for hemi in hemis:
            lbs.append([
                label for label in labels
                if label.name == '%s-%s' % (roi, hemi)
            ][0])

    print("Extracting labels...")
    stcs = extract_label_time_course(stc_epochs, lbs, src)

    stc_arr = np.transpose(np.array(stcs), (1, 0, 2))

    print("Saving array!")
    # save as numpy array
    np.save(file=stc_fname, arr=stc_arr)

    print("Next subject!")

###############################################################################
##############  MAKE GRAND AVERAGE MOVIE OF SINGLE SUBJECT EVOKED #############
###############################################################################
snr = 3.0  # Standard assumption for average data
lambda2 = 1.0 / snr**2
Code Example #15
	inv = mne.minimum_norm.read_inverse_operator( participant + '/' + participant + '_fixed_inv.fif')

	# apply inverse to epochs #

	snr = 1.0   # Lower SNR for single trial data
	lambda2 = 1.0 / snr ** 2
	method = 'dSPM'  # how do you want to apply the inverse solution?
	pick_ori = None

	stcs = mne.minimum_norm.apply_inverse_epochs(epochs, inv, lambda2, method, pick_ori=pick_ori)

	# extract time course:

	label_dir = subjects_dir + "/labels/"
	label = mne.read_label( label_dir + "TTG-lh.label")
	src = inv['src']

	extract_time_course = mne.extract_label_time_course(stcs, label, src, mode = 'mean')


	# squeeze out the pesky third dimension
	squeezed = numpy.squeeze(extract_time_course)
	squeezed.shape


	# save to a numpy array on disk
	numpy.savetxt( by_trial_path + '/' + participant + '_' + experiment + "_TTG_epochs.csv", squeezed, delimiter=",")
	print(participant + " done!!! congratulations!!! :)")

# and to load again:
# data = numpy.load(participant + '/' + participant + '_' + experiment + "_epochs.npy")
Code Example #16
                                                    eog=150e-6))

# Compute inverse solution and for each epoch. Note that since we are passing
# the output to both extract_label_time_course and the phase_slope_index
# functions, we have to use "return_generator=False", since it is only possible
# to iterate over generators once.
snr = 1.0  # use lower SNR for single epochs
lambda2 = 1.0 / snr ** 2
stcs = apply_inverse_epochs(epochs, inverse_operator, lambda2, method,
                            pick_ori="normal", return_generator=False)

# Now, we generate seed time series by averaging the activity in the left
# visual cortex
label = mne.read_label(fname_label)
src = inverse_operator['src']  # the source space used
seed_ts = mne.extract_label_time_course(stcs, label, src, mode='mean_flip')

# Combine the seed time course with the source estimates. There will be a total
# of 7500 signals:
# index 0: time course extracted from label
# index 1..7499: dSPM source space time courses
comb_ts = zip(seed_ts, stcs)

# Construct indices to estimate connectivity between the label time course
# and all source space time courses
vertices = [src[i]['vertno'] for i in range(2)]
n_signals_tot = 1 + len(vertices[0]) + len(vertices[1])

indices = seed_target_indices([0], np.arange(1, n_signals_tot))

# Compute the PSI in the frequency range 8Hz..30Hz. We exclude the baseline
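
The snippet is truncated here; a hedged sketch of the phase-slope-index step that the comment refers to might look like the following, taking the 8-30 Hz range and the post-baseline start time from the comment and assuming the epochs object from the truncated Epochs construction above (the function lives in mne.connectivity in the MNE versions these examples target).

# Hedged sketch (not part of the original snippet): PSI between the seed label
# time course (index 0) and every source time course, 8-30 Hz, from t=0 on.
from mne.connectivity import phase_slope_index

psi, freqs, times, n_epochs, _ = phase_slope_index(
    comb_ts, mode='multitaper', indices=indices,
    sfreq=epochs.info['sfreq'], fmin=8., fmax=30., tmin=0.)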
Code Example #17
File: extract_ts.py  Project: MadsJensen/Hyp_MEG_MNE
# %%
stcsNormal = apply_inverse_epochs(epochs, inverse_operator, lambda2,
                                method, pick_ori="normal",
                                return_generator=True)

# Get labels for FreeSurfer 'aparc' cortical parcellation with 34 labels/hemi
labels = mne.read_labels_from_annot('subject_1', parc='aparc.DKTatlas40',
                                    subjects_dir=subjects_dir)


# Average the source estimates within each label using sign-flips to reduce
# signal cancellations, also here we return a generator
src = inverse_operator['src']
labelTsNormal = mne.extract_label_time_course(stcsNormal, labels, src,
                                            mode='mean_flip',
                                            return_generator=False)


# %%
from nitime import TimeSeries
from nitime.analysis import MTCoherenceAnalyzer
from nitime.viz import drawmatrix_channels

f_up = 13  # upper limit
f_lw = 8  # lower limit

cohMatrixNormal = np.empty([np.shape(labelTsNormal)[1], np.shape(labelTsNormal)[1],
                          np.shape(labelTsNormal)[0]])

labels_name = []
Code Example #18

# Load data
fname_inv = mne_folder + "%s-inv.fif" % subject
inv = mne.minimum_norm.read_inverse_operator(fname_inv)
fname_evoked = epochs_folder + "%s_filtered_ica_mc_tsss-ave.fif" % subject
evokeds = mne.read_evokeds(fname_evoked, baseline=(None, 0))
src = mne.read_source_spaces(mne_folder + "%s-oct-6-src.fif" % subject)

for evk in evokeds:
    stc = apply_inverse(evk, inv, lambda2=lambda2,  
                        method=method)
    exec("stc_%s_%s = stc" % (subject, evk.comment))


# src = mne.read_source_spaces(mne_folder + "%s-oct-6-src.fif" % subject)
labels = mne.read_labels_from_annot(subject, parc='PALS_B12_Lobes',
                                    # regexp="Bro",
                                    subjects_dir=subjects_dir)
labels_occ = [labels[9], labels[10], labels[9]+labels[10]]

lbl_ent_left = mne.extract_label_time_course(stc_0006_ent_left,
                                             labels=[labels[9]],
                                             src=src,
                                             mode="pca_flip")

lbl_ctl_left = mne.extract_label_time_course(stc_0006_ctl_left,
                                             labels=[labels[9]],
                                             src=src,
                                             mode="pca_flip")
Code Example #19
    epochs = mne.read_epochs(epochs_folder +
                             "%s_ds_filtered_ica_mc_tsss-epo.fif" % subject)
    # epochs.resample(250, n_jobs=4)

    for condition in conditions:
        stcs = apply_inverse_epochs(epochs[condition],
                                    inverse_operator,
                                    lambda2,
                                    method,
                                    pick_ori="normal")

        for label in labels_occ:
            label_ts = []
            for j in range(len(stcs)):
                label_ts.append(mne.extract_label_time_course(stcs[j],
                                                              labels=label,
                                                              src=src,
                                                              mode="mean_flip"))

            label_ts = np.squeeze(np.asarray(label_ts))

            tfr = cwt_morlet(label_ts, epochs.info["sfreq"], freqs,
                             use_fft=True, n_cycles=n_cycle)

            np.save(tf_folder + "%s_%s_%s_MNE-tfr" % (subject, condition,
                                                      label.name),
                    tfr)

        del stcs
        del tfr

    del epochs
Code Example #20
File: get_roi.py  Project: keenieayla/nfb-1
                            inv_method,
                            pick_ori=None,
                            return_generator=True)

# Get labels for FreeSurfer 'aparc' cortical parcellation with 34 labels/hemi
labels_parc = mne.read_labels_from_annot(subject,
                                         parc=parc,
                                         subjects_dir=subjects_dir)

# Average the source estimates within each label of the cortical parcellation
# and each substructure contained in the src space.
# If mode='mean_flip', this option is applied only to the cortical labels
src = inverse_operator['src']
label_ts = mne.extract_label_time_course(stcs,
                                         labels_parc,
                                         src,
                                         mode='mean_flip',
                                         allow_empty=True,
                                         return_generator=False)

# We compute the connectivity in the alpha band and plot it using a circular
# graph layout
fmin = 8.
fmax = 13.
sfreq = raw.info['sfreq']  # the sampling frequency
con, freqs, times, n_epochs, n_tapers = spectral_connectivity(
    label_ts,
    method='pli',
    mode='multitaper',
    sfreq=sfreq,
    fmin=fmin,
    fmax=fmax,
Code Example #21
    src = inverse_operator['src'] 
    
    snr = 1.0  # use lower SNR for single epochs
    lambda2 = 1.0 / snr ** 2
    
    # Read labels for V1 and MT 
    v1_label = mne.read_label(datapath + 'Results_Alpha_and_Gamma/' + subject + '/' + subject + '_V1_rh.label')
    #v4_label = mne.read_label(datapath + 'Results_Alpha_and_Gamma/' + subject + '/' + subject + '_V4_rh.label')
    mt_label = mne.read_label(datapath + 'Results_Alpha_and_Gamma/' + subject + '/' + subject + '_MT_rh.label')

    # Compute inverse solution for each epochs 
    stcs_fast = apply_inverse_epochs(allepochs, inverse_operator, lambda2, method='sLORETA',
                                     pick_ori="normal")
    
    # Extract time courses from vertices
    seed_ts_fast_v1 = mne.extract_label_time_course(stcs_fast, v1_label, src, mode='mean_flip', verbose='error')
    #seed_ts_fast_v4 = mne.extract_label_time_course(stcs_fast, v4_label, src, mode='mean_flip', verbose='error')
    seed_ts_fast_mt = mne.extract_label_time_course(stcs_fast, mt_label, src, mode='mean_flip', verbose='error')

    comb_ts_fast = list(zip(seed_ts_fast_v1, seed_ts_fast_mt))
    sfreq = allepochs.info['sfreq'] 
    
    # Create signals input
    datarray = np.asarray(comb_ts_fast)
    signal = np.transpose(datarray.mean(2),(2,0,1)) #(401,78,2))
    
    # Compute granger causality
    m = Multitaper(signal, sfreq, time_halfbandwidth_product=2, start_time=-0.8, n_tapers=1)
    c = Connectivity(fourier_coefficients=m.fft(), frequencies=m.frequencies)
    granger = c.pairwise_spectral_granger_prediction()
    
Code Example #22
def _compute_power_envelopes(subject, kind, freqs):

    ###########################################################################
    # Compute source space
    # -------------------
    src = mne.setup_source_space(subject,
                                 spacing='oct6',
                                 add_dist=False,
                                 subjects_dir=cfg.mne_camcan_freesurfer_path)
    trans = trans_map[subject]
    bem = cfg.mne_camcan_freesurfer_path + \
        "/%s/bem/%s-meg-bem.fif" % (subject, subject)

    ###########################################################################
    # Handle MEG data
    # ---------------

    fname = op.join(cfg.camcan_meg_raw_path, subject, kind,
                    '%s_raw.fif' % kind)

    raw = mne.io.read_raw_fif(fname)
    mne.channels.fix_mag_coil_types(raw.info)
    if DEBUG:
        # raw.crop(0, 180)
        raw.crop(0, 120)
    else:
        raw.crop(0, 300)

    raw = _run_maxfilter(raw, subject, kind)
    _compute_add_ssp_exg(raw)

    # get empty room
    fname_er = op.join(cfg.camcan_meg_path, "emptyroom", subject,
                       "emptyroom_%s.fif" % subject)

    raw_er = mne.io.read_raw_fif(fname_er)
    mne.channels.fix_mag_coil_types(raw_er.info)

    raw_er = _run_maxfilter(raw_er, subject, kind, coord_frame="meg")
    raw_er.info["projs"] += raw.info["projs"]

    cov = mne.compute_raw_covariance(raw_er, method='oas')
    # computed before band-pass filtering to the band of interest

    event_length = 5.
    event_overlap = 0.
    raw_length = raw.times[-1]
    events = mne.make_fixed_length_events(raw,
                                          duration=event_length,
                                          start=0,
                                          stop=raw_length - event_length)

    #######################################################################
    # Compute the forward and inverse
    # -------------------------------

    info = mne.Epochs(raw,
                      events=events,
                      tmin=0,
                      tmax=event_length,
                      baseline=None,
                      reject=None,
                      preload=False,
                      decim=10).info
    fwd = mne.make_forward_solution(info, trans, src, bem)
    inv = make_inverse_operator(info, fwd, cov)
    del fwd

    #######################################################################
    # Compute label time series and do envelope correlation
    # -----------------------------------------------------
    mne_subjects_dir = "/storage/inria/agramfor/MNE-sample-data/subjects"
    labels = mne.read_labels_from_annot('fsaverage',
                                        'aparc_sub',
                                        subjects_dir=mne_subjects_dir)
    labels = mne.morph_labels(labels,
                              subject_from='fsaverage',
                              subject_to=subject,
                              subjects_dir=cfg.mne_camcan_freesurfer_path)
    labels = [ll for ll in labels if 'unknown' not in ll.name]

    results = dict()
    for fmin, fmax, band in freqs:
        print(f"computing {subject}: {fmin} - {fmax} Hz")
        this_raw = raw.copy()
        this_raw.filter(fmin, fmax, n_jobs=1)
        reject = _get_global_reject_epochs(this_raw, decim=5)

        this_raw.apply_hilbert(envelope=False)

        epochs = mne.Epochs(this_raw,
                            events=events,
                            tmin=0,
                            tmax=event_length,
                            baseline=None,
                            reject=reject,
                            preload=True,
                            decim=5)
        if DEBUG:
            epochs = epochs[:3]

        result = {
            'subject': subject,
            'fmin': fmin,
            'fmax': fmax,
            'band': band,
            'label_names': [ll.name for ll in labels]
        }

        stcs = apply_inverse_epochs(epochs,
                                    inv,
                                    lambda2=1. / 9.,
                                    pick_ori='normal',
                                    method='MNE',
                                    return_generator=True)

        label_ts = np.concatenate(mne.extract_label_time_course(
            stcs, labels, inv['src'], mode="pca_flip", return_generator=False),
                                  axis=-1)

        result['cov'], _ = oas(np.abs(label_ts).T, assume_centered=False)

        for orth in ("pairwise", False):
            corr = envelope_correlation(label_ts[np.newaxis],
                                        combine="mean",
                                        orthogonalize=orth)
            result[f"corr{'_orth' if orth else ''}"] = corr[np.triu_indices(
                len(corr))]

        results[band] = result

        if False:  # failsafe mode with intermediate steps written out
            out_fname = op.join(
                cfg.derivative_path,
                f'{subject + ("-debug" if DEBUG else "")}_'
                f'power_envelopes_{band}.h5')

            mne.externals.h5io.write_hdf5(out_fname, result, overwrite=True)
    return results
Code Example #23
File: script.py  Project: kalenkovich/rteegvis
                                                    eog=150e-6))

# Compute inverse solution and for each epoch. Note that since we are passing
# the output to both extract_label_time_course and the phase_slope_index
# functions, we have to use "return_generator=False", since it is only possible
# to iterate over generators once.
snr = 1.0  # use lower SNR for single epochs
lambda2 = 1.0 / snr ** 2
stcs = apply_inverse_epochs(epochs, inverse_operator, lambda2, method,
                            pick_ori="normal", return_generator=False)

# Now, we generate seed time series by averaging the activity in the left
# visual cortex
label = mne.read_label(fname_label)
src = inverse_operator['src']  # the source space used
seed_ts = mne.extract_label_time_course(stcs, label, src, mode='mean_flip')

# Combine the seed time course with the source estimates. There will be a total
# of 7500 signals:
# index 0: time course extracted from label
# index 1..7499: dSPM source space time courses
comb_ts = zip(seed_ts, stcs)

# Construct indices to estimate connectivity between the label time course
# and all source space time courses
vertices = [src[i]['vertno'] for i in range(2)]
n_signals_tot = 1 + len(vertices[0]) + len(vertices[1])

indices = seed_target_indices([0], np.arange(1, n_signals_tot))

# Compute the PSI in the frequency range 8Hz..30Hz. We exclude the baseline
Code Example #24
def test_extract_label_time_course():
    """Test extraction of label time courses from stc
    """
    n_stcs = 3
    n_times = 50

    src = read_inverse_operator(fname_inv)['src']
    vertices = [src[0]['vertno'], src[1]['vertno']]
    n_verts = len(vertices[0]) + len(vertices[1])

    # get some labels
    labels_lh = read_labels_from_annot('sample',
                                       hemi='lh',
                                       subjects_dir=subjects_dir)
    labels_rh = read_labels_from_annot('sample',
                                       hemi='rh',
                                       subjects_dir=subjects_dir)
    labels = list()
    labels.extend(labels_lh[:5])
    labels.extend(labels_rh[:4])

    n_labels = len(labels)

    label_means = np.arange(n_labels)[:, None] * np.ones((n_labels, n_times))
    label_maxs = np.arange(n_labels)[:, None] * np.ones((n_labels, n_times))

    # compute the mean with sign flip
    label_means_flipped = np.zeros_like(label_means)
    for i, label in enumerate(labels):
        label_means_flipped[i] = i * np.mean(label_sign_flip(label, src))

    # generate some stc's with known data
    stcs = list()
    for i in range(n_stcs):
        data = np.zeros((n_verts, n_times))
        # set the value of the stc within each label
        for j, label in enumerate(labels):
            if label.hemi == 'lh':
                idx = np.intersect1d(vertices[0], label.vertices)
                idx = np.searchsorted(vertices[0], idx)
            elif label.hemi == 'rh':
                idx = np.intersect1d(vertices[1], label.vertices)
                idx = len(vertices[0]) + np.searchsorted(vertices[1], idx)
            data[idx] = label_means[j]

        this_stc = SourceEstimate(data, vertices, 0, 1)
        stcs.append(this_stc)

    # test some invalid inputs
    assert_raises(ValueError,
                  extract_label_time_course,
                  stcs,
                  labels,
                  src,
                  mode='notamode')

    # have an empty label
    empty_label = labels[0].copy()
    empty_label.vertices += 1000000
    assert_raises(ValueError,
                  extract_label_time_course,
                  stcs,
                  empty_label,
                  src,
                  mode='mean')

    # but this works:
    tc = extract_label_time_course(stcs,
                                   empty_label,
                                   src,
                                   mode='mean',
                                   allow_empty=True)
    for arr in tc:
        assert_true(arr.shape == (1, n_times))
        assert_array_equal(arr, np.zeros((1, n_times)))

    # test the different modes
    modes = ['mean', 'mean_flip', 'pca_flip', 'max']

    for mode in modes:
        label_tc = extract_label_time_course(stcs, labels, src, mode=mode)
        label_tc_method = [
            stc.extract_label_time_course(labels, src, mode=mode)
            for stc in stcs
        ]
        assert_true(len(label_tc) == n_stcs)
        assert_true(len(label_tc_method) == n_stcs)
        for tc1, tc2 in zip(label_tc, label_tc_method):
            assert_true(tc1.shape == (n_labels, n_times))
            assert_true(tc2.shape == (n_labels, n_times))
            assert_true(np.allclose(tc1, tc2, rtol=1e-8, atol=1e-16))
            if mode == 'mean':
                assert_array_almost_equal(tc1, label_means)
            if mode == 'mean_flip':
                assert_array_almost_equal(tc1, label_means_flipped)
            if mode == 'max':
                assert_array_almost_equal(tc1, label_maxs)

    # test label with very few vertices (check SVD conditionals)
    label = Label(vertices=src[0]['vertno'][:2], hemi='lh')
    x = label_sign_flip(label, src)
    assert_true(len(x) == 2)
    label = Label(vertices=[], hemi='lh')
    x = label_sign_flip(label, src)
    assert_true(x.size == 0)
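
The equivalence the test above asserts, as a standalone sketch: assuming a single SourceEstimate stc (e.g. stcs[0] from the test above) plus the labels and src used there, the module-level function and the SourceEstimate method return the same label time courses for every mode.

# Sketch of the function/method equivalence checked by the test above.
import numpy as np
import mne

for mode in ('mean', 'mean_flip', 'pca_flip', 'max'):
    tc_func = mne.extract_label_time_course(stc, labels, src, mode=mode)
    tc_meth = stc.extract_label_time_course(labels, src, mode=mode)
    assert np.allclose(tc_func, tc_meth)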
Code Example #25
def SN_functional_connectivity_betweenROIs(i, method):
    s = time.time()
    meg = subjects[i]
    sub_to = MRI_sub[i][1:15]
    con_SD_file_name = os.path.expanduser(
        '~'
    ) + '/my_semnet/json_files/connectivity/con_labels_' + method + '_bands_SD_sub' + str(
        i) + '.json'
    con_LD_file_name = os.path.expanduser(
        '~'
    ) + '/my_semnet/json_files/connectivity/con_labels_' + method + '_bands_LD_sub' + str(
        i) + '.json'

    morphed_labels = mne.morph_labels(SN_ROI, subject_to=data_path + sub_to,
                                      subject_from='fsaverage',
                                      subjects_dir=data_path)

    # Reading epochs
    epo_name_SD = data_path + meg + 'block_SD_words_epochs-epo.fif'
    epo_name_LD = data_path + meg + 'block_LD_words_epochs-epo.fif'

    epochs_sd = mne.read_epochs(epo_name_SD, preload=True)
    epochs_ld = mne.read_epochs(epo_name_LD, preload=True)

    epochs_SD = epochs_sd['words'].copy().resample(500)
    epochs_LD = epochs_ld['words'].copy().resample(500)

    # Equalize trial counts to eliminate bias
    equalize_epoch_counts([epochs_SD, epochs_LD])

    # Reading inverse operator
    inv_fname_SD = data_path + meg + 'InvOp_SD_EMEG-inv.fif'
    inv_fname_LD = data_path + meg + 'InvOp_LD_EMEG-inv.fif'

    inv_op_SD = read_inverse_operator(inv_fname_SD)
    inv_op_LD = read_inverse_operator(inv_fname_LD)

    stc_sd = apply_inverse_epochs(epochs_SD,
                                  inv_op_SD,
                                  lambda2,
                                  method='MNE',
                                  pick_ori="normal",
                                  return_generator=False)
    stc_ld = apply_inverse_epochs(epochs_LD,
                                  inv_op_LD,
                                  lambda2,
                                  method='MNE',
                                  pick_ori="normal",
                                  return_generator=False)
    times = epochs_SD.times
    stc_SD_t = []
    stc_LD_t = []

    src_SD = inv_op_SD['src']
    src_LD = inv_op_LD['src']

    for n in np.arange(0, len(stc_sd)):
        stc_SD_t.append(stc_baseline_correction(stc_sd[n], times))
        stc_LD_t.append(stc_baseline_correction(stc_ld[n], times))

    for win in np.arange(0, len(C.con_time_window) - 1):
        print('[i,win]: ', i, win)

        t_min = C.con_time_window[win]
        t_max = C.con_time_window[win + 1]
        stc_SD = []
        stc_LD = []
        for n in np.arange(0, len(stc_sd)):
            stc_SD.append(stc_SD_t[n].copy().crop(t_min * 1e-3, t_max * 1e-3))
            stc_LD.append(stc_LD_t[n].copy().crop(t_min * 1e-3, t_max * 1e-3))

        for k in np.arange(0, 6):
            # print('[i,win,k]: ',i,win,k)
            morphed_labels[k].name = C.rois_labels[k]

        labels_ts_sd = mne.extract_label_time_course(
            stc_SD, morphed_labels, src_SD, mode='mean_flip',
            return_generator=False)
        labels_ts_ld = mne.extract_label_time_course(
            stc_LD, morphed_labels, src_LD, mode='mean_flip',
            return_generator=False)

        for f in np.arange(0, len(C.con_freq_band) - 1):
            print('[i,win,k,f]: ', i, win, k, f)
            f_min = C.con_freq_band[f]
            f_max = C.con_freq_band[f + 1]
            print(f_min, f_max)

            con_SD, freqs, times, n_epochs, n_tapers = spectral_connectivity(
                labels_ts_sd,
                method=method,
                mode='fourier',
                sfreq=500,
                fmin=f_min,
                fmax=f_max,
                faverage=True,
                n_jobs=10)

            con_LD, freqs, times, n_epochs, n_tapers = spectral_connectivity(
                labels_ts_ld,
                method=method,
                mode='fourier',
                sfreq=500,
                fmin=f_min,
                fmax=f_max,
                faverage=True,
                n_jobs=10)

            con_labels_SD[win][f] = con_SD.reshape(6, 6)
            con_labels_LD[win][f] = con_LD.reshape(6, 6)

    with open(con_SD_file_name, "wb") as fp:  #Pickling
        pickle.dump(con_labels_SD, fp)

    with open(con_LD_file_name, "wb") as fp:  #Pickling
        pickle.dump(con_labels_LD, fp)
    e = time.time()
    print(e - s)
Code Example #26
def compute_ROIs_inv_sol(raw_filename, sbj_id, sbj_dir, fwd_filename,
                         cov_fname, is_epoched=False, event_id=None,
                         t_min=None, t_max=None,
                         is_evoked=False, events_id=[],
                         snr=1.0, inv_method='MNE',
                         parc='aparc', aseg=False, aseg_labels=[],
                         is_blind=False, labels_removed=[], save_stc=False):
    import os
    import os.path as op
    import numpy as np
    import mne
    import pickle

    from mne.io import read_raw_fif
    from mne import read_epochs
    from mne.minimum_norm import make_inverse_operator, apply_inverse_raw
    from mne.minimum_norm import apply_inverse_epochs, apply_inverse
    from mne import get_volume_labels_from_src

    from nipype.utils.filemanip import split_filename as split_f

    from neuropype_ephy.preproc import create_reject_dict

    try:
        traits.undefined(event_id)
    except NameError:
        event_id = None

    print '\n*** READ raw filename %s ***\n' % raw_filename
    if is_epoched and event_id is None:
        epochs = read_epochs(raw_filename)
        info = epochs.info
    else:
        raw = read_raw_fif(raw_filename)
        info = raw.info

    subj_path, basename, ext = split_f(info['filename'])

    print '\n*** READ noise covariance %s ***\n' % cov_fname
    noise_cov = mne.read_cov(cov_fname)

    print '\n*** READ FWD SOL %s ***\n' % fwd_filename
    forward = mne.read_forward_solution(fwd_filename)

    if not aseg:
        forward = mne.convert_forward_solution(forward, surf_ori=True,
                                               force_fixed=False)

    lambda2 = 1.0 / snr ** 2

    # compute inverse operator
    print '\n*** COMPUTE INV OP ***\n'
    if not aseg:
        loose = 0.2
        depth = 0.8
    else:
        loose = None
        depth = None

    inverse_operator = make_inverse_operator(info, forward, noise_cov,
                                             loose=loose, depth=depth,
                                             fixed=False)

    # apply inverse operator to the time windows [t_start, t_stop]s
    print '\n*** APPLY INV OP ***\n'
    if is_epoched and event_id is not None:
        events = mne.find_events(raw)
        picks = mne.pick_types(info, meg=True, eog=True, exclude='bads')
        reject = create_reject_dict(info)

        if is_evoked:
            epochs = mne.Epochs(raw, events, events_id, t_min, t_max,
                                picks=picks, baseline=(None, 0), reject=reject)
            evoked = [epochs[k].average() for k in events_id]
            snr = 3.0
            lambda2 = 1.0 / snr ** 2

            ev_list = events_id.items()
            for k in range(len(events_id)):
                stc = apply_inverse(evoked[k], inverse_operator, lambda2,
                                    inv_method, pick_ori=None)

                print '\n*** STC for event %s ***\n' % ev_list[k][0]
                stc_file = op.abspath(basename + '_' + ev_list[k][0])

                print '***'
                print 'stc dim ' + str(stc.shape)
                print '***'

                if not aseg:
                    stc.save(stc_file)

        else:
            epochs = mne.Epochs(raw, events, event_id, t_min, t_max,
                                picks=picks, baseline=(None, 0), reject=reject)
            stc = apply_inverse_epochs(epochs, inverse_operator, lambda2,
                                       inv_method, pick_ori=None)

            print '***'
            print 'len stc %d' % len(stc)
            print '***'

    elif is_epoched and event_id is None:
        stc = apply_inverse_epochs(epochs, inverse_operator, lambda2,
                                   inv_method, pick_ori=None)

        print '***'
        print 'len stc %d' % len(stc)
        print '***'
    else:
        stc = apply_inverse_raw(raw, inverse_operator, lambda2, inv_method,
                                label=None,
                                start=None, stop=None,
                                buffer_size=1000,
                                pick_ori=None)  # None 'normal'

        print('***')
        print('stc dim ' + str(stc.shape))
        print('***')

    if save_stc:
        if aseg:
            for i in range(len(stc)):
                try:
                    os.mkdir(op.join(subj_path, 'TS'))
                except OSError:
                    pass
                stc_file = op.join(subj_path, 'TS', basename + '_' +
                                   inv_method + '_stc_' + str(i) + '.npy')

                if not op.isfile(stc_file):
                    np.save(stc_file, stc[i].data)

    labels_cortex = mne.read_labels_from_annot(sbj_id, parc=parc,
                                               subjects_dir=sbj_dir)
    if is_blind:
        # iterate over a copy: removing labels from the list being looped over
        # would otherwise skip the label that follows each removed one
        for lbl in list(labels_cortex):
            if lbl.name in labels_removed:
                print(lbl.name)
                labels_cortex.remove(lbl)

    print('\n*** %d ***\n' % len(labels_cortex))

    src = inverse_operator['src']

    # allow_empty : bool -> Instead of emitting an error, return all-zero time
    # courses for labels that do not have any vertices in the source estimate
    label_ts = mne.extract_label_time_course(stc, labels_cortex, src,
                                             mode='mean',
                                             allow_empty=True,
                                             return_generator=False)
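    # one (n_labels, n_times) array per SourceEstimate in stc
    # (a single array when stc is a single estimate, e.g. from raw data)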

    # save results in .npy file that will be the input for spectral node
    print('\n*** SAVE ROI TS ***\n')
    print(len(label_ts))

    ts_file = op.abspath(basename + '_ROI_ts.npy')
    np.save(ts_file, label_ts)

    if aseg:
        print(sbj_id)
        labels_aseg = get_volume_labels_from_src(src, sbj_id, sbj_dir)
        labels = labels_cortex + labels_aseg
    else:
        labels = labels_cortex

    print(labels[0].pos)
    print(len(labels))

    labels_file = op.abspath('labels.dat')
    with open(labels_file, "wb") as f:
        pickle.dump(len(labels), f)
        for value in labels:
            pickle.dump(value, f)

    label_names_file = op.abspath('label_names.txt')
    label_coords_file = op.abspath('label_coords.txt')

    label_names = []
    label_coords = []

    for value in labels:
        label_names.append(value.name)
#        label_coords.append(value.pos[0])
        label_coords.append(np.mean(value.pos, axis=0))

    np.savetxt(label_names_file, np.array(label_names, dtype=str),
               fmt="%s")
    np.savetxt(label_coords_file, np.array(label_coords, dtype=float),
               fmt="%f %f %f")

    return ts_file, labels_file, label_names_file, label_coords_file
コード例 #27
0
def SN_functional_connectivity_bands_runs(i, method, SN_ROI):
    s = time.time()
    meg = subjects[i]
    sub_to = MRI_sub[i][1:15]
    stc_F_file_name = os.path.expanduser(
        '~'
    ) + '/old_semnet/my_semnet/json_files/connectivity/stc_' + method + '200_F_bands_SD_sub' + str(
        i) + '.json'
    stc_O_file_name = os.path.expanduser(
        '~'
    ) + '/old_semnet/my_semnet/json_files/connectivity/stc_' + method + '200_O_bands_LD_sub' + str(
        i) + '.json'
    stc_M_file_name = os.path.expanduser(
        '~'
    ) + '/old_semnet/my_semnet/json_files/connectivity/stc_' + method + '200_M_bands_SD_sub' + str(
        i) + '.json'
    stc_SD_file_name = os.path.expanduser(
        '~'
    ) + '/old_semnet/my_semnet/json_files/connectivity/stc_' + method + '200_mean_bands_SD_sub' + str(
        i) + '.json'
    stc_LD_file_name = os.path.expanduser(
        '~'
    ) + '/old_semnet/my_semnet/json_files/connectivity/stc_' + method + '200_mean_bands_LD_sub' + str(
        i) + '.json'

    morphed_labels = mne.morph_labels(SN_ROI,
                                      subject_to=sub_to,
                                      subject_from='fsaverage',
                                      subjects_dir=data_path)

    # Reading epochs
    epo_name_LD = data_path + meg + 'block_LD_words_epochs-epo.fif'

    epochs_ld = mne.read_epochs(epo_name_LD, preload=True)

    epochs_LD = epochs_ld['words'].copy().resample(500)

    epoch_fname_fruit = data_path + meg + 'block_fruit_epochs-epo.fif'
    epoch_fname_odour = data_path + meg + 'block_odour_epochs-epo.fif'
    epoch_fname_milk = data_path + meg + 'block_milk_epochs-epo.fif'

    epochs_fruit = mne.read_epochs(epoch_fname_fruit, preload=True)
    epochs_odour = mne.read_epochs(epoch_fname_odour, preload=True)
    epochs_milk = mne.read_epochs(epoch_fname_milk, preload=True)

    epochs_f = mne.epochs.combine_event_ids(
        epochs_fruit, ['visual', 'hear', 'hand', 'neutral', 'emotional'],
        {'words': 15})
    epochs_o = mne.epochs.combine_event_ids(
        epochs_odour, ['visual', 'hear', 'hand', 'neutral', 'emotional'],
        {'words': 15})
    epochs_m = mne.epochs.combine_event_ids(
        epochs_milk, ['visual', 'hear', 'hand', 'neutral', 'emotional'],
        {'words': 15})

    epochs_f = epochs_f['words'].copy().resample(500)
    epochs_o = epochs_o['words'].copy().resample(500)
    epochs_m = epochs_m['words'].copy().resample(500)

    # Reading inverse operator
    inv_fname_SD = data_path + meg + 'InvOp_SD_EMEG-inv.fif'
    inv_fname_LD = data_path + meg + 'InvOp_LD_EMEG-inv.fif'

    inv_op_SD = read_inverse_operator(inv_fname_SD)
    inv_op_LD = read_inverse_operator(inv_fname_LD)

    stc_f = apply_inverse_epochs(epochs_f,
                                 inv_op_SD,
                                 lambda2,
                                 method='MNE',
                                 pick_ori="normal",
                                 return_generator=False)
    stc_o = apply_inverse_epochs(epochs_o,
                                 inv_op_SD,
                                 lambda2,
                                 method='MNE',
                                 pick_ori="normal",
                                 return_generator=False)
    stc_m = apply_inverse_epochs(epochs_m,
                                 inv_op_SD,
                                 lambda2,
                                 method='MNE',
                                 pick_ori="normal",
                                 return_generator=False)
    stc_ld = apply_inverse_epochs(epochs_LD,
                                  inv_op_LD,
                                  lambda2,
                                  method='MNE',
                                  pick_ori="normal",
                                  return_generator=False)

    src_SD = inv_op_SD['src']
    src_LD = inv_op_LD['src']

    # Construct indices to estimate connectivity between the label time course
    # and all source space time courses
    vertices_SD = [src_SD[j]['vertno'] for j in range(2)]
    n_signals_tot = 1 + len(vertices_SD[0]) + len(vertices_SD[1])
    indices = seed_target_indices([0], np.arange(1, n_signals_tot))

    morph_SD = mne.compute_source_morph(src=inv_op_SD['src'],
                                        subject_from=sub_to,
                                        subject_to=C.subject_to,
                                        spacing=C.spacing_morph,
                                        subjects_dir=C.data_path)
    morph_LD = mne.compute_source_morph(src=inv_op_LD['src'],
                                        subject_from=sub_to,
                                        subject_to=C.subject_to,
                                        spacing=C.spacing_morph,
                                        subjects_dir=C.data_path)

    for win in np.arange(0, len(C.con_time_window) - 1):
        print('[i,win]: ', i, win)

        t_min = C.con_time_window[win]
        t_max = C.con_time_window[win + 1]
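        # window bounds are stored in ms; crop() expects seconds, hence the 1e-3 factor below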
        stc_F = []
        stc_O = []
        stc_M = []
        stc_LD = []

        for n in np.arange(0, len(stc_f)):
            stc_F.append(stc_f[n].copy().crop(t_min * 1e-3, t_max * 1e-3))
        for n in np.arange(0, len(stc_o)):
            stc_O.append(stc_o[n].copy().crop(t_min * 1e-3, t_max * 1e-3))
        for n in np.arange(0, len(stc_m)):
            stc_M.append(stc_m[n].copy().crop(t_min * 1e-3, t_max * 1e-3))
        for n in np.arange(0, len(stc_ld)):
            stc_LD.append(stc_ld[n].copy().crop(t_min * 1e-3, t_max * 1e-3))

        for k in np.arange(0, 6):
            print('[i,win,k]: ', i, win, k)
            morphed_labels[k].name = C.rois_labels[k]

            seed_ts_f = mne.extract_label_time_course(stc_F,
                                                      morphed_labels[k],
                                                      src_SD,
                                                      mode='mean_flip',
                                                      return_generator=False)
            seed_ts_o = mne.extract_label_time_course(stc_O,
                                                      morphed_labels[k],
                                                      src_SD,
                                                      mode='mean_flip',
                                                      return_generator=False)
            seed_ts_m = mne.extract_label_time_course(stc_M,
                                                      morphed_labels[k],
                                                      src_SD,
                                                      mode='mean_flip',
                                                      return_generator=False)
            seed_ts_ld = mne.extract_label_time_course(stc_LD,
                                                       morphed_labels[k],
                                                       src_LD,
                                                       mode='mean_flip',
                                                       return_generator=False)

            for f in np.arange(0, len(C.con_freq_band) - 1):
                print('[i,win,k,f]: ', i, win, k, f)
                f_min = C.con_freq_band[f]
                f_max = C.con_freq_band[f + 1]
                print(f_min, f_max)

                comb_ts_f = zip(seed_ts_f, stc_F)
                comb_ts_o = zip(seed_ts_o, stc_O)
                comb_ts_m = zip(seed_ts_m, stc_M)
                comb_ts_ld = zip(seed_ts_ld, stc_LD)
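                # each pair couples the ROI seed time course with the full
                # source estimate of the same epoch, matching the seed-target
                # indices constructed above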

                con_F, freqs, times, n_epochs, n_tapers = spectral_connectivity(
                    comb_ts_f,
                    method=method,
                    mode='fourier',
                    indices=indices,
                    sfreq=500,
                    fmin=f_min,
                    fmax=f_max,
                    faverage=True,
                    n_jobs=10)

                con_O, freqs, times, n_epochs, n_tapers = spectral_connectivity(
                    comb_ts_o,
                    method=method,
                    mode='fourier',
                    indices=indices,
                    sfreq=500,
                    fmin=f_min,
                    fmax=f_max,
                    faverage=True,
                    n_jobs=10)

                con_M, freqs, times, n_epochs, n_tapers = spectral_connectivity(
                    comb_ts_m,
                    method=method,
                    mode='fourier',
                    indices=indices,
                    sfreq=500,
                    fmin=f_min,
                    fmax=f_max,
                    faverage=True,
                    n_jobs=10)
                con_LD, freqs, times, n_epochs, n_tapers = spectral_connectivity(
                    comb_ts_ld,
                    method=method,
                    mode='fourier',
                    indices=indices,
                    sfreq=500,
                    fmin=f_min,
                    fmax=f_max,
                    faverage=True,
                    n_jobs=10)

                con_SD = (con_F + con_O + con_M) / 3

                con_stc_F = mne.SourceEstimate(con_F,
                                               vertices=vertices_SD,
                                               tmin=t_min * 1e-3,
                                               tstep=2e-3,
                                               subject=sub_to)
                con_stc_O = mne.SourceEstimate(con_O,
                                               vertices=vertices_SD,
                                               tmin=t_min * 1e-3,
                                               tstep=2e-3,
                                               subject=sub_to)
                con_stc_M = mne.SourceEstimate(con_M,
                                               vertices=vertices_SD,
                                               tmin=t_min * 1e-3,
                                               tstep=2e-3,
                                               subject=sub_to)
                con_stc_SD = mne.SourceEstimate(con_SD,
                                                vertices=vertices_SD,
                                                tmin=t_min * 1e-3,
                                                tstep=2e-3,
                                                subject=sub_to)

                con_stc_LD = mne.SourceEstimate(con_LD,
                                                vertices=vertices_SD,
                                                tmin=t_min * 1e-3,
                                                tstep=2e-3,
                                                subject=sub_to)

                stc_total_F[win][k][f] = morph_SD.apply(con_stc_F)
                stc_total_O[win][k][f] = morph_SD.apply(con_stc_O)
                stc_total_M[win][k][f] = morph_SD.apply(con_stc_M)
                stc_total_SD[win][k][f] = morph_SD.apply(con_stc_SD)
                stc_total_LD[win][k][f] = morph_LD.apply(con_stc_LD)

    # with open(stc_F_file_name, "wb") as fp:   #Pickling
    #     pickle.dump(stc_total_F, fp)
    # with open(stc_O_file_name, "wb") as fp:   #Pickling
    #     pickle.dump(stc_total_O, fp)
    # with open(stc_M_file_name, "wb") as fp:   #Pickling
    #     pickle.dump(stc_total_M, fp)
    # with open(stc_SD_file_name, "wb") as fp:   #Pickling
    #     pickle.dump(stc_total_SD, fp)
    with open(stc_LD_file_name, "wb") as fp:  # Pickling
        pickle.dump(stc_total_LD, fp)
    e = time.time()
    print(e - s)
コード例 #28
0
                                                    eog=150e-6))

# Compute the inverse solution for each epoch. Since the output is passed to
# both extract_label_time_course and the phase_slope_index functions, and a
# generator can only be iterated once, the source estimates are recomputed
# below (alternatively, use "return_generator=False" and reuse the list).
snr = 1.0  # use lower SNR for single epochs
lambda2 = 1.0 / snr ** 2
stcs = apply_inverse_epochs(epochs, inverse_operator, lambda2, method,
                            pick_ori="normal", return_generator=True)

# Now, we generate seed time series by averaging the activity in the left
# visual cortex
label = mne.read_label(fname_label)
src = inverse_operator['src']  # the source space used
seed_ts = mne.extract_label_time_course(stcs, label, src, mode='mean_flip',
                                        verbose='error')

# Combine the seed time course with the source estimates. There will be a total
# of 7500 signals:
# index 0: time course extracted from label
# index 1..7499: dSPM source space time courses
stcs = apply_inverse_epochs(epochs, inverse_operator, lambda2, method,
                            pick_ori="normal", return_generator=True)
comb_ts = list(zip(seed_ts, stcs))

# Construct indices to estimate connectivity between the label time course
# and all source space time courses
vertices = [src[i]['vertno'] for i in range(2)]
n_signals_tot = 1 + len(vertices[0]) + len(vertices[1])

indices = seed_target_indices([0], np.arange(1, n_signals_tot))
コード例 #29
0
epochs = mne.concatenate_epochs(all_epochs)

raw = mne.concatenate_raws(raws)
bem_fname = op.join(bem_dir, subject + '-20480-bem-sol.fif')
bem = mne.read_bem_solution(bem_fname)
src = mne.setup_source_space(subject, spacing='ico5',
                             add_dist=False, subjects_dir=subjects_mri_dir)

fwd = mne.make_forward_solution(raw.info, trans=trans_file, src=src, bem=bem, meg=True, eeg=False, n_jobs=2)
inv = mne.minimum_norm.make_inverse_operator(raw.info, fwd, cov, loose=0.2, depth=0.8)

labels = mne.read_labels_from_annot(subject, 'aparc', subjects_dir=subjects_mri_dir)
labels_name = np.array([label.name for label in labels])
stcs = mne.minimum_norm.apply_inverse_epochs(epochs, inv, lambda2=1. / 9., pick_ori='normal', return_generator=True)

label_ts = np.array(mne.extract_label_time_course(stcs, labels, inv['src'], return_generator=False))
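# label_ts: (n_epochs, n_labels, n_times); 'aparc' gives 34 labels/hemi (68 total)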

psds, freqs = psd_array_multitaper(label_ts, epochs.info['sfreq'], fmin=2, fmax=55)

tfr_alpha = tfr_array_multitaper(label_ts, epochs.info['sfreq'], freqs=np.arange(8, 13), output='avg_power', n_jobs=4)
tfr_beta = tfr_array_multitaper(label_ts, epochs.info['sfreq'], freqs=np.arange(16, 30), output='avg_power', n_jobs=4)
tfr_lgamma = tfr_array_multitaper(label_ts, epochs.info['sfreq'], freqs=np.arange(30, 55), output='avg_power', n_jobs=4)
tfr_hgamma = tfr_array_multitaper(label_ts, epochs.info['sfreq'], freqs=np.arange(65, 100), output='avg_power', n_jobs=4)


for ix, inds in enumerate(np.split(np.arange(68), 4)):
    plt.figure(figsize=(15, 20))
    plt.rc('xtick', labelsize=25)
    plt.rc('ytick', labelsize=25)
    lineObjects = plt.plot(freqs, 20 * np.log10(psds.mean(0).T)[:, inds], linewidth=4)
    plt.xlabel('Frequency (Hz)', fontsize=30)
コード例 #30
0
    stcs_slow = apply_inverse_epochs(slow_epo_isi,
                                     inverse_operator,
                                     lambda2,
                                     method='sLORETA',
                                     pick_ori="normal",
                                     return_generator=True)

    # Now, we generate seed time series from each vertex in the left V1
    vertex_v1 = mne.label.select_sources('Case0102',
                                         label=stc_label_v1[0],
                                         location=vert_num_v1,
                                         subjects_dir=subjects_dir)

    seed_ts_slow_v1 = mne.extract_label_time_course(stcs_slow,
                                                    vertex_v1,
                                                    src,
                                                    mode='mean_flip',
                                                    verbose='error')

    psi_slow_v1_mt = np.zeros([len(vertices_mt), 1])
    for vert_num_mt in vertices_mt:

        stcs_slow = apply_inverse_epochs(slow_epo_isi,
                                         inverse_operator,
                                         lambda2,
                                         method='sLORETA',
                                         pick_ori="normal",
                                         return_generator=False)

        # Now, we generate seed time series from each vertex in the left MT
        vertex_mt = mne.label.select_sources('Case0102',
コード例 #31
0
def SN_functional_connectivity_bands(i, method):
    s = time.time()
    meg = subjects[i]
    sub_to = MRI_sub[i][1:15]
    stc_SD_file_name = os.path.expanduser(
        '~'
    ) + '/my_semnet/json_files/connectivity/stc_' + method + '200_equalized_bands_SD_sub' + str(
        i) + '.json'
    stc_LD_file_name = os.path.expanduser(
        '~'
    ) + '/my_semnet/json_files/connectivity/stc_' + method + '200_equalized_bands_LD_sub' + str(
        i) + '.json'
    # stc_SD_file_name=os.path.expanduser('~') +'/my_semnet/json_files/connectivity/stc_'+method+'bl_bands_SD_sub'+str(i)+'.json'
    # stc_LD_file_name=os.path.expanduser('~') +'/my_semnet/json_files/connectivity/stc_'+method+'bl_bands_LD_sub'+str(i)+'.json'

    morphed_labels = mne.morph_labels(SN_ROI,
                                      subject_to=data_path + sub_to,
                                      subject_from='fsaverage',
                                      subjects_dir=data_path)

    # Reading epochs
    epo_name_SD = data_path + meg + 'block_SD_words_epochs-epo.fif'
    epo_name_LD = data_path + meg + 'block_LD_words_epochs-epo.fif'

    epochs_sd = mne.read_epochs(epo_name_SD, preload=True)
    epochs_ld = mne.read_epochs(epo_name_LD, preload=True)

    epochs_SD = epochs_sd['words'].copy().resample(500)
    epochs_LD = epochs_ld['words'].copy().resample(500)

    equalize_epoch_counts([epochs_SD, epochs_LD])
    # Reading inverse operator
    inv_fname_SD = data_path + meg + 'InvOp_SD_EMEG-inv.fif'
    inv_fname_LD = data_path + meg + 'InvOp_LD_EMEG-inv.fif'

    inv_op_SD = read_inverse_operator(inv_fname_SD)
    inv_op_LD = read_inverse_operator(inv_fname_LD)

    stc_sd = apply_inverse_epochs(epochs_SD,
                                  inv_op_SD,
                                  lambda2,
                                  method='MNE',
                                  pick_ori="normal",
                                  return_generator=False)
    stc_ld = apply_inverse_epochs(epochs_LD,
                                  inv_op_LD,
                                  lambda2,
                                  method='MNE',
                                  pick_ori="normal",
                                  return_generator=False)
    src_SD = inv_op_SD['src']
    src_LD = inv_op_LD['src']
    # Construct indices to estimate connectivity between the label time course
    # and all source space time courses
    vertices_SD = [src_SD[j]['vertno'] for j in range(2)]
    n_signals_tot = 1 + len(vertices_SD[0]) + len(vertices_SD[1])
    indices = seed_target_indices([0], np.arange(1, n_signals_tot))

    morph_SD = mne.compute_source_morph(src=inv_op_SD['src'],\
                    subject_from=sub_to, subject_to=C.subject_to,\
                    spacing=C.spacing_morph, subjects_dir=C.data_path)

    morph_LD = mne.compute_source_morph(src= inv_op_LD['src'],\
                    subject_from=sub_to, subject_to=C.subject_to,\
                    spacing=C.spacing_morph, subjects_dir=C.data_path)

    for win in np.arange(0, len(C.con_time_window) - 1):
        print('[i,win]: ', i, win)

        t_min = C.con_time_window[win]
        t_max = C.con_time_window[win + 1]
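        # window bounds are in ms; crop() below therefore scales by 1e-3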
        stc_SD = []
        stc_LD = []

        for n in np.arange(0, len(stc_sd)):
            stc_SD.append(stc_sd[n].copy().crop(t_min * 1e-3, t_max * 1e-3))

        for n in np.arange(0, len(stc_ld)):
            stc_LD.append(stc_ld[n].copy().crop(t_min * 1e-3, t_max * 1e-3))

        for k in np.arange(0, 6):
            print('[i,win,k]: ', i, win, k)
            morphed_labels[k].name = C.rois_labels[k]

            seed_ts_sd = mne.extract_label_time_course(stc_SD, morphed_labels[k],
                                                       src_SD, mode='mean_flip',
                                                       return_generator=False)
            seed_ts_ld = mne.extract_label_time_course(stc_LD, morphed_labels[k],
                                                       src_LD, mode='mean_flip',
                                                       return_generator=False)

            for f in np.arange(0, len(C.con_freq_band) - 1):
                print('[i,win,k,f]: ', i, win, k, f)
                f_min = C.con_freq_band[f]
                f_max = C.con_freq_band[f + 1]
                print(f_min, f_max)

                comb_ts_sd = zip(seed_ts_sd, stc_SD)
                comb_ts_ld = zip(seed_ts_ld, stc_LD)

                con_SD, freqs, times, n_epochs, n_tapers = spectral_connectivity(
                    comb_ts_sd,
                    method=method,
                    mode='fourier',
                    indices=indices,
                    sfreq=500,
                    fmin=f_min,
                    fmax=f_max,
                    faverage=True,
                    n_jobs=10)

                con_LD, freqs, times, n_epochs, n_tapers = spectral_connectivity(
                    comb_ts_ld,
                    method=method,
                    mode='fourier',
                    indices=indices,
                    sfreq=500,
                    fmin=f_min,
                    fmax=f_max,
                    faverage=True,
                    n_jobs=10)

                con_stc_SD = mne.SourceEstimate(con_SD, vertices=vertices_SD,\
                              tmin=t_min*1e-3, tstep=2e-3,subject=sub_to)

                con_stc_LD = mne.SourceEstimate(con_LD, vertices=vertices_SD,\
                              tmin=t_min*1e-3, tstep=2e-3,subject=sub_to)

                stc_total_SD[win][k][f] = morph_SD.apply(con_stc_SD)
                stc_total_LD[win][k][f] = morph_LD.apply(con_stc_LD)

    with open(stc_SD_file_name, "wb") as fp:  #Pickling
        pickle.dump(stc_total_SD, fp)

    with open(stc_LD_file_name, "wb") as fp:  #Pickling
        pickle.dump(stc_total_LD, fp)
    e = time.time()
    print(e - s)
コード例 #32
0
                             lambda2,
                             method,
                             label=None,
                             pick_ori="normal",
                             return_generator=True)
        for sti in hcp_epochs.event_id
    ]
    sti_names = [sti for sti in hcp_epochs.event_id]
    labels = mne.read_labels_from_annot(subject,
                                        parc='aparc',
                                        subjects_dir=subjects_dir)
    label_names = [label.name for label in labels]
    label_ts_list = [
        mne.extract_label_time_course(stcs_list[ite],
                                      labels,
                                      src,
                                      mode='mean_flip',
                                      return_generator=False)
        for ite in range(len(stcs_list))
    ]

    del stcs_list, noise_cov, inverse_operator, src
    # using mne build-in function
    from mne.connectivity import spectral_connectivity
    cwt_freqs = np.linspace(4, 45, 42)
    cwt_n_cycles = np.linspace(3, 15, 42)
    sfreq = hcp_epochs.info['sfreq']  # the sampling frequency
    del hcp_epochs
    con_methods = 'wpli'
    con_list = []
    for label_ts in label_ts_list:
コード例 #33
0
def SN_functional_connectivity_betweenROIs_runs_BL(i, method):
    s = time.time()
    meg = subjects[i]
    sub_to = MRI_sub[i][1:15]

    # stc_SD_file_name=os.path.expanduser('~') +'/my_semnet/json_files/connectivity/con_labels_'+method+'_bl_bands_SD_sub'+str(i)+'.json'
    # stc_LD_file_name=os.path.expanduser('~') +'/my_semnet/json_files/connectivity/con_labels_'+method+'_bl_bands_LD_sub'+str(i)+'.json'
    stc_F_file_name = os.path.expanduser(
        '~'
    ) + '/my_semnet/json_files/connectivity/con_labels_' + method + '_bl_bands_F_sub' + str(
        i) + '.json'
    stc_M_file_name = os.path.expanduser(
        '~'
    ) + '/my_semnet/json_files/connectivity/con_labels_' + method + '_bl_bands_M_sub' + str(
        i) + '.json'
    stc_O_file_name = os.path.expanduser(
        '~'
    ) + '/my_semnet/json_files/connectivity/con_labels_' + method + '_bl_bands_O_sub' + str(
        i) + '.json'

    morphed_labels = mne.morph_labels(SN_ROI,
                                      subject_to=data_path + sub_to,
                                      subject_from='fsaverage',
                                      subjects_dir=data_path)

    # Reading epochs
    epoch_fname_fruit = data_path + meg + 'block_fruit_epochs-epo.fif'
    epoch_fname_odour = data_path + meg + 'block_odour_epochs-epo.fif'
    epoch_fname_milk = data_path + meg + 'block_milk_epochs-epo.fif'
    epo_name_LD = data_path + meg + 'block_LD_words_epochs-epo.fif'

    epochs_fruit = mne.read_epochs(epoch_fname_fruit, preload=True)
    epochs_odour = mne.read_epochs(epoch_fname_odour, preload=True)
    epochs_milk = mne.read_epochs(epoch_fname_milk, preload=True)
    epochs_ld = mne.read_epochs(epo_name_LD, preload=True)

    epochs_f = mne.epochs.combine_event_ids(
        epochs_fruit, ['visual', 'hear', 'hand', 'neutral', 'emotional'],
        {'words': 15})
    epochs_o = mne.epochs.combine_event_ids(
        epochs_odour, ['visual', 'hear', 'hand', 'neutral', 'emotional'],
        {'words': 15})
    epochs_m = mne.epochs.combine_event_ids(
        epochs_milk, ['visual', 'hear', 'hand', 'neutral', 'emotional'],
        {'words': 15})

    epochs_f = epochs_f['words'].copy().crop(-.200, 0).resample(500)
    epochs_o = epochs_o['words'].copy().crop(-.200, 0).resample(500)
    epochs_m = epochs_m['words'].copy().crop(-.200, 0).resample(500)
    epochs_LD = epochs_ld['words'].copy().crop(-.200, 0).resample(500)

    # Reading inverse operator
    inv_fname_SD = data_path + meg + 'InvOp_SD_EMEG-inv.fif'
    inv_fname_LD = data_path + meg + 'InvOp_LD_EMEG-inv.fif'

    inv_op_SD = read_inverse_operator(inv_fname_SD)
    inv_op_LD = read_inverse_operator(inv_fname_LD)

    stc_F = apply_inverse_epochs(epochs_f,
                                 inv_op_SD,
                                 lambda2,
                                 method='MNE',
                                 pick_ori="normal",
                                 return_generator=False)
    stc_O = apply_inverse_epochs(epochs_o,
                                 inv_op_SD,
                                 lambda2,
                                 method='MNE',
                                 pick_ori="normal",
                                 return_generator=False)
    stc_M = apply_inverse_epochs(epochs_m,
                                 inv_op_SD,
                                 lambda2,
                                 method='MNE',
                                 pick_ori="normal",
                                 return_generator=False)

    stc_LD = apply_inverse_epochs(epochs_LD,
                                  inv_op_LD,
                                  lambda2,
                                  method='MNE',
                                  pick_ori="normal",
                                  return_generator=False)

    src_SD = inv_op_SD['src']
    src_LD = inv_op_LD['src']

    for k in np.arange(0, 6):
        # print('[i,win,k]: ',i,win,k)
        morphed_labels[k].name = C.rois_labels[k]

    for f in np.arange(0, len(C.con_freq_band) - 1):
        print('[i,k,f]: ', i, k, f)
        f_min = C.con_freq_band[f]
        f_max = C.con_freq_band[f + 1]
        print(f_min, f_max)

        labels_ts_f = mne.extract_label_time_course(stc_F, morphed_labels,
                                                    src_SD, mode='mean_flip',
                                                    return_generator=False)
        labels_ts_o = mne.extract_label_time_course(stc_O, morphed_labels,
                                                    src_SD, mode='mean_flip',
                                                    return_generator=False)
        labels_ts_m = mne.extract_label_time_course(stc_M, morphed_labels,
                                                    src_SD, mode='mean_flip',
                                                    return_generator=False)

        labels_ts_ld = mne.extract_label_time_course(stc_LD, morphed_labels,
                                                     src_LD, mode='mean_flip',
                                                     return_generator=False)

        con_F, freqs, times, n_epochs, n_tapers = spectral_connectivity(
            labels_ts_f,
            method=method,
            mode='fourier',
            sfreq=500,
            fmin=f_min,
            fmax=f_max,
            faverage=True,
            n_jobs=10)
        con_O, freqs, times, n_epochs, n_tapers = spectral_connectivity(
            labels_ts_o,
            method=method,
            mode='fourier',
            sfreq=500,
            fmin=f_min,
            fmax=f_max,
            faverage=True,
            n_jobs=10)
        con_M, freqs, times, n_epochs, n_tapers = spectral_connectivity(
            labels_ts_m,
            method=method,
            mode='fourier',
            sfreq=500,
            fmin=f_min,
            fmax=f_max,
            faverage=True,
            n_jobs=10)

        # con_LD, freqs, times, n_epochs, n_tapers = spectral_connectivity(
        #      labels_ts_ld, method=method, mode='fourier',
        #     sfreq=500, fmin=f_min, fmax=f_max, faverage=True, n_jobs=10)
        # con_SD=(con_F+ con_O+ con_M)/3

        # con_labels_SD[f]= con_SD.reshape(6,6)
        # con_labels_LD[f]= con_LD.reshape(6,6)
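        # con_* has shape (6, 6, 1) after frequency averaging (only the lower
        # triangle is filled); reshape drops the singleton band axis to give a
        # 6 x 6 ROI-by-ROI matrix per band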
        con_labels_F[f] = con_F.reshape(6, 6)
        con_labels_M[f] = con_M.reshape(6, 6)
        con_labels_O[f] = con_O.reshape(6, 6)

    # with open(stc_SD_file_name, "wb") as fp:   #Pickling
    #     pickle.dump(con_labels_SD, fp)

    # with open(stc_LD_file_name, "wb") as fp:   #Pickling
    #     pickle.dump(con_labels_LD, fp)

    with open(stc_F_file_name, "wb") as fp:  #Pickling
        pickle.dump(con_labels_F, fp)

    with open(stc_M_file_name, "wb") as fp:  #Pickling
        pickle.dump(con_labels_M, fp)

    with open(stc_O_file_name, "wb") as fp:  #Pickling
        pickle.dump(con_labels_O, fp)

    e = time.time()
    print(e - s)
コード例 #34
0
                                         fixed=False)


stcs = apply_inverse_epochs(epochs, inverse_operator, lambda2, inv_method,
                            pick_ori=None, return_generator=True)

# Get labels for FreeSurfer 'aparc' cortical parcellation with 34 labels/hemi
labels_parc = mne.read_labels_from_annot(subject, parc=parc,
                                         subjects_dir=subjects_dir)

# Average the source estimates within each label of the cortical parcellation
# and each sub structures contained in the src space
# If mode = 'mean_flip' this option is used only for the cortical label
src = inverse_operator['src']
label_ts = mne.extract_label_time_course(stcs, labels_parc, src,
                                         mode='mean_flip',
                                         allow_empty=True,
                                         return_generator=False)

# We compute the connectivity in the alpha band and plot it using a circular
# graph layout
fmin = 8.
fmax = 13.
sfreq = raw.info['sfreq']  # the sampling frequency
con, freqs, times, n_epochs, n_tapers = spectral_connectivity(
    label_ts, method='pli', mode='multitaper', sfreq=sfreq, fmin=fmin,
    fmax=fmax, faverage=True, mt_adaptive=True, n_jobs=1)

# We create a list of Label containing also the sub structures
labels_aseg = mne.get_volume_labels_from_src(src, subject, subjects_dir)
labels = labels_parc + labels_aseg
コード例 #35
0
ファイル: dics_apriori.py プロジェクト: TinnErlangen/ATT
         # X[idx].append(np.vstack(X_temp))
         # idx += 1
         for wav_idx, wav in enumerate(wavs):
             stc_temp = mne.read_source_estimate(
                 "{dir}stcs/nc_{a}_{b}_{c}_{f0}-{f1}Hz_ico{d}-lh.stc".
                 format(dir=proc_dir,
                        a=sub,
                        b=cond,
                        c=wav,
                        f0=fr[0],
                        f1=fr[-1],
                        d=spacing))
             stc_temp = morph.apply(stc_temp)
             for lab_idx, lab in enumerate(labels):
                 X_temp = mne.extract_label_time_course(stc_temp,
                                                        lab,
                                                        fs_src,
                                                        mode="pca_flip")
                 X[lab_idx][idx].append(X_temp.mean(axis=-1).squeeze())
             idx += 1
 X = [[(np.array(x) * 1e+26).astype(np.float32) for x in xx] for xx in X]
 del X_temp, morph, src
 X = np.array(X).swapaxes(0, 2)
 result = f_mway_rm(X, factor_levels=factor_levels, effects=effects)[0][0]
 sig_areas = np.where(result[1] < 0.05)[0]
 if sig_areas.size == 0: continue
 for sa in np.nditer(sig_areas):
     title = "{}_{}".format(k, labels[sa].name)
     figures.append(mlab.figure(title))
     brains.append(
         Brain('fsaverage',
               'both',
コード例 #36
0
                                                    eog=150e-6))

# Compute the inverse solution for each epoch. Since the output is passed to
# both extract_label_time_course and the phase_slope_index functions, and a
# generator can only be iterated once, the source estimates are recomputed
# below (alternatively, use "return_generator=False" and reuse the list).
snr = 1.0  # use lower SNR for single epochs
lambda2 = 1.0 / snr ** 2
stcs = apply_inverse_epochs(epochs, inverse_operator, lambda2, method,
                            pick_ori="normal", return_generator=True)

# Now, we generate seed time series by averaging the activity in the left
# visual cortex
label = mne.read_label(fname_label)
src = inverse_operator['src']  # the source space used
seed_ts = mne.extract_label_time_course(stcs, label, src, mode='mean_flip',
                                        verbose='error')

# Combine the seed time course with the source estimates. There will be a total
# of 7500 signals:
# index 0: time course extracted from label
# index 1..7499: dSPM source space time courses
stcs = apply_inverse_epochs(epochs, inverse_operator, lambda2, method,
                            pick_ori="normal", return_generator=True)
comb_ts = list(zip(seed_ts, stcs))

# Construct indices to estimate connectivity between the label time course
# and all source space time courses
vertices = [src[i]['vertno'] for i in range(2)]
n_signals_tot = 1 + len(vertices[0]) + len(vertices[1])

indices = seed_target_indices([0], np.arange(1, n_signals_tot))
コード例 #37
0
# stcs will be a generator object instead of a list.
snr = 1.0  # use lower SNR for single epochs
lambda2 = 1.0 / snr ** 2
method = "dSPM"  # use dSPM method (could also be MNE or sLORETA)
stcs = apply_inverse_epochs(epochs, inverse_operator, lambda2, method,
                            pick_ori="normal", return_generator=True)

# Read some labels
names = ['Aud-lh', 'Aud-rh', 'Vis-lh', 'Vis-rh']
labels = [mne.read_label(data_path + '/MEG/sample/labels/%s.label' % name)
          for name in names]

# Average the source estimates within each label using sign-flips to reduce
# signal cancellations, also here we return a generator
src = inverse_operator['src']
label_ts = mne.extract_label_time_course(stcs, labels, src, mode='mean_flip',
                                         return_generator=True)

fmin, fmax = 7.5, 40.
sfreq = raw.info['sfreq']  # the sampling frequency

con, freqs, times, n_epochs, n_tapers = spectral_connectivity(
    label_ts, method='wpli2_debiased', mode='multitaper', sfreq=sfreq,
    fmin=fmin, fmax=fmax, mt_adaptive=True, n_jobs=1)

n_rows, n_cols = con.shape[:2]
fig, axes = plt.subplots(n_rows, n_cols, sharex=True, sharey=True)
for i in range(n_rows):
    for j in range(i + 1):
        if i == j:
            axes[i, j].set_axis_off()
            continue
コード例 #38
0
    tcs_v1 = []
    tcs_v4 = []

    for vert_num_v1 in vertices_v1:

        #one
        # Now, we generate seed time series from each vertex in the left V1
        vertex_v1 = mne.label.select_sources('Case' + subject,
                                             label=stc_label_v1[1],
                                             location=vert_num_v1,
                                             subjects_dir=subjects_dir)

        seed_tc_v1 = mne.extract_label_time_course(stcs_v1,
                                                   vertex_v1,
                                                   src,
                                                   mode='mean_flip',
                                                   verbose='error')
        tcs_v1.append(seed_tc_v1)

    for vert_num_v4 in vertices_v4:

        #two
        # Now, we generate seed time series from each vertex in the left V4
        vertex_v4 = mne.label.select_sources('Case' + subject,
                                             label=stc_label_v4[1],
                                             location=vert_num_v4,
                                             subjects_dir=subjects_dir)

        seed_ts_v4 = mne.extract_label_time_course(stcs_v4,
                                                   vertex_v4,
コード例 #39
0
# Compute inverse solution and for each epoch. By using "return_generator=True"
# stcs will be a generator object instead of a list.
snr = 1.0  # use lower SNR for single epochs
lambda2 = 1.0 / snr ** 2
method = "dSPM"  # use dSPM method (could also be MNE or sLORETA)
stcs = apply_inverse_epochs(epochs, inverse_operator, lambda2, method,
                            pick_ori="normal", return_generator=True)

# Get labels for FreeSurfer 'aparc' cortical parcellation with 34 labels/hemi
labels, label_colors = mne.labels_from_parc('sample', parc='aparc',
                                            subjects_dir=subjects_dir)

# Average the source estimates within each label using sign-flips to reduce
# signal cancellations, also here we return a generator
src = inverse_operator['src']
label_ts = mne.extract_label_time_course(stcs, labels, src, mode='mean_flip',
                                         return_generator=True)

# Now we are ready to compute the connectivity in the alpha band. Notice
# from the status messages, how mne-python: 1) reads an epoch from the raw
# file, 2) applies SSP and baseline correction, 3) computes the inverse to
# obtain a source estimate, 4) averages the source estimate to obtain a
# time series for each label, 5) includes the label time series in the
# connectivity computation, and then moves to the next epoch. This
# behaviour is because we are using generators and allows us to
# compute connectivity in computationally efficient manner where the amount
# of memory (RAM) needed is independent from the number of epochs.
fmin = 8.
fmax = 13.
sfreq = raw.info['sfreq']  # the sampling frequency

con, freqs, times, n_epochs, n_tapers = spectral_connectivity(label_ts,
コード例 #40
0
                                 dtype=float))

            label_tc.append(pd.concat(tc_trial, axis=1))

        # just using label_tc as name to maintain backward compatibility
        label_tc = pd.concat(label_tc)
    else:
        label_tc = pd.DataFrame([],
                                columns=pd.Index(labelnames, name='label'),
                                index=pd.MultiIndex.from_product(
                                    [[sub], trials, times],
                                    names=['subject', 'trial', 'time']),
                                dtype=float)

        label_tc_gen = mne.extract_label_time_course(stcs_gen,
                                                     labels,
                                                     fwd['src'],
                                                     mode=label_mode)
        for trial, tc in zip(trials, label_tc_gen):
            label_tc.loc[(sub, trial, slice(None)), :] = tc.T

    if fresh:
        mean = label_tc.mean()
    else:
        mean += label_tc.mean()

    with pd.HDFStore(file, mode='a', complib='blosc', complevel=7) as store:
        store.append('label_nv', label_nv)
        store.append('label_tc', label_tc)

    gc.collect()
コード例 #41
0
        label = mne.read_label(label_dir + roi)
        roi_pretty = roi.split('.')[0]
    else:
        roi_pretty = roi.split('/')[-1].split('+')[0]
        # right labels are normally in the second index
        if avg_label[0] is not None:
            label = avg_label[0].morph(subject_to=s)
        else:
            label = avg_label[1].morph(subject_to=s)

    evoked_fname = '/Volumes/Shaw/MEG_data/analysis/stop/evoked/%s_stop_BP1-35_DS120-ave.fif' % s
    inv_fname = '/Volumes/Shaw/MEG_data/analysis/stop/%s_task-5-meg-inv.fif' % s
    inverse_operator = read_inverse_operator(inv_fname)

    for i, c in enumerate(conds):
        # calculate source estimates for the whole brain
        evoked = mne.read_evokeds(evoked_fname, condition=c)
        stc = apply_inverse(evoked, inverse_operator, lambda2, method,
                            pick_ori=None)
        ts = mne.extract_label_time_course(stc, label, inverse_operator['src'])
        data[i].append(ts)

# export one CSV file for each condition
for i, c in enumerate(conds):
    fname = out_dir + '%s_%s_%s.csv' % (c, roi_pretty, method)
    fid = open(fname, 'w')
    fid.write('time,' + ','.join(['%.3f' % t for t in evoked.times]) + '\n')
    for j, d in enumerate(data[i]):
        fid.write('%s,' % subjs[j] + ','.join(['%e' % t for t in d[0]]) + '\n')
    fid.close()
コード例 #42
0
# ---------------

fig = stc.volume().plot(initial_time=initial_time,
                        src=src,
                        subjects_dir=subjects_dir)

# %%
# Process labels
# --------------
# Average the source estimates within each label of the cortical parcellation
# and each sub structure contained in the src space

# Get labels for FreeSurfer 'aparc' cortical parcellation with 34 labels/hemi
labels_parc = mne.read_labels_from_annot(subject,
                                         parc=parc,
                                         subjects_dir=subjects_dir)

label_ts = mne.extract_label_time_course([stc],
                                         labels_parc,
                                         src,
                                         mode='mean',
                                         allow_empty=True)
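# label_ts is a single-element list (one stc was passed). In a mixed source
# space the volume-source time courses are appended after the cortical labels,
# which is why the last row below corresponds to a sub-cortical structure.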

# plot the times series of 2 labels
fig, axes = plt.subplots(1)
axes.plot(1e3 * stc.times, label_ts[0][0, :], 'k', label='bankssts-lh')
axes.plot(1e3 * stc.times, label_ts[0][-1, :].T, 'r', label='Brain-stem')
axes.set(xlabel='Time (ms)', ylabel='MNE current (nAm)')
axes.legend()
mne.viz.tight_layout()
コード例 #43
0
def test_extract_label_time_course():
    """Test extraction of label time courses from stc
    """
    n_stcs = 3
    n_times = 50

    src = read_inverse_operator(fname_inv)['src']
    vertices = [src[0]['vertno'], src[1]['vertno']]
    n_verts = len(vertices[0]) + len(vertices[1])

    # get some labels
    labels_lh, _ = labels_from_parc('sample', hemi='lh',
                                    subjects_dir=subjects_dir)
    labels_rh, _ = labels_from_parc('sample', hemi='rh',
                                    subjects_dir=subjects_dir)
    labels = list()
    labels.extend(labels_lh[:5])
    labels.extend(labels_rh[:4])

    n_labels = len(labels)

    label_means = np.arange(n_labels)[:, None] * np.ones((n_labels, n_times))

    # compute the mean with sign flip
    label_means_flipped = np.zeros_like(label_means)
    for i, label in enumerate(labels):
        label_means_flipped[i] = i * np.mean(label_sign_flip(label, src))

    # generate some stc's with known data
    stcs = list()
    for i in range(n_stcs):
        data = np.zeros((n_verts, n_times))
        # set the value of the stc within each label
        for j, label in enumerate(labels):
            if label.hemi == 'lh':
                idx = np.intersect1d(vertices[0], label.vertices)
                idx = np.searchsorted(vertices[0], idx)
            elif label.hemi == 'rh':
                idx = np.intersect1d(vertices[1], label.vertices)
                idx = len(vertices[0]) + np.searchsorted(vertices[1], idx)
            data[idx] = label_means[j]

        this_stc = SourceEstimate(data, vertices, 0, 1)
        stcs.append(this_stc)

    # test some invalid inputs
    assert_raises(ValueError, extract_label_time_course, stcs, labels,
                  src, mode='notamode')

    # have an empty label
    empty_label = labels[0].copy()
    empty_label.vertices += 1000000
    assert_raises(ValueError, extract_label_time_course, stcs, empty_label,
                  src, mode='mean')

    # but this works:
    tc = extract_label_time_course(stcs, empty_label, src, mode='mean',
                                   allow_empty=True)
    for arr in tc:
        assert_true(arr.shape == (1, n_times))
        assert_array_equal(arr, np.zeros((1, n_times)))

    # test the different modes
    modes = ['mean', 'mean_flip', 'pca_flip']

    for mode in modes:
        label_tc = extract_label_time_course(stcs, labels, src, mode=mode)
        label_tc_method = [stc.extract_label_time_course(labels, src,
                           mode=mode) for stc in stcs]
        assert_true(len(label_tc) == n_stcs)
        assert_true(len(label_tc_method) == n_stcs)
        for tc1, tc2 in zip(label_tc, label_tc_method):
            assert_true(tc1.shape == (n_labels, n_times))
            assert_true(tc2.shape == (n_labels, n_times))
            assert_true(np.allclose(tc1, tc2, rtol=1e-8, atol=1e-16))
            if mode == 'mean':
                assert_array_almost_equal(tc1, label_means)
            if mode == 'mean_flip':
                assert_array_almost_equal(tc1, label_means_flipped)
コード例 #44
0
ファイル: extract_ts_all_subs.py プロジェクト: ahoejlund/CAA
                             "%s_trial_start-epo.fif" % subject)
    # epochs.drop_bad_epochs(reject_params)
    # epochs.resample(250, n_jobs=4)

    for condition in conditions:
        stcs = apply_inverse_epochs(epochs[condition],
                                    inverse_operator,
                                    lambda2,
                                    method,
                                    pick_ori=None)

        for label in labels_sel:
            label_ts = []
            for j in range(len(stcs)):
                ts = mne.extract_label_time_course(stcs[j],
                                                   labels=label,
                                                   src=src,
                                                   mode="pca_flip")
                ts = np.squeeze(ts)
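                # flip the sign so the largest deflection is positive;
                # pca_flip leaves an arbitrary overall sign per epoch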
                ts *= np.sign(ts[np.argmax(np.abs(ts))])
                label_ts.append(ts)

            label_ts = np.asarray(label_ts)
            tfr = cwt_morlet(label_ts, epochs.info["sfreq"], freqs,
                             use_fft=True, n_cycles=n_cycle)

            np.save(tf_folder + "%s_%s_%s_%s_%s_sf-tfr" % (subject,
                                                           condition[:3],
                                                           condition[4:],
                                                           label.name, method),
                    tfr)
            np.save(tf_folder + "%s_%s_%s_%s_%s_sf-ts" % (subject,
コード例 #45
0
    else:
        label = avg_label[1].morph(subject_to=s)

    epochs_fname = home + '/data/meg/stop/parsed/%s_stop_parsed_matched_clean_BP1-100_DS300-epo.fif.gz' % s
    epochs = mne.read_epochs(epochs_fname, proj=True)
    fwd_fname = home + '/data/meg/stop/%s_task-5-fwd.fif' % s
    fwd = mne.read_forward_solution(fwd_fname, surf_ori=True)

    # calculate source power estimates for the whole brain
    # quick hack in tmax ot make it the same length as btmax
    data_csds = compute_epochs_csd(epochs[cond], mode='multitaper',
                                   tmin=tmin, tmax=tmax + btmax,
                                   fmin=band[0], fmax=band[1],
                                   fsum=False)
    noise_csds = compute_epochs_csd(epochs[cond], mode='multitaper',
                                    tmin=btmin, tmax=btmax,
                                    fmin=band[0], fmax=band[1],
                                    fsum=False)
    stc = dics_source_power(epochs.info, fwd, noise_csds, data_csds)
    ts = mne.extract_label_time_course(stc, label, fwd['src'])
    data.append(ts)

# export one CSV file
fname = out_dir + '%s_%s_%02dto%02d_tmin%.2f.csv' % (cond, roi_pretty, band[0],
                                                     band[1], tmin)
fid = open(fname, 'w')
fid.write('subj,power\n')
for j, d in enumerate(data):
    fid.write('%s,' % subjs[j] + ','.join(['%e' % t for t in d[0]]) + '\n')
fid.close()
コード例 #46
0
lambda2 = 1.0 / snr ** 2

# Compute inverse operator
inverse_operator = make_inverse_operator(evoked.info, fwd, noise_cov,
                                         depth=None, fixed=False)

stc = apply_inverse(evoked, inverse_operator, lambda2, inv_method,
                    pick_ori=None)

# Get labels for FreeSurfer 'aparc' cortical parcellation with 34 labels/hemi
labels_parc = mne.read_labels_from_annot(
    subject, parc=parc, subjects_dir=subjects_dir)

###############################################################################
# Average the source estimates within each label of the cortical parcellation
# and each sub structure contained in the src space

src = inverse_operator['src']

label_ts = mne.extract_label_time_course(
    [stc], labels_parc, src, mode='mean', allow_empty=True)
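# With the mixed source space, the volume structures are appended after the 68
# cortical 'aparc' labels, so the higher indices below correspond to
# sub-cortical time courses (index 71 is labelled 'Brain-stem' in the plot).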

# plot the times series of 2 labels
fig, axes = plt.subplots(1)
axes.plot(1e3 * stc.times, label_ts[0][0, :], 'k', label='bankssts-lh')
axes.plot(1e3 * stc.times, label_ts[0][71, :].T, 'r', label='Brain-stem')
axes.set(xlabel='Time (ms)', ylabel='MNE current (nAm)')
axes.legend()
mne.viz.tight_layout()
コード例 #47
0
def _compute_rest_psd(subject, kind, freqs):

    ###########################################################################
    # Compute source space
    # -------------------
    src = mne.setup_source_space(subject,
                                 spacing='oct6',
                                 add_dist=False,
                                 subjects_dir=cfg.mne_camcan_freesurfer_path)
    trans = trans_map[subject]
    bem = cfg.mne_camcan_freesurfer_path + \
        "/%s/bem/%s-meg-bem.fif" % (subject, subject)

    ###########################################################################
    # Load and preprocess MEG data
    # ----------------------------

    fname = op.join(cfg.camcan_meg_raw_path, subject, kind,
                    '%s_raw.fif' % kind)

    raw = mne.io.read_raw_fif(fname)
    mne.channels.fix_mag_coil_types(raw.info)
    if DEBUG:
        # raw.crop(0, 180)
        raw.crop(0, 120)
    else:
        raw.crop(0, 300)

    raw = _run_maxfilter(raw, subject, kind)
    _compute_add_ssp_exg(raw)

    # get empty room
    fname_er = op.join(cfg.camcan_meg_path, "emptyroom", subject,
                       "emptyroom_%s.fif" % subject)

    raw_er = mne.io.read_raw_fif(fname_er)
    mne.channels.fix_mag_coil_types(raw_er.info)

    raw_er = _run_maxfilter(raw_er, subject, kind, coord_frame="meg")
    raw_er.info["projs"] += raw.info["projs"]

    cov = mne.compute_raw_covariance(raw_er, method='oas')
    # compute before band-pass of interest

    event_length = 5.
    event_overlap = 0.
    raw_length = raw.times[-1]
    events = mne.make_fixed_length_events(raw,
                                          duration=event_length,
                                          start=0,
                                          stop=raw_length - event_length)
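    # cut the resting-state recording into consecutive, non-overlapping 5 s
    # segments; these define the epochs used for covariance and power estimation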

    #######################################################################
    # Compute the forward and inverse
    # -------------------------------

    info = mne.Epochs(raw,
                      events=events,
                      tmin=0,
                      tmax=event_length,
                      baseline=None,
                      reject=None,
                      preload=False,
                      decim=10).info
    fwd = mne.make_forward_solution(info, trans, src, bem)
    inv = make_inverse_operator(info, fwd, cov)
    del fwd

    #######################################################################
    # Compute label time series and do envelope correlation
    # -----------------------------------------------------
    mne_subjects_dir = "/storage/inria/agramfor/MNE-sample-data/subjects"
    labels = mne.read_labels_from_annot('fsaverage',
                                        'aparc_sub',
                                        subjects_dir=mne_subjects_dir)
    labels = mne.morph_labels(labels,
                              subject_from='fsaverage',
                              subject_to=subject,
                              subjects_dir=cfg.mne_camcan_freesurfer_path)
    labels = [ll for ll in labels if 'unknown' not in ll.name]

    for fmin, fmax, band in freqs:
        print(f"computing {subject}: {fmin} - {fmax} Hz")
        this_raw = raw.copy()
        this_raw.filter(fmin, fmax, n_jobs=1)
        reject = _get_global_reject_epochs(this_raw, decim=5)
        epochs = mne.Epochs(this_raw,
                            events=events,
                            tmin=0,
                            tmax=event_length,
                            baseline=None,
                            reject=reject,
                            preload=True,
                            decim=5)
        if DEBUG:
            epochs = epochs[:3]

        data_cov = mne.compute_covariance(epochs, method='oas')
        stc = _apply_inverse_cov(cov=data_cov,
                                 info=epochs.info,
                                 nave=1,
                                 inverse_operator=inv,
                                 lambda2=1. / 9.,
                                 pick_ori=None,
                                 method='MNE')
        assert np.all(stc.data > 0)

        label_power = mne.extract_label_time_course(
            stc, labels, inv['src'],
            mode="mean")  # XXX signal should be positive

        out_fname = op.join(
            cfg.derivative_path, f'{subject + ("-debug" if DEBUG else "")}_'
            f'cov_mne_{band}.h5')

        mne.externals.h5io.write_hdf5(out_fname, {
            'power': label_power,
            'subject': subject,
            'fmin': fmin,
            'fmax': fmax,
            "band": band,
            'label_names': [ll.name for ll in labels]
        },
                                      overwrite=True)
コード例 #48
0
ファイル: extract_rois.py プロジェクト: TinnErlangen/ATT
                                     subject_to="fsaverage",
                                     spacing=5,
                                     subjects_dir=subjects_dir,
                                     smooth=None)
    idx = 0
    for cond_idx, cond in enumerate(conds):
        X_temp = []
        for fr in f_ranges:
            stc_temp = mne.read_source_estimate(
                "{dir}stcs/nc_{a}_{b}_i{f0}-{f1}Hz_{d}-lh.stc".format(
                    dir=proc_dir, a=sub, b=cond, f0=fr[0], f1=fr[1],
                    d=spacing))
            stc_temp = morph.apply(stc_temp)
            X_temp.append(
                mne.extract_label_time_course(stc_temp,
                                              these_labels,
                                              fs_src,
                                              mode="mean"))
        X[idx].append(np.hstack(X_temp))
        idx += 1
XX = [np.array(x) for x in X]

subjs_t = subjs + ["mean"]
band_division = {
    "theta": [1, 5],
    "alpha": [5, 12],
    "beta": [13, 28],
    "low gamma": [28, 46],
    "high gamma": [46, -1]
}
band_division = {"alpha": [2, 11]}
band_division = {"theta-alpha": [0, 12], "beta": [13, 28]}
コード例 #49
0
#
stcs_nrm = apply_inverse_epochs(epochs_nrm, inverse_nrm, lambda2, method, pick_ori="normal", return_generator=False)
stcs_hyp = apply_inverse_epochs(epochs_hyp, inverse_hyp, lambda2, method, pick_ori="normal", return_generator=False)


# resample
[stc.resample(300) for stc in stcs_nrm]
[stc.resample(300) for stc in stcs_hyp]

# Get labels from FreeSurfer cortical parcellation
labels = mne.read_labels_from_annot("subject_1", parc="PALS_B12_Brodmann", regexp="Brodmann", subjects_dir=subjects_dir)

# Average the source estimates within each label using sign-flips to reduce
# signal cancellations; here the label time courses are returned as lists
src_nrm = inverse_nrm["src"]
label_ts_nrm = mne.extract_label_time_course(stcs_nrm, labels, src_nrm, mode="mean_flip", return_generator=False)

src_hyp = inverse_hyp["src"]
label_ts_hyp = mne.extract_label_time_course(stcs_hyp, labels, src_hyp, mode="mean_flip", return_generator=False)

# standardize TS's
label_ts_nrm_rescaled = []
for j in range(len(label_ts_nrm)):
    label_ts_nrm_rescaled += [rescale(label_ts_nrm[j], epochs_nrm.times, baseline=(None, -0.5), mode="zscore")]

label_ts_hyp_rescaled = []
for j in range(len(label_ts_hyp)):
    label_ts_hyp_rescaled += [rescale(label_ts_hyp[j], epochs_hyp.times, baseline=(None, -0.5), mode="zscore")]


from_time = np.abs(stcs_nrm[0].times + 0).argmin()