Example #1
def test_pick_channels_csd():
    """Test selecting channels from a CrossSpectralDensity."""
    csd = _make_csd()
    csd = pick_channels_csd(csd, ['CH1', 'CH3'])
    assert csd.ch_names == ['CH1', 'CH3']
    assert_array_equal(csd._data,
                       [[0, 6, 12, 18], [2, 8, 14, 20], [5, 11, 17, 23]])
Example #2
def test_pick_channels_csd():
    """Test selecting channels from a CrossSpectralDensity."""
    csd = _make_csd()
    csd = pick_channels_csd(csd, ['CH1', 'CH3'])
    assert csd.ch_names == ['CH1', 'CH3']
    assert_array_equal(csd._data, [[0, 6, 12, 18],
                                   [2, 8, 14, 20],
                                   [5, 11, 17, 23]])
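The two tests above rely on a `_make_csd()` helper from MNE's own test suite. As a rough sketch of how `pick_channels_csd` is typically used outside the tests, a cross-spectral density can be computed from epochs and then restricted to a channel subset; the `epochs` variable and the frequencies chosen here are placeholders, not part of the original examples:

import mne
from mne.time_frequency import csd_morlet, pick_channels_csd

# `epochs` is assumed to be an existing mne.Epochs object (placeholder).
# Compute a cross-spectral density over a few frequencies of interest.
csd = csd_morlet(epochs, frequencies=[10, 12, 14])

# Keep only the gradiometer channels; pick_channels_csd returns a new
# CrossSpectralDensity restricted to the requested channel names.
grads = [epochs.ch_names[ch] for ch in mne.pick_types(epochs.info, meg='grad')]
csd = pick_channels_csd(csd, grads)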
Example #3
    fwd_tan = conpy.forward_to_tangential(fwd_r)
    # get pairs for connectivity calculation
    pairs = np.load("{}NEMO_ico4_connectivity_pairs.npy".format(meg_dir))
    # pairs are defined in fsaverage space; map them to the source space
    # of the current subject
    fsaverage_to_subj = conpy.utils.get_morph_src_mapping(
        fs_src, fwd_tan['src'], indices=True, subjects_dir=mri_dir)[0]
    pairs = [[fsaverage_to_subj[v] for v in pairs[0]],
             [fsaverage_to_subj[v] for v in pairs[1]]]
    for cond in exp_conds:
        print("Calculations for Condition: ", cond)
        for freq, vals in freqs.items():
            print("Calculations for Frequency: ", freq)
            csd = read_csd("{dir}nc_{meg}-csd_{cond}_{freq}.h5".format(
                dir=meg_dir, meg=meg, cond=cond, freq=freq))
            csd = csd.mean()
            csd = pick_channels_csd(csd, fwd_tan['info']['ch_names'])
            con = conpy.dics_connectivity(vertex_pairs=pairs,
                                          fwd=fwd_tan,
                                          data_csd=csd,
                                          reg=0.05,
                                          n_jobs=8)
            con.save("{dir}nc_{meg}_{cond}_{freq}-connectivity.h5".format(
                dir=meg_dir, meg=meg, cond=cond, freq=freq))

    # for the baseline conditions:
    print("Running Baseline Conditions")
    fwd_r = mne.read_forward_solution(
        "{dir}nc_{meg}_from-fs_ico4_bas-r-fwd.fif".format(dir=meg_dir,
                                                          meg=meg))
    # convert the forward model to one that defines two orthogonal dipoles
    # at each source, tangential to a sphere
    fwd_tan = conpy.forward_to_tangential(fwd_r)
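The pairs file loaded above (`NEMO_ico4_connectivity_pairs.npy`) is defined on the fsaverage source space. A minimal sketch of how such a file could be produced with conpy's `all_to_all_connectivity_pairs` (named in the See Also section of Example #4), assuming a forward solution defined on fsaverage; the `fwd_fsaverage` variable and the `min_dist` keyword are assumptions, not taken from the original script:

import numpy as np
import conpy

# `fwd_fsaverage` is assumed to be a Forward object defined on fsaverage.
# Restrict candidate pairs to vertices at least 4 cm apart (the exact
# keyword and unit are assumptions about the conpy API).
pairs = conpy.all_to_all_connectivity_pairs(fwd_fsaverage, min_dist=0.04)
np.save("{}NEMO_ico4_connectivity_pairs.npy".format(meg_dir), pairs)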
Example #4
def dics_connectivity(vertex_pairs,
                      fwd,
                      data_csd,
                      reg=0.05,
                      n_angles=50,
                      block_size=10000,
                      n_jobs=1,
                      verbose=None):
    """Compute spectral connectivity using a DICS beamformer.

    Calculates the connectivity between the given vertex pairs using a DICS
    beamformer [1]_ [2]_. Connectivity is defined in terms of coherence:

    C = Sxy^2 [Sxx * Syy]^-1

    Where Sxy is the cross-spectral density (CSD) between dipoles x and y, Sxx
    is the power spectral density (PSD) at dipole x and Syy is the PSD at
    dipole y.

    Parameters
    ----------
    vertex_pairs : pair of lists (vert_from_idx, vert_to_idx)
        Vertex pairs between which connectivity is calculated. The pairs are
        specified using two lists: the first list contains, for each pair, the
        index of the first vertex. The second list contains, for each pair, the
        index of the second vertex.
    fwd : instance of Forward
        Subject's forward solution, possibly restricted to only include
        vertices that are close to the sensors. For 'canonical' mode, the
        orientation needs to be tangential or free.
    data_csd : instance of CrossSpectralDensity
        The cross spectral density of the data.
    reg : float
        Tikhonov regularization parameter to control for trade-off between
        spatial resolution and noise sensitivity. Defaults to 0.05.
    n_angles : int
        Number of angles to try when optimizing dipole orientations. Defaults
        to 50.
    block_size : int
        Number of pairs to process in a single batch. Beware of memory
        requirements, which are ``n_jobs * block_size``. Defaults to 10000.
    n_jobs : int
        Number of blocks to process simultaneously. Defaults to 1.
    verbose : bool | str | int | None
        If not None, override default verbose level (see :func:`mne.verbose`
        and :ref:`Logging documentation <tut_logging>` for more).

    Returns
    -------
    connectivity : instance of Connectivity
        The adjacency matrix.

    See Also
    --------
    all_to_all_connectivity_pairs : Obtain pairs for all-to-all connectivity.
    one_to_all_connectivity_pairs : Obtain pairs for one-to-all connectivity.

    References
    ----------
    .. [1] Gross, J., Kujala, J., Hamalainen, M., Timmermann, L., Schnitzler,
           A., & Salmelin, R. (2001). Dynamic imaging of coherent sources:
           Studying neural interactions in the human brain. Proceedings of the
           National Academy of Sciences, 98(2), 694–699.
    .. [2] Kujala, J., Gross, J., & Salmelin, R. (2008). Localization of
           correlated network activity at the cortical level with MEG.
           NeuroImage, 39(4), 1706–1720.
    """
    fwd = pick_channels_forward(fwd, data_csd.ch_names)
    data_csd = pick_channels_csd(data_csd, fwd['info']['ch_names'])

    vertex_from, vertex_to = vertex_pairs
    if len(vertex_from) != len(vertex_to):
        raise ValueError('Lengths of the two lists of vertices do not match.')
    n_pairs = len(vertex_from)

    G = fwd['sol']['data'].copy()
    n_orient = G.shape[1] // fwd['nsource']

    if n_orient == 1:
        raise ValueError('A forward operator with free or tangential '
                         'orientation must be used.')
    elif n_orient == 3:
        # Convert forward to tangential orientation for more speed.
        fwd = forward_to_tangential(fwd)
        G = fwd['sol']['data']
        n_orient = 2

    G = G.reshape(G.shape[0], fwd['nsource'], n_orient)

    # Normalize the lead field
    G /= np.linalg.norm(G, axis=0)

    Cm = data_csd.get_data()
    Cm_inv, alpha = _reg_pinv(Cm, reg)
    del Cm

    W = np.dot(G.T, Cm_inv)

    # Pre-compute spectral power at each unique vertex
    unique_verts, vertex_map = np.unique(np.r_[vertex_from, vertex_to],
                                         return_inverse=True)
    spec_power_inv = np.array(
        [np.dot(W[:, vert, :], G[:, vert, :]) for vert in unique_verts])

    # Map vertex indices to unique indices, so the pre-computed spectral power
    # can be retrieved
    vertex_from_map = vertex_map[:len(vertex_from)]
    vertex_to_map = vertex_map[len(vertex_from):]

    coherence = np.zeros((len(vertex_from)))

    # Define a search space for dipole orientations
    angles = np.arange(n_angles) * np.pi / n_angles
    orientations = np.vstack((np.sin(angles), np.cos(angles)))

    # Create chunks of pairs to evaluate at once
    n_blocks = int(np.ceil(n_pairs / float(block_size)))
    blocks = [
        slice(i * block_size, min((i + 1) * block_size, n_pairs))
        for i in range(n_blocks)
    ]

    parallel, my_compute_dics_coherence, _ = parallel_func(
        _compute_dics_coherence, n_jobs, verbose)

    logger.info('Computing coherence between %d source pairs in %d blocks...' %
                (n_pairs, n_blocks))
    if numba_enabled:
        logger.info('Using numba optimized code path.')
    coherence = np.hstack(
        parallel(
            my_compute_dics_coherence(W, G, vertex_from_map[block],
                                      vertex_to_map[block], spec_power_inv,
                                      orientations) for block in blocks))
    logger.info('[done]')

    return VertexConnectivity(
        data=coherence,
        pairs=[v[:len(coherence)] for v in vertex_pairs],
        vertices=[s['vertno'] for s in fwd['src']],
        vertex_degree=None,  # Compute this in the constructor
        subject=fwd['src'][0]['subject_his_id'],
    )
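The coherence definition given in the docstring above, C = Sxy^2 [Sxx * Syy]^-1, can be illustrated with a small, self-contained NumPy sketch; the CSD matrix here is made up purely for illustration:

import numpy as np

# Toy cross-spectral density matrix for two sources x and y at one frequency.
# The diagonal holds the power spectral densities Sxx and Syy, the
# off-diagonal element holds the cross-spectral density Sxy.
S = np.array([[2.0 + 0.0j, 1.0 + 0.5j],
              [1.0 - 0.5j, 3.0 + 0.0j]])

Sxx, Syy = S[0, 0].real, S[1, 1].real
Sxy = S[0, 1]

# Coherence as defined in the docstring: C = |Sxy|^2 / (Sxx * Syy).
coherence = np.abs(Sxy) ** 2 / (Sxx * Syy)
print(coherence)  # a value between 0 and 1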
Example #5
from matplotlib import pyplot as plt
import mne
from mne.time_frequency import read_csd, pick_channels_csd

from config import fname, subjects, freq_bands

info = mne.io.read_info(fname.epo(subject=subjects[0]))
grads = [info['ch_names'][ch] for ch in mne.pick_types(info, meg='grad')]
csd = read_csd(fname.csd(subject=subjects[0], condition='face'))
csd = pick_channels_csd(csd, grads)
csd = csd.mean([f[0] for f in freq_bands], [f[1] for f in freq_bands])

# Plot theta, alpha, low beta
csd[:3].plot(info, n_cols=3, show=False)
plt.savefig('../paper/figures/csd1.pdf', bbox_inches='tight')

# Plot high beta 1, high beta 2 and low gamma
csd[3:].plot(info, n_cols=3, show=False)
plt.savefig('../paper/figures/csd2.pdf', bbox_inches='tight')
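In this last example, `freq_bands` comes from the project's config module; judging from how it is indexed, it is a sequence of `(fmin, fmax)` tuples, and `csd.mean` collapses the CSD into one averaged matrix per band. A minimal sketch of that pattern, with made-up band edges and a placeholder `epochs` object:

from mne.time_frequency import csd_morlet

# Hypothetical frequency bands in Hz: theta, alpha, low beta.
freq_bands = [(4, 8), (8, 12), (12, 20)]

# `epochs` is assumed to be an existing mne.Epochs object.
csd = csd_morlet(epochs, frequencies=list(range(4, 21)))

# Passing lists of fmin/fmax values yields one averaged CSD per band.
csd_bands = csd.mean([f[0] for f in freq_bands], [f[1] for f in freq_bands])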