def test_reg_pinv():
    """Test regularization and inversion of covariance matrix."""
    # Create a rank-deficient array
    a = np.array([[1., 0., 1.],
                  [0., 1., 0.],
                  [1., 0., 1.]])

    # Test that inverting a rank-deficient matrix without regularization
    # raises the expected warning
    with warnings.catch_warnings(record=True) as w:
        _reg_pinv(a, reg=0.)
    assert_true(any('deficient' in str(ww.message) for ww in w))
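
# For reference, a minimal sketch of the kind of Tikhonov-regularized
# pseudo-inverse the test above exercises. This is an illustration under
# assumptions, not the actual ``_reg_pinv`` implementation, and the helper
# name ``_reg_pinv_sketch`` is hypothetical: it loads the diagonal with
# ``reg * trace(x) / n`` before inverting, and warns when asked to invert a
# rank-deficient matrix without regularization.
def _reg_pinv_sketch(x, reg=0.05):
    """Illustrative regularized pseudo-inverse (assumed behavior)."""
    n = len(x)
    if reg == 0 and np.linalg.matrix_rank(x) < n:
        warnings.warn('Covariance matrix is rank-deficient and no '
                      'regularization is applied.')
    # Scale the loading by the mean of the diagonal (trace / n) and invert
    # with a pseudo-inverse so singular matrices are still handled.
    alpha = reg * np.trace(x) / n
    return np.linalg.pinv(x + alpha * np.eye(n)), alpha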
def dics_connectivity(vertex_pairs, fwd, data_csd, reg=0.05, n_angles=50,
                      block_size=10000, n_jobs=1, verbose=None):
    """Compute spectral connectivity using a DICS beamformer.

    Calculates the connectivity between the given vertex pairs using a DICS
    beamformer [1]_ [2]_. Connectivity is defined in terms of coherence:

        C = Sxy^2 [Sxx * Syy]^-1

    where Sxy is the cross-spectral density (CSD) between dipoles x and y,
    Sxx is the power spectral density (PSD) at dipole x and Syy is the PSD
    at dipole y.

    Parameters
    ----------
    vertex_pairs : pair of lists (vert_from_idx, vert_to_idx)
        Vertex pairs between which connectivity is calculated. The pairs are
        specified using two lists: the first list contains, for each pair,
        the index of the first vertex. The second list contains, for each
        pair, the index of the second vertex.
    fwd : instance of Forward
        Subject's forward solution, possibly restricted to only include
        vertices that are close to the sensors. For 'canonical' mode, the
        orientation needs to be tangential or free.
    data_csd : instance of CrossSpectralDensity
        The cross-spectral density of the data.
    reg : float
        Tikhonov regularization parameter to control the trade-off between
        spatial resolution and noise sensitivity. Defaults to 0.05.
    n_angles : int
        Number of angles to try when optimizing dipole orientations.
        Defaults to 50.
    block_size : int
        Number of pairs to process in a single batch. Beware of memory
        requirements, which scale with ``n_jobs * block_size``.
        Defaults to 10000.
    n_jobs : int
        Number of blocks to process simultaneously. Defaults to 1.
    verbose : bool | str | int | None
        If not None, override default verbose level (see :func:`mne.verbose`
        and :ref:`Logging documentation <tut_logging>` for more).

    Returns
    -------
    connectivity : instance of Connectivity
        The adjacency matrix.

    See Also
    --------
    all_to_all_connectivity_pairs : Obtain pairs for all-to-all connectivity.
    one_to_all_connectivity_pairs : Obtain pairs for one-to-all connectivity.

    References
    ----------
    .. [1] Gross, J., Kujala, J., Hamalainen, M., Timmermann, L., Schnitzler,
           A., & Salmelin, R. (2001). Dynamic imaging of coherent sources:
           Studying neural interactions in the human brain. Proceedings of
           the National Academy of Sciences, 98(2), 694–699.
    .. [2] Kujala, J., Gross, J., & Salmelin, R. (2008). Localization of
           correlated network activity at the cortical level with MEG.
           NeuroImage, 39(4), 1706–1720.
    """
    fwd = pick_channels_forward(fwd, data_csd.ch_names)
    data_csd = pick_channels_csd(data_csd, fwd['info']['ch_names'])

    vertex_from, vertex_to = vertex_pairs
    if len(vertex_from) != len(vertex_to):
        raise ValueError('Lengths of the two lists of vertices do not match.')
    n_pairs = len(vertex_from)

    G = fwd['sol']['data'].copy()
    n_orient = G.shape[1] // fwd['nsource']

    if n_orient == 1:
        raise ValueError('A forward operator with free or tangential '
                         'orientation must be used.')
    elif n_orient == 3:
        # Convert forward to tangential orientation for more speed.
        fwd = forward_to_tangential(fwd)
        G = fwd['sol']['data']
        n_orient = 2

    G = G.reshape(G.shape[0], fwd['nsource'], n_orient)

    # Normalize the lead field
    G /= np.linalg.norm(G, axis=0)

    Cm = data_csd.get_data()
    Cm_inv, alpha = _reg_pinv(Cm, reg)
    del Cm

    W = np.dot(G.T, Cm_inv)

    # Pre-compute spectral power at each unique vertex
    unique_verts, vertex_map = np.unique(np.r_[vertex_from, vertex_to],
                                         return_inverse=True)
    spec_power_inv = np.array([np.dot(W[:, vert, :], G[:, vert, :])
                               for vert in unique_verts])

    # Map vertex indices to unique indices, so the pre-computed spectral
    # power can be retrieved
    vertex_from_map = vertex_map[:len(vertex_from)]
    vertex_to_map = vertex_map[len(vertex_from):]

    coherence = np.zeros(len(vertex_from))

    # Define a search space for dipole orientations
    angles = np.arange(n_angles) * np.pi / n_angles
    orientations = np.vstack((np.sin(angles), np.cos(angles)))

    # Create chunks of pairs to evaluate at once
    n_blocks = int(np.ceil(n_pairs / float(block_size)))
    blocks = [slice(i * block_size, min((i + 1) * block_size, n_pairs))
              for i in range(n_blocks)]

    parallel, my_compute_dics_coherence, _ = parallel_func(
        _compute_dics_coherence, n_jobs, verbose)

    logger.info('Computing coherence between %d source pairs in %d blocks...'
                % (n_pairs, n_blocks))
    if numba_enabled:
        logger.info('Using numba optimized code path.')
    coherence = np.hstack(parallel(
        my_compute_dics_coherence(W, G, vertex_from_map[block],
                                  vertex_to_map[block], spec_power_inv,
                                  orientations)
        for block in blocks))
    logger.info('[done]')

    return VertexConnectivity(
        data=coherence,
        pairs=[v[:len(coherence)] for v in vertex_pairs],
        vertices=[s['vertno'] for s in fwd['src']],
        vertex_degree=None,  # Compute this in the constructor
        subject=fwd['src'][0]['subject_his_id'],
    )
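
# Example usage, as a sketch only: the file paths and frequency values below
# are placeholders, and ``all_to_all_connectivity_pairs`` is assumed to be
# importable from this package (see the "See Also" section of the docstring
# above). ``read_forward_solution``, ``read_epochs`` and ``csd_morlet`` are
# standard MNE-Python API.
if __name__ == '__main__':
    import mne
    from mne.time_frequency import csd_morlet

    fwd_example = mne.read_forward_solution('sample-fwd.fif')  # placeholder
    epochs = mne.read_epochs('sample-epo.fif')  # placeholder
    csd = csd_morlet(epochs, frequencies=[10, 12])  # example alpha-band CSD

    # All-to-all vertex pairs, then coherence with the DICS beamformer above,
    # using the CSD averaged across the chosen frequencies.
    pairs = all_to_all_connectivity_pairs(fwd_example)
    con = dics_connectivity(pairs, fwd_example, csd.mean(), reg=0.05)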