Example #1
	def getOrthEnvelope(self, Index, ReferenceIndex, FreqBand, pad=100, LowPass=False):
		"""
		Compute the orthogonalized envelope of the indexed signal with respect to a reference signal.
		Used to create a plot of the orthogonalized envelope.
		"""
		Limits = FreqBand
		# Filter signal
		FilteredSignal = self.getFrequencyBand(Limits)
		# Get complex signal
		n_fft = next_fast_len(self.TimePoints)
		ComplexSignal = hilbert(FilteredSignal, N=n_fft, axis=-1)[:, :self.TimePoints]
		ComplexSignal = ComplexSignal[:,pad:-pad]

		# Get signal envelope and conjugate
		SignalEnv = np.abs(ComplexSignal)
		SignalConj = ComplexSignal.conj()

		# Orthogonalize the indexed signal with respect to the reference signal
		OrthSignal = (ComplexSignal[Index] * (SignalConj[ReferenceIndex] / SignalEnv[ReferenceIndex])).imag
		OrthEnv = np.abs(OrthSignal)

		if LowPass:
			OrthEnv = filter_data(OrthEnv, self.fsample, 0, self.lowpass, fir_window='hamming', verbose=False)
			ReferenceEnv = filter_data(SignalEnv[ReferenceIndex], self.fsample, 0, self.lowpass, fir_window='hamming', verbose=False)
		else:
			ReferenceEnv = SignalEnv[ReferenceIndex]  # keep only the reference envelope, matching the LowPass branch

		return OrthEnv, ReferenceEnv
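The core of the method above is the pairwise orthogonalization of Hipp et al. (2012): the analytic signal is projected onto the component orthogonal to the reference before its envelope is taken. A minimal standalone sketch of that step on synthetic data (plain NumPy/SciPy; the variable names are illustrative and not part of the class above):

import numpy as np
from scipy.signal import hilbert

rng = np.random.default_rng(0)
x, y = rng.standard_normal((2, 1000))        # two illustrative signals
ax, ay = hilbert(x), hilbert(y)              # analytic (complex) signals
# Part of x orthogonal to y: imag(a_x * conj(a_y) / |a_y|)
orth = (ax * ay.conj() / np.abs(ay)).imag
orth_env = np.abs(orth)                      # envelope of the orthogonalized signal
ref_env = np.abs(ay)                         # envelope of the reference
print(orth_env.shape, ref_env.shape)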
Example #2
	def getEnvFC(self, Limits, conn_mode):
		"""
		Compute the functional connectivity (FC) matrix based on the signal envelope.
		Settings are taken from the configuration file. If conn_mode contains
		'orth', the signal is orthogonalized in parallel using _parallel_orth_corr.
		"""
		# Filter signal
		FilteredSignal = self.getFrequencyBand(Limits)

		# Get complex signal
		n_fft = next_fast_len(self.TimePoints)
		ComplexSignal = hilbert(FilteredSignal, N=n_fft, axis=-1)[:, :self.TimePoints]
		pad=100
		ComplexSignal = ComplexSignal[:,pad:-pad]

		# Get signal envelope
		SignalEnv = np.abs(ComplexSignal)
		
		# For plain correlation modes, the unorthogonalized FC is computed.
		if conn_mode in ['lowpass-corr', 'corr']:			
			if 'lowpass' in conn_mode: 
				SignalEnv = filter_data(SignalEnv, self.fsample, 0, self.lowpass, fir_window='hamming', verbose=False)			
			FC = pearson(SignalEnv, SignalEnv)
			return FC

		SignalConj = ComplexSignal.conj()
		ConjdivEnv = SignalConj/SignalEnv 

		# Compute orthogonalization and correlation in parallel		
		with Pool(processes=10) as p: 
			result = p.starmap(self._parallel_orth_corr, [(Complex, SignalEnv, ConjdivEnv) for Complex in ComplexSignal])
		FC = np.array(result)

		# Make the Corr Matrix symmetric
		FC = (FC.T + FC) / 2.
		return FC
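The helper _parallel_orth_corr referenced in the docstring is not shown in this listing. Below is a minimal sketch of what such a worker might look like, assuming it receives one complex signal plus the shared envelope and conjugate-over-envelope arrays from starmap and returns one row of the FC matrix; np.corrcoef stands in for the pearson helper used above, and any low-pass step is omitted:

import numpy as np

def _parallel_orth_corr(Complex, SignalEnv, ConjdivEnv):
    # Orthogonalize this one signal with respect to every signal in the set
    OrthSignal = (Complex * ConjdivEnv).imag
    OrthEnv = np.abs(OrthSignal)
    # Correlate the orthogonalized envelope with each raw envelope -> one FC row
    row = np.array([np.corrcoef(OrthEnv[n], SignalEnv[n])[0, 1]
                    for n in range(SignalEnv.shape[0])])
    return row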
Example #3
raw_erms['vv'].load_data().resample(new_sfreq)
raw_erms['vv'].info['bads'] = ['MEG2233', 'MEG1842']

raws['opm'] = mne.io.read_raw_fif(opm_fname)
raws['opm'].load_data().resample(new_sfreq)
raw_erms['opm'] = mne.io.read_raw_fif(opm_erm_fname)
raw_erms['opm'].load_data().resample(new_sfreq)
# Make sure our assumptions later hold
assert raws['opm'].info['sfreq'] == raws['vv'].info['sfreq']

##############################################################################
# Explore data

titles = dict(vv='VectorView', opm='OPM')
kinds = ('vv', 'opm')
n_fft = next_fast_len(int(round(4 * new_sfreq)))
print('Using n_fft=%d (%0.1f sec)' % (n_fft, n_fft / raws['vv'].info['sfreq']))
for kind in kinds:
    fig = raws[kind].plot_psd(n_fft=n_fft, proj=True)
    fig.suptitle(titles[kind])
    fig.subplots_adjust(0.1, 0.1, 0.95, 0.85)

##############################################################################
# Alignment and forward
# ---------------------

# Here we use a reduced size source space (oct5) just for speed
src = mne.setup_source_space(
    subject, 'oct5', add_dist=False, subjects_dir=subjects_dir)
# This line removes source-to-source distances that we will not need.
# We only do it here to save a bit of memory, in general this is not required.
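A quick sanity check on the reduced source space, assuming the src object created above (an 'oct5' subdivision is expected to give 1026 sources per hemisphere):

print(sum(s['nuse'] for s in src))  # expect 2052 for oct5 (1026 per hemisphere)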
Example #4
def envelope_correlation(data,
                         combine='mean',
                         orthogonalize="pairwise",
                         verbose=None):
    """Compute the envelope correlation.
    Parameters
    ----------
    data : array-like, shape=(n_epochs, n_signals, n_times) | generator
        The data from which to compute connectivity.
        The array-like object can also be a list/generator of arrays,
        each with shape (n_signals, n_times), or a :class:`~mne.SourceEstimate`
        object (and ``stc.data`` will be used). If it's float data,
        the Hilbert transform will be applied; if it's complex data,
        it's assumed the Hilbert has already been applied.
    combine : 'mean' | callable | None
        How to combine correlation estimates across epochs.
        Default is 'mean'. Can be None to return without combining.
        If callable, it must accept one positional input.
        For example::
            combine = lambda data: np.median(data, axis=0)
    orthogonalize : 'pairwise' | False
        Whether to orthogonalize with the pairwise method or not.
        Defaults to 'pairwise'. Note that when False,
        the correlation matrix will not be returned with
        absolute values.

    %(verbose)s

    Returns
    -------
    corr : ndarray, shape ([n_epochs, ]n_nodes, n_nodes)
        The pairwise orthogonal envelope correlations.
        This matrix is symmetric. If combine is None, the array
        will have three dimensions, the first of which is ``n_epochs``.

    Notes
    -----
    This function computes the power envelope correlation between
    orthogonalized signals [1]_ [2]_.

    References
    ----------
    .. [1] Hipp JF, Hawellek DJ, Corbetta M, Siegel M, Engel AK (2012)
           Large-scale cortical correlation structure of spontaneous
           oscillatory activity. Nature Neuroscience 15:884–890
    .. [2] Khan S et al. (2018). Maturation trajectories of cortical
           resting-state networks depend on the mediating frequency band.
           Neuroimage 174:57–68
    """
    from scipy.signal import hilbert
    n_nodes = None
    if combine is not None:
        fun = _check_combine(combine, valid=('mean', ))
    else:  # None
        fun = np.array

    corrs = list()
    # Note: This is embarrassingly parallel, but the overhead of sending
    # the data to different workers is roughly the same as the gain of
    # using multiple CPUs. And we require too much GIL for prefer='threading'
    # to help.
    for ei, epoch_data in enumerate(data):
        if isinstance(epoch_data, _BaseSourceEstimate):
            epoch_data = epoch_data.data
        if epoch_data.ndim != 2:
            raise ValueError('Each entry in data must be 2D, got shape %s' %
                             (epoch_data.shape, ))
        n_nodes, n_times = epoch_data.shape
        if ei > 0 and n_nodes != corrs[0].shape[0]:
            raise ValueError('n_nodes mismatch between data[0] and data[%d], '
                             'got %s and %s' %
                             (ei, n_nodes, corrs[0].shape[0]))
        # Get the complex envelope (allowing complex inputs allows people
        # to do raw.apply_hilbert if they want)
        if epoch_data.dtype in (np.float32, np.float64):
            n_fft = next_fast_len(n_times)
            epoch_data = hilbert(epoch_data, N=n_fft, axis=-1)[..., :n_times]

        if orthogonalize is False:
            corrs.append(np.corrcoef(np.abs(epoch_data)))
            continue

        if epoch_data.dtype not in (np.complex64, np.complex128):
            raise ValueError('data.dtype must be float or complex, got %s' %
                             (epoch_data.dtype, ))
        data_mag = np.abs(epoch_data)
        data_conj_scaled = epoch_data.conj()
        data_conj_scaled /= data_mag
        # subtract means
        data_mag_nomean = data_mag - np.mean(data_mag, axis=-1, keepdims=True)
        # compute variances using linalg.norm (square, sum, sqrt) since mean=0
        data_mag_std = np.linalg.norm(data_mag_nomean, axis=-1)
        data_mag_std[data_mag_std == 0] = 1
        corr = np.empty((n_nodes, n_nodes))
        for li, label_data in enumerate(epoch_data):
            label_data_orth = (label_data * data_conj_scaled).imag
            label_data_orth -= np.mean(label_data_orth, axis=-1, keepdims=True)
            label_data_orth_std = np.linalg.norm(label_data_orth, axis=-1)
            label_data_orth_std[label_data_orth_std == 0] = 1
            # correlation is dot product divided by variances
            corr[li] = np.dot(label_data_orth, data_mag_nomean[li])
            corr[li] /= data_mag_std[li]
            corr[li] /= label_data_orth_std
        # Make it symmetric (it isn't at this point)
        corr = np.abs(corr)
        corrs.append((corr.T + corr) / 2.)
        del corr

    corr = fun(corrs)
    return corr
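A quick usage sketch for the function above on synthetic epochs, assuming the MNE-internal helpers it relies on (next_fast_len, _check_combine, _BaseSourceEstimate) are importable in the same module:

import numpy as np

rng = np.random.default_rng(42)
data = rng.standard_normal((5, 8, 512))        # (n_epochs, n_signals, n_times), synthetic
corr = envelope_correlation(data)              # averaged across epochs
print(corr.shape)                              # (8, 8), symmetric
per_epoch = envelope_correlation(data, combine=None)
print(per_epoch.shape)                         # (5, 8, 8)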
Example #5
	def getEnvelope(self, Limits):
		filteredSignal = self.getFrequencyBand(Limits)
		n_fft = next_fast_len(self.TimePoints)
		complex_signal = hilbert(filteredSignal, N=n_fft, axis=-1)[:, :self.TimePoints]  # trim the FFT padding, as in the other methods
		filteredEnvelope = np.abs(complex_signal)
		return filteredEnvelope
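All of these examples pass an n_fft from next_fast_len to hilbert because the transform is FFT-based and composite lengths are far cheaper than awkward (e.g. prime) ones; the result is then trimmed back to the original number of samples. A small illustration, assuming scipy.fft:

from scipy.fft import next_fast_len

n_times = 9973                        # a prime, slow FFT length
n_fft = next_fast_len(n_times)
print(n_times, '->', n_fft)           # the next cheap composite length >= n_times
# hilbert(signal, N=n_fft, axis=-1)[..., :n_times] then discards the padding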
Example #6
# the output contains both projector types (and also the original empty-room
# projectors)
ssp_ecg_eog, _ = mne.preprocessing.compute_proj_eog(
    raws['vv'], n_grad=1, n_mag=1, ch_name='MEG0112')
raws['vv'].add_proj(ssp_ecg_eog, remove_existing=True)
raw_erms['vv'].add_proj(ssp_ecg_eog)
fig = mne.viz.plot_projs_topomap(raws['vv'].info['projs'][-4:],
                                 info=raws['vv'].info)
fig.suptitle(titles['vv'])
fig.subplots_adjust(0.05, 0.05, 0.95, 0.85)

##############################################################################
# Explore data

kinds = ('vv', 'opm')
n_fft = next_fast_len(int(round(4 * new_sfreq)))
print('Using n_fft=%d (%0.1f sec)' % (n_fft, n_fft / raws['vv'].info['sfreq']))
for kind in kinds:
    fig = raws[kind].plot_psd(n_fft=n_fft, proj=True)
    fig.suptitle(titles[kind])
    fig.subplots_adjust(0.1, 0.1, 0.95, 0.85)

##############################################################################
# Alignment and forward
# ---------------------

src = mne.read_source_spaces(src_fname)
bem = mne.read_bem_solution(bem_fname)
fwd = dict()
trans = dict(vv=vv_trans_fname, opm=opm_trans_fname)
# check alignment and generate forward
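The snippet stops at the comment above; one plausible continuation is sketched below, assuming the raws, trans, src, bem, fwd, kinds, subject and subjects_dir variables from this and the earlier snippets (a sketch, not the original tutorial code):

import mne

for kind in kinds:
    # visual check of the sensor-to-MRI coregistration for each system
    mne.viz.plot_alignment(raws[kind].info, trans=trans[kind], subject=subject,
                           subjects_dir=subjects_dir, surfaces=('head', 'white'))
    # forward solution per system, reusing the shared source space and BEM
    fwd[kind] = mne.make_forward_solution(raws[kind].info, trans[kind], src, bem)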
Example #7
def compute_src_label_ts(subject,
                         crop_to=[0, 250],
                         resample_to=100.,
                         bads=None,
                         mag_reject=5e-12,
                         win_len=2000,
                         n_wins=11,
                         verbose=None,
                         lambda2=1. / 9.,
                         inv_method='dSPM',
                         extract_ts_mode='mean_flip'):
    """
    Compute source label time series
    """
    """
    Compute anatomy
    """

    hcp.make_mne_anatomy(subject,
                         subjects_dir=subjects_dir,
                         hcp_path=hcp_path,
                         recordings_path=hcp_path)
    """
    Read surface labels
    """
    labels = read_labels_from_annot(subject,
                                    parc='aparc',
                                    subjects_dir=subjects_dir)
    labels_fsav = read_labels_from_annot('fsaverage',
                                         parc='aparc',
                                         subjects_dir=subjects_dir)
    """
    Read raw data
    """

    raw = hcp.read_raw(subject=subject,
                       data_type=data_type,
                       hcp_path=hcp_path,
                       run_index=run_index)

    raw.load_data()

    raw.crop(crop_to[0], crop_to[1])

    raw.resample(resample_to)

    raw.info['bads'] = bads if bads is not None else []

    hcp.preprocessing.set_eog_ecg_channels(raw)

    hcp.preprocessing.apply_ref_correction(raw)

    info = raw.info.copy()

    raw.info['projs'] = []

    ecg_ave = create_ecg_epochs(raw).average()

    eog_ave = create_eog_epochs(raw).average()

    ssp_eog, _ = compute_proj_eog(raw,
                                  n_grad=1,
                                  n_mag=1,
                                  average=True,
                                  reject=dict(mag=mag_reject))
    raw.add_proj(ssp_eog, remove_existing=True)

    n_fft = next_fast_len(int(round(4 * raw.info['sfreq'])))

    sfreq = raw.info['sfreq']
    """
    Compute forward model
    """
    src_outputs = hcp.anatomy.compute_forward_stack(
        subject=subject,
        subjects_dir=subjects_dir,
        hcp_path=hcp_path,
        recordings_path=hcp_path,
        src_params=dict(add_dist=False),
        info_from=dict(data_type=data_type, run_index=run_index))
    fwd = src_outputs['fwd']
    """
    Compute noise covariance
    """
    raw_noise = hcp.read_raw(subject=subject,
                             hcp_path=hcp_path,
                             data_type='noise_empty_room')
    raw_noise.load_data()
    hcp.preprocessing.apply_ref_correction(raw_noise)
    raw_noise.add_proj(ssp_eog)
    noise_cov = compute_raw_covariance(raw_noise, method='oas')
    """
    Compute inverse operator
    """

    raw.info = info
    inv_op = make_inverse_operator(raw.info,
                                   forward=fwd,
                                   noise_cov=noise_cov,
                                   verbose=verbose)
    """
    Compute source activity
    """

    wins = [[0, win_len]]
    for i in range(n_wins):
        new_wins = [
            wins[0][0] + (win_len * (i + 1)), wins[0][1] + (win_len * (i + 1))
        ]
        wins.append(new_wins)

    raw_srcs = []
    for win in wins:
        res = apply_inverse_raw(raw,
                                inv_op,
                                lambda2=lambda2,
                                method=inv_method,
                                label=None,
                                start=win[0],
                                stop=win[1],
                                nave=1,
                                time_func=None,
                                pick_ori=None,
                                buffer_size=None,
                                prepared=False,
                                method_params=None,
                                verbose=verbose)
        raw_srcs.append(res)
    """
    Compute source label time series
    """
    src = inv_op['src']
    label_ts = extract_label_time_course(raw_srcs,
                                         labels,
                                         src,
                                         mode=extract_ts_mode,
                                         return_generator=False)

    return label_ts, sfreq
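A usage sketch for compute_src_label_ts, assuming the module-level names the function relies on (subjects_dir, hcp_path, data_type, run_index) are configured and that mne-hcp can locate the HCP data; the paths and subject ID below are illustrative placeholders:

# illustrative module-level configuration expected by compute_src_label_ts
subjects_dir = '/path/to/subjects_dir'      # placeholder
hcp_path = '/path/to/HCP'                   # placeholder
data_type = 'rest'                          # resting-state MEG run
run_index = 0

label_ts, sfreq = compute_src_label_ts('100307')   # '100307' is an illustrative HCP subject ID
print(len(label_ts), label_ts[0].shape, sfreq)     # one array of label time courses per window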