Example #1
    def to_nitime(self,
                  picks=None,
                  epochs_idx=None,
                  collapse=False,
                  copy=True,
                  use_first_samp=False):
        """ Export epochs as nitime TimeSeries

        Parameters
        ----------
        picks : array-like | None
            Indices of the channels to export. If None, all good channels
            will be used.
        epochs_idx : slice | array-like | None
            Epochs index for single or selective epochs exports. If None,
            all epochs will be used.
        collapse : boolean
            If True, the epochs and time slices will be collapsed into a
            2D array. This may be required by some nitime functions.
        copy : boolean
            If True, a copy of the epochs data is exported.
        use_first_samp : boolean
            If True, the time returned is relative to the session onset;
            otherwise it is relative to the recording onset.

        Returns
        -------
        epochs_ts : instance of nitime.TimeSeries
            The Epochs as nitime TimeSeries object
        """
        try:
            from nitime import TimeSeries  # to avoid strong dependency
        except ImportError:
            raise ImportError('the nitime package is missing')

        if picks is None:
            picks = pick_types(self.info,
                               include=self.ch_names,
                               exclude=self.info['bads'])
        if epochs_idx is None:
            epochs_idx = slice(len(self.events))

        data = self.get_data()[epochs_idx, picks]

        if copy:
            data = data.copy()

        if collapse:
            data = np.hstack(data).copy()

        offset = self.raw.time_as_index(abs(self.tmin), use_first_samp)
        t0 = self.raw.index_as_time(self.events[0, 0] - offset)[0]
        epochs_ts = TimeSeries(data, sampling_rate=self.info['sfreq'], t0=t0)
        epochs_ts.ch_names = np.array(self.ch_names)[picks].tolist()

        return epochs_ts
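
A minimal usage sketch for the export above, assuming an existing mne Epochs instance named epochs (the call below is illustrative, not part of the original source):

epochs_ts = epochs.to_nitime(collapse=True)  # 2D: channels x (epochs * times)
print(epochs_ts.sampling_rate)
print(epochs_ts.ch_names[:5])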
Example #3
    def to_nitime(self, picks=None):
        """ Export Evoked object to NiTime
        Parameters
        ----------
        picks : array-like | None
            Indices of channels to apply. If None, all channels will be
            exported.

        Retruns
        -------
        evoked_ts : instance of nitime.TimeSeries
        """
        try:
            from nitime import TimeSeries  # to avoid strong dependency
        except ImportError:
            raise ImportError('the nitime package is missing')

        evoked_ts = TimeSeries(
            self.data if picks is None else self.data[picks],
            sampling_rate=self.info['sfreq'])
        return evoked_ts
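
A similar hedged sketch for the Evoked export, assuming an existing mne Evoked instance named evoked:

evoked_ts = evoked.to_nitime(picks=[0, 1, 2])  # export the first three channels
print(evoked_ts.data.shape, evoked_ts.sampling_rate)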
Example #4
from copy import deepcopy

import numpy as np
from numpy import fill_diagonal, tril_indices
from scipy.stats import pearsonr, spearmanr

def adj_static(ts,
               measure='corr',
               pval=False,
               TR=2,
               fq_l=None,
               fq_u=None,
               order=1,
               scale=2,
               w=None,
               idp=None,
               excl_zero_cov=False):
    """returns a *static* graph representation (adjacency matrix)

    Parameters
    ----------
    ts : ndarray, shape(n_rois, n_tps)
        Pre-processed timeseries information
    measure : string
        Similarity measure for the adjacency matrix calculation.

        * ``corr``: Pearson product-moment correlation coefficient [-1,1].
        * ``cov``: Covariance.
        * ``coh``: Coherence [0,1]
        * ``delay``: Coherence phase delay estimates. The metric was used in
          [3]_ and [4]_
        * ``granger``: Granger causality. As suggested by [3]_ and [4]_, this
          returns the difference between `F(x-->y)` and `F(y-->x)`.
        * ``wtc``: Correlation of the wavelet coefficients [-1,1]. The metric
          was used in [1]_
        * ``pcorr``: Partial correlation. Calculated using the inverse
          covariance matrix (precision matrix) [0,1]
        * ``pcoh``: Partial coherence in the range [0,1]. The metric was used
          in [2]_ (not implemented yet).
        * ``spcoh`` : Semi-partial coherence in the range [0,1]. The metric was
          used in [7]_. The correlation between two time-series is conditioned
          on a third time-series given by ``idp``.
        * ``ktau``: Kendall's tau, a correlation measure for ordinal data.
        * ``rho``: Spearman rank-order correlation coefficient rho [-1,1].
          This is a nonparametric measure of the monotonic relationship
          between two datasets. Unlike e.g. the Pearson correlation, the
          Spearman correlation does not assume that both datasets are
          normally distributed.
        * ``mic``: Maximal information coefficient [0,1]
        * ``non_linearity``: Non-linearity of the relationship [0,1]
        * ``mi``: Mutual information. The metric was used in [5]_
        * ``nmi``: Normalized mutual information [0,1]
        * ``ami``: Adjusted mutual information [0,1]
        * ``cmi``: Conditional mutual information. The metric was used in
          [6]_ (not implemented yet).
        * ``graph_lasso``: Sparse inverse covariance matrix estimation with l1
          penalization using the GraphLasso. The connection of two nodes is
          estimated by conditioning on all other nodes [-1,1].
        * ``ledoit_wolf``: Sparse inverse covariance matrix estimation with l2
          shrinkage using Ledoit-Wolf [-1,1].
        * ``dcorr``: Distance correlation [0,1]. This metric can capture
          non-linear relationships.
        * ``dcov``: Distance covariance.
        * ``eu``: Euclidean distance.

    pval : boolean, optional
        Return p-values; only available for ``corr``, ``wtc``, ``ktau``,
        ``rho`` and ``mic`` so far (default=False).
    TR : float
        Repetition time: the sampling interval (only for ``coh``, ``pcoh``,
        ``delay`` and ``granger``).
    fq_l : float
        Lower frequency bound (only for ``coh``, ``pcoh``, ``delay`` and
        ``granger``).
    fq_u : float
        Upper frequency bound (only for ``coh``, ``pcoh``, ``delay`` and
        ``granger``).
    order : integer
        Time lag (only for ``measure='granger'``).
    scale : integer [1,2]
        Wavelet scale (only for ``measure='wtc'``).
    w : pywt.Wavelet object
        Wavelet to use; default is ``pywt.Wavelet('db4')``.
    idp : integer
        Index of the timeseries to condition the semi-partial coherence on
        (only if ``measure='spcoh'``)
    excl_zero_cov : boolean (default: False)
        Automatically exclude node timeseries with zero covariance; the
        corresponding entries of the adjacency matrix are set to zero.

    Returns
    -------
    A : ndarray, shape(n_rois, n_rois)
        Adjacency matrix of the graph.

    P : ndarray, shape(n_rois, n_rois)
        Statistical p-values (2-tailed) for the similarity measure. Only if
        ``pval=True``

    Notes
    -----
    The calculation runs faster if ``pval=False`` (default). The diagonal is
    always zero.

    See Also
    --------
    adj_dynamic: for a dynamic graph representation/adjacency matrix
    nt.timeseries.utils.cross_correlation_matrix: cross-correlation matrix

    References
    ----------
    .. [1] Bassett, D. S., Wymbs, N. F., Porter, M. A., Mucha, P. J., Carlson,
           J. M., & Grafton, S. T. (2011). Dynamic reconfiguration of human
           brain networks during learning. Proceedings of the National Academy
           of Sciences, 108(18), 7641–7646. doi:10.1073/pnas.1018985108
    .. [2] Salvador, R., Suckling, J., Schwarzbauer, C., & Bullmore, E. (2005).
           Undirected graphs of frequency-dependent functional connectivity in
           whole brain networks. Philosophical transactions of the Royal
           Society of London Series B, Biological sciences, 360(1457), 937–946.
           doi:10.1098/rstb.2005.1645
    .. [3] Kayser, A. S., Sun, F. T., & D'Esposito, M. (2009). A comparison of
           Granger causality and coherency in fMRI-based analysis of the motor
           system. Human Brain Mapping,30(11), 3475–3494. doi:10.1002/hbm.20771
    .. [4] Roebroeck, A., Formisano, E., & Goebel, R. (2005). Mapping directed
           influence over the brain using Granger causality and fMRI.
           NeuroImage, 25(1), 230–242. doi:10.1016/j.neuroimage.2004.11.017
    .. [5] Zamora-López, G., Zhou, C., & Kurths, J. (2010). Cortical hubs form
           a module for multisensory integration on top of the hierarchy of
           cortical networks. Frontiers in neuroinformatics, 4.
    .. [6] Salvador, R., Anguera, M., Gomar, J. J., Bullmore, E. T., &
           Pomarol-Clotet, E. (2010). Conditional Mutual Information Maps as
           Descriptors of Net Connectivity Levels in the Brain. Frontiers in
           neuroinformatics, 4. doi:10.3389/fninf.2010.00115
    .. [7] Sun, F. T., Miller, L. M., & D'Esposito, M. (2004). Measuring
           interregional functional connectivity using coherence and partial
           coherence analyses of fMRI data. NeuroImage, 21(2), 647–658.
           doi:10.1016/j.neuroimage.2003.09.056

    Examples
    --------
    >>> data = get_fmri_data()  # 31 nodes, 250 time points
    >>> data = percent_signal_change(data)  # normalize data
    >>> print(data.shape)
    (31, 250)
    >>> # adjacency matrix based on the correlation metric
    >>> A = adj_static(data, measure='corr')
    >>> print(A.shape)
    (31, 31)

    >>> # get the adjacency matrix and p-values (one per edge)
    >>> A, P = adj_static(data, measure='corr', pval=True)
    >>> print(P.shape)
    (31, 31)
    """

    data = deepcopy(ts)
    n_nodes = data.shape[0]

    # TODO: think about an option to automatically exclude zero-covariance
    # nodes; especially important for granger
    if excl_zero_cov:
        # exclude nodes whose timeseries have zero variance
        std = np.std(data, axis=1)
        idx = np.where(std != 0.0)[0]
        data = data[idx, :]

    # this performs just the wavelet transformation, the correlation part
    # is identical to measure='corr'
    # if measure == 'wtc':
    #     data = wavelet_transform(data, w=w, scale=scale)
    #     measure = 'corr' # perform correlation of wavelet transformed ts

    if measure == 'corr':

        ADJ = np.corrcoef(data)

        if pval:
            # save running time by computing only the lower triangle,
            # then symmetrize
            P = np.zeros((data.shape[0], data.shape[0]))
            il = tril_indices(data.shape[0], -1)
            for i, j in zip(*il):
                _, P[i, j] = pearsonr(data[i, :], data[j, :])
            P = P + P.T
            fill_diagonal(P, 1)

    elif measure == 'cov':
        ADJ = np.cov(data)

    elif measure == 'pcorr':
        # partial correlation from the inverse covariance (precision)
        # matrix: pcorr_ij = -prec_ij / sqrt(prec_ii * prec_jj)
        # inv vs. pinv vs pinv2:
        # http://blog.vene.ro/2012/08/18/inverses-pseudoinverses-numerical
        # issues-speed-symmetry/
        prec = np.linalg.inv(np.cov(data))  # or pinv?
        d = 1 / np.sqrt(np.diag(prec))
        ADJ = -prec * d * d[:, np.newaxis]

    elif measure == 'rho':

        ADJ = np.zeros((data.shape[0], data.shape[0]))
        P = np.zeros((data.shape[0], data.shape[0]))

        idx = tril_indices(data.shape[0], -1)
        # save some running time by calculating only the lower triangle
        ADJ[idx] = -99
        for i in range(data.shape[0]):
            for j in range(data.shape[0]):
                if ADJ[i, j] == -99:
                    ADJ[i, j], P[i, j] = spearmanr(data[i, :], data[j, :])

        ADJ = ADJ + ADJ.T
        P = P + P.T
        fill_diagonal(P, 1)

    elif measure == 'coh':
        from nitime import TimeSeries
        from nitime.analysis.coherence import CoherenceAnalyzer
        T = TimeSeries(data, sampling_interval=TR)
        # Initialize the coherence analyzer
        C = CoherenceAnalyzer(T)
        COH = C.coherence
        # remove NaNs
        COH[np.isnan(COH)] = 0.

        freq_idx = np.where((C.frequencies > fq_l) * (C.frequencies < fq_u))[0]
        # averaging over the last dimension (=frequency)
        ADJ = np.mean(COH[:, :, freq_idx], -1)

    if excl_zero_cov:
        # restore the full dimensionality; excluded nodes keep zero entries
        ADJ_full = np.zeros((n_nodes, n_nodes))
        ADJ_full[np.ix_(idx, idx)] = ADJ
        ADJ = ADJ_full

    fill_diagonal(ADJ, 0)
    ADJ[np.isnan(ADJ)] = 0.  # might occur if zero cov

    if pval:
        return ADJ, P
    else:
        return ADJ
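
A hedged usage sketch for adj_static with synthetic data (random input for illustration only; reuses the imports added at the top of this example):

rng = np.random.RandomState(42)
ts = rng.randn(10, 200)             # 10 nodes, 200 time points
A = adj_static(ts, measure='corr')  # (10, 10) adjacency matrix, zero diagonal
A, P = adj_static(ts, measure='corr', pval=True)  # with two-tailed p-values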
Example #5
    ht_cls_bs = mne.baseline.rescale(ht_cls,
                                     times,
                                     baseline=(-3.8, -3.3),
                                     mode="mean")

    ht_pln_bs = mne.baseline.rescale(ht_pln,
                                     times,
                                     baseline=(-3.8, -3.3),
                                     mode="mean")
    ht_int_bs = mne.baseline.rescale(ht_int,
                                     times,
                                     baseline=(-3.8, -3.3),
                                     mode="mean")

    corr_cls = []
    corr_pln = []
    corr_int = []

    for ts in ht_cls_bs:
        nits = TimeSeries(ts[:, tois[toi][0]:tois[toi][1]],
                          sampling_rate=1000)  # epochs_normal.info["sfreq"])

        corr_cls += [CorrelationAnalyzer(nits)]

    for ts in ht_pln_bs:
        nits = TimeSeries(ts[:, tois[toi][0]:tois[toi][1]],
                          sampling_rate=1000)  # epochs_normal.info["sfreq"])

        corr_pln += [CorrelationAnalyzer(nits)]

    for ts in ht_int_bs:
        nits = TimeSeries(ts[:, tois[toi][0]:tois[toi][1]],
                          sampling_rate=1000)  # epochs_normal.info["sfreq"])

        corr_int += [CorrelationAnalyzer(nits)]
    ge_pln = []
    cp_cls = []
    cp_pln = []

    for st in selected_times:
        if st + window_size < times[-1]:

            from_time = np.abs(times - st).argmin()
            to_time = np.abs(times - (st + window_size)).argmin()
            corr_cls = []
            corr_pln = []

            # make timeseries object
            for ts in cls_bs:
                nits = TimeSeries(
                    ts[:, from_time:to_time],
                    sampling_rate=1000)  # epochs_normal.info["sfreq"])

                corr_cls += [CorrelationAnalyzer(nits)]

            for ts in pln_bs:
                nits = TimeSeries(
                    ts[:, from_time:to_time],
                    sampling_rate=1000)  # epochs_normal.info["sfreq"])

                corr_pln += [CorrelationAnalyzer(nits)]

            corr_cls_coef = [d.corrcoef for d in corr_cls]
            pr_cls_tmp = np.asarray([
                bct.centrality.pagerank_centrality(g, d=0.85)
                for g in corr_cls_coef
            ])
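
A self-contained sketch of the TimeSeries -> CorrelationAnalyzer pattern used throughout this excerpt (shapes are assumed for illustration):

import numpy as np
from nitime import TimeSeries
from nitime.analysis import CorrelationAnalyzer

ts = np.random.randn(31, 250)              # n_channels x n_times
nits = TimeSeries(ts, sampling_rate=1000)
corr = CorrelationAnalyzer(nits)
print(corr.corrcoef.shape)                 # (31, 31) correlation matrix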
Example #7
import numpy as np
import mne

from nitime import TimeSeries
from nitime.analysis import CoherenceAnalyzer, MTCoherenceAnalyzer
from nitime.viz import drawmatrix_channels

f_up = 13  # upper limit
f_lw = 8  # lower limit

cohMatrixNormal = np.empty([np.shape(labelTsNormal)[1],
                            np.shape(labelTsNormal)[1],
                            np.shape(labelTsNormal)[0]])

labels_name = []
for label in labels:
    labels_name += [label.name]

for j in range(cohMatrixNormal.shape[2]):
    niTS = TimeSeries(labelTsNormal[j], sampling_rate=epochs.info["sfreq"])
    niTS.metadata["roi"] = labels_name

    C = MTCoherenceAnalyzer(niTS)

    # confine the analysis to the alpha band (8-13 Hz)
    freq_idx = np.where((C.frequencies > f_lw) * (C.frequencies < f_up))[0]

    # compute the average coherence by averaging over the last dimension
    cohMatrixNormal[:, :, j] = np.mean(C.coherence[:, :, freq_idx], -1)


# %%
# `bin` is assumed to be a binarized coherence matrix computed earlier
drawmatrix_channels(bin.astype(int), labels_name, color_anchor=0,
                    title='MEG coherence')

labels = mne.read_labels_from_annot('subject_1', parc='aparc',
                                    subjects_dir=subjects_dir)

# labels = mne.read_labels_from_annot('subject_1', parc='aparc.DKTatlas40',
#                                     subjects_dir=subjects_dir)

labels_name = [label.name for label in labels]


# %%
coh_list_nrm = []
coh_list_hyp = []

for j in range(len(label_ts_normal_crop)):
    nits = TimeSeries(label_ts_normal_crop[j],
                      sampling_rate=300)  # epochs_normal.info["sfreq"])
    nits.metadata["roi"] = labels_name

    coh_list_nrm += [CoherenceAnalyzer(nits)]

for j in range(len(label_ts_hyp_crop)):
    nits = TimeSeries(label_ts_hyp_crop[j],
                      sampling_rate=300)  # epochs_normal.info["sfreq"])
    nits.metadata["roi"] = labels_name

    coh_list_hyp += [CoherenceAnalyzer(nits)]

# Compute a source estimate per frequency band
bands = dict(theta=[4, 8],
             alpha=[8, 12],
             beta=[13, 25])
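
A hedged sketch of the band-limited coherence pattern from this example, on synthetic data (shapes assumed; reuses the imports at the top of this example):

data = np.random.randn(5, 600)                     # 5 ROIs, 600 samples
T = TimeSeries(data, sampling_rate=300)
C = MTCoherenceAnalyzer(T)
alpha = np.where((C.frequencies > 8) * (C.frequencies < 13))[0]
coh_alpha = np.mean(C.coherence[:, :, alpha], -1)  # (5, 5) alpha-band coherence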
Example #9
        corr_vol = []
        corr_invol = []

        ht_vol_bs = mne.baseline.rescale(
            np.abs(ht_vol[:, :, :, j])**2,
            times,
            baseline=(-1.85, -1.5),
            mode="percent")

        ht_invol_bs = mne.baseline.rescale(
            np.abs(ht_invol[:, :, :, j])**2,
            times,
            baseline=(-1.85, -1.5),
            mode="percent")

        for ts in ht_vol_bs:
            nits = TimeSeries(ts[:, 768:1024],
                              sampling_rate=512)

            corr_vol += [CorrelationAnalyzer(nits)]

        for ts in ht_invol_bs:
            nits = TimeSeries(ts[:, 768:1024],
                              sampling_rate=512)

            corr_invol += [CorrelationAnalyzer(nits)]

        results_vol[band] = np.asarray([c.corrcoef for c in corr_vol])
        results_invol[band] = np.asarray([c.corrcoef for c in corr_invol])

    np.save(graph_data + "%s_vol_pow.npy" % subject,
            results_vol)
    np.save(graph_data + "%s_inv_pow.npy" % subject,
            results_invol)
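
A hedged sketch of the baseline correction used above, on synthetic data (shapes assumed; mne.baseline.rescale as in the excerpt):

import numpy as np
import mne

times = np.arange(-2.0, 2.0, 1.0 / 512)      # seconds, 512 Hz sampling
data = np.random.randn(20, 64, times.size)   # epochs x channels x times
data_bs = mne.baseline.rescale(data, times, baseline=(-1.85, -1.5),
                               mode="percent")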