Example #1
import numpy as np

from nilearn import signal as nisignal


def test_standardize():
    rng = np.random.RandomState(42)
    n_features = 10
    n_samples = 17

    # Create random signals with offsets
    a = rng.random_sample((n_samples, n_features))
    a += np.linspace(0, 2., n_features)

    # Without trend removal: z-scoring should give each column zero mean
    # and unit standard deviation.
    b = nisignal._standardize(a, standardize='zscore')
    stds = np.std(b, axis=0)
    np.testing.assert_almost_equal(stds, np.ones(n_features))
    np.testing.assert_almost_equal(b.sum(axis=0), np.zeros(n_features))

    # With trend removal
    a = np.atleast_2d(np.linspace(0, 2., n_features)).T
    b = nisignal._standardize(a, detrend=True, standardize=False)
    np.testing.assert_almost_equal(b, np.zeros(b.shape))

    length_1_signal = np.atleast_2d(np.linspace(0, 2., n_features))
    np.testing.assert_array_equal(
        length_1_signal,
        nisignal._standardize(length_1_signal, standardize='zscore'))
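For reference, here is a minimal NumPy-only sketch of the column-wise
z-scoring that the assertions above expect from the private
nisignal._standardize helper; zscore_columns is a hypothetical name, not
part of nilearn:

import numpy as np

def zscore_columns(signals):
    # Center each column, then scale it to unit standard deviation,
    # leaving constant columns untouched instead of dividing by zero.
    signals = signals - signals.mean(axis=0)
    stds = signals.std(axis=0)
    stds[stds == 0] = 1.0
    return signals / stds

rng = np.random.RandomState(42)
x = rng.random_sample((17, 10)) + np.linspace(0, 2., 10)
z = zscore_columns(x)
print(np.allclose(z.mean(axis=0), 0.0))  # True: zero mean per column
print(np.allclose(z.std(axis=0), 1.0))   # True: unit std per column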
Example #2
import numpy as np

from nilearn import signal as nisignal


def test_standardize():
    rand_gen = np.random.RandomState(0)
    n_features = 10
    n_samples = 17

    # Create random signals with offsets
    a = rand_gen.random_sample((n_samples, n_features))
    a += np.linspace(0, 2., n_features)

    # Without trend removal: normalize=True should scale each centered
    # column to unit energy (sum of squares equal to one).
    b = nisignal._standardize(a, normalize=True)
    energies = (b ** 2).sum(axis=0)
    np.testing.assert_almost_equal(energies, np.ones(n_features))
    np.testing.assert_almost_equal(b.sum(axis=0), np.zeros(n_features))

    # With trend removal
    a = np.atleast_2d(np.linspace(0, 2., n_features)).T
    b = nisignal._standardize(a, detrend=True, normalize=False)
    np.testing.assert_almost_equal(b, np.zeros(b.shape))

    length_1_signal = np.atleast_2d(np.linspace(0, 2., n_features))
    np.testing.assert_array_equal(length_1_signal,
                                  nisignal._standardize(length_1_signal,
                                                        normalize=True))
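Unlike the z-score variant in Example #1, the normalize=True branch tested
here checks unit energy per column rather than unit variance. A minimal
sketch of that behavior, inferred from the assertions above;
unit_energy_columns is a hypothetical helper, not nilearn API:

import numpy as np

def unit_energy_columns(signals):
    # Center each column, then scale it so its sum of squares is one,
    # which is exactly what the energy assertion above verifies.
    signals = signals - signals.mean(axis=0)
    norms = np.sqrt((signals ** 2).sum(axis=0))
    norms[norms == 0] = 1.0
    return signals / norms

rng = np.random.RandomState(0)
x = rng.random_sample((17, 10)) + np.linspace(0, 2., 10)
b = unit_energy_columns(x)
print(np.allclose((b ** 2).sum(axis=0), 1.0))  # unit energy per column
print(np.allclose(b.sum(axis=0), 0.0))         # zero mean per column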
Example #4
    def transform(self, X, vectorize=True, confounds=None):
        """Apply transform to covariances matrices to get the connectivity
        matrices for the chosen kind.

        Parameters
        ----------
        X : list of numpy.ndarray with shapes (n_samples, n_features)
            The input subjects time series.

        vectorize : bool default=True
            If True, flattened lower triangular part of the connectivity
            matrices will be computed and returned.

        confounds: CSV file or array-like, optional
            This parameter is passed to signal.clean. Please see the related
            documentation for details.
            shape: (number of scans, number of confounds)

        Returns
        -------
        output : numpy.ndarray, shape (n_samples, n_features, n_features)
             The transformed connectivity matrices.
        """
        if self.kind == 'correlation':
            covariances_std = [
                self.cov_estimator_.fit(
                    signal._standardize(x, detrend=False,
                                        normalize=True)).covariance_ for x in X
            ]
            connectivities = [_cov_to_corr(cov) for cov in covariances_std]
        else:
            covariances = [self.cov_estimator_.fit(x).covariance_ for x in X]
            if self.kind == 'covariance':
                connectivities = covariances
            elif self.kind == 'tangent':
                connectivities = [
                    _map_eigenvalues(
                        np.log,
                        self.whitening_.dot(cov).dot(self.whitening_))
                    for cov in covariances
                ]
            elif self.kind == 'precision':
                connectivities = [linalg.inv(cov) for cov in covariances]
            elif self.kind == 'partial correlation':
                connectivities = [
                    _prec_to_partial(linalg.inv(cov)) for cov in covariances
                ]
            else:
                raise ValueError('Allowed connectivity kinds are '
                                 '"correlation", '
                                 '"partial correlation", "tangent", '
                                 '"covariance" and "precision", got kind '
                                 '"{}"'.format(self.kind))

        connectivities = np.array(connectivities)

        if vectorize:
            connectivities = sym_to_vec(connectivities, confounds=confounds)

        return connectivities
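The method above is normally reached through nilearn's public
ConnectivityMeasure estimator rather than called directly. A usage sketch,
assuming a recent nilearn installation (the subject data are random
placeholders):

import numpy as np
from nilearn.connectome import ConnectivityMeasure

rng = np.random.RandomState(0)
# Three "subjects", each with 40 time points over 5 regions.
subjects = [rng.standard_normal((40, 5)) for _ in range(3)]

conn = ConnectivityMeasure(kind='correlation')
matrices = conn.fit_transform(subjects)
print(matrices.shape)  # (3, 5, 5): one symmetric matrix per subject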