Ejemplo n.º 1
0
def test_dss1(show=False):
    """Test DSS1 (evoked)."""
    n_samples = 300
    data, source = create_data(n_samples=n_samples)

    def _strongest_component(dss_weights):
        # Run DSS1 and average its first (most repeatable) component over
        # trials.
        todss, _, _, _ = dss.dss1(data, weights=dss_weights)
        comps = fold(np.dot(unfold(data), todss), epoch_size=n_samples)
        return np.mean(comps[:, 0, :], -1)

    # Without weights
    best_comp = _strongest_component(None)
    scale = np.ptp(best_comp) / np.ptp(source)
    assert_allclose(np.abs(best_comp), np.abs(np.squeeze(source)) * scale,
                    atol=1e-6)  # use abs as DSS component might be flipped

    # With weights emphasizing the middle of the trial, where the signal was
    # placed by create_data
    weights = np.zeros(n_samples)
    weights[100:200] = 1
    best_comp = _strongest_component(weights)
    scale = np.ptp(best_comp) / np.ptp(source)

    if show:
        f, (ax1, ax2, ax3) = plt.subplots(3, 1)
        ax1.plot(source, label='source')
        ax2.plot(np.mean(data, 2), label='data')
        ax3.plot(best_comp, label='recovered')
        plt.legend()
        plt.show()

    assert_allclose(np.abs(best_comp), np.abs(np.squeeze(source)) * scale,
                    atol=1e-6)  # use abs as DSS component might be flipped
Ejemplo n.º 2
0
def test_dss0(n_bad_chans):
    """Test dss0.

    Find the linear combinations of multichannel data that
    maximize repeatability over trials. Data are time * channel * trials.

    Uses dss0().

    `n_bad_chans` sets the values of the first corresponding number of
    channels to zero.
    """
    # create synthetic data
    n_samples = 100 * 3
    n_chans = 30
    n_trials = 100
    noise_dim = 20  # dimensionality of noise

    # source: a single sine-wave burst occupying the middle third of the epoch
    source = np.hstack(
        (np.zeros((n_samples // 3, )),
         np.sin(2 * np.pi * np.arange(n_samples // 3) / (n_samples / 3)).T,
         np.zeros((n_samples // 3, ))))[np.newaxis].T
    s = source * np.random.randn(1, n_chans)  # mix into channels -> (300, 30)
    s = s[:, :, np.newaxis]
    # FIX: tile over n_trials instead of a hard-coded 100, so the trial count
    # is controlled in one place (behavior unchanged while n_trials == 100).
    s = np.tile(s, (1, 1, n_trials))

    # set first `n_bad_chans` to zero
    s[:, :n_bad_chans] = 0.

    # noise: low-rank (noise_dim) random time series projected onto channels
    noise = np.dot(unfold(np.random.randn(n_samples, noise_dim, n_trials)),
                   np.random.randn(noise_dim, n_chans))
    noise = fold(noise, n_samples)

    # mix signal and noise, each normalized by its RMS
    SNR = 0.1
    data = noise / rms(noise.flatten()) + SNR * s / rms(s.flatten())

    # apply DSS to clean them: bias covariance toward the trial average
    c0, _ = tscov(data)
    c1, _ = tscov(np.mean(data, 2))
    [todss, _, pwr0, pwr1] = dss.dss0(c0, c1)
    z = fold(np.dot(unfold(data), todss), epoch_size=n_samples)

    # first DSS component should recover the source up to sign and scale
    best_comp = np.mean(z[:, 0, :], -1)
    scale = np.ptp(best_comp) / np.ptp(source)

    assert_allclose(np.abs(best_comp),
                    np.abs(np.squeeze(source)) * scale,
                    atol=1e-6)  # use abs as DSS component might be flipped
Ejemplo n.º 3
0
def test_dss0(n_bad_chans):
    """Test dss0.

    Find the linear combinations of multichannel data that
    maximize repeatability over trials. Data are time * channel * trials.

    Uses dss0().

    `n_bad_chans` set the values of the first corresponding number of channels
    to zero.
    """
    n_samples = 300
    data, source = create_data(n_samples=n_samples, n_bad_chans=n_bad_chans)

    # Apply DSS: bias the covariance toward what is repeatable over trials
    full_cov, _ = tscov(data)
    evoked_cov, _ = tscov(np.mean(data, 2))
    todss, _, _, _ = dss.dss0(full_cov, evoked_cov)
    components = fold(np.dot(unfold(data), todss), epoch_size=n_samples)

    # The first component, averaged over trials, should match the source up
    # to sign and scale.
    best_comp = np.mean(components[:, 0, :], -1)
    scale = np.ptp(best_comp) / np.ptp(source)

    assert_allclose(np.abs(best_comp), np.abs(np.squeeze(source)) * scale,
                    atol=1e-6)  # use abs as DSS component might be flipped
Ejemplo n.º 4
0
def create_data(n_samples=100 * 3, n_chans=30, n_trials=100, noise_dim=20,
                n_bad_chans=1, SNR=.1, show=False):
    """Create synthetic data.

    The source is a single sine-wave burst occupying the middle third of the
    epoch. It is mixed into all channels with random weights and summed with
    spatially-correlated noise of rank `noise_dim`.

    Parameters
    ----------
    n_samples : int
        Number of time samples per trial, by default 100*3.
    n_chans : int
        Number of channels, by default 30.
    n_trials : int
        Number of trials, by default 100.
    noise_dim : int
        Dimensionality of noise, by default 20.
    n_bad_chans : int
        Number of leading channels set to zero, by default 1.
    SNR : float
        Signal-to-noise ratio of the mixture, by default 0.1.
    show : bool
        If True, plot source, noise and mixture, by default False.

    Returns
    -------
    data : ndarray, shape=(n_samples, n_chans, n_trials)
    source : ndarray, shape=(n_samples, 1)
    """
    # source: silence / sine burst / silence, as a column vector
    source = np.hstack((
        np.zeros((n_samples // 3,)),
        np.sin(2 * np.pi * np.arange(n_samples // 3) / (n_samples / 3)).T,
        np.zeros((n_samples // 3,))))[np.newaxis].T
    s = source * np.random.randn(1, n_chans)  # mix into channels -> (300, 30)
    s = s[:, :, np.newaxis]
    # BUG FIX: tile over n_trials instead of a hard-coded 100; the original
    # produced mismatched signal/noise shapes for any n_trials != 100.
    s = np.tile(s, (1, 1, n_trials))

    # set first `n_bad_chans` to zero
    s[:, :n_bad_chans] = 0.

    # noise: low-rank (noise_dim) random time series projected onto channels
    noise = np.dot(
        unfold(np.random.randn(n_samples, noise_dim, n_trials)),
        np.random.randn(noise_dim, n_chans))
    noise = fold(noise, n_samples)

    # mix signal and noise, each normalized by its RMS
    data = noise / rms(noise.flatten()) + SNR * s / rms(s.flatten())

    if show:
        f, ax = plt.subplots(3)
        ax[0].plot(source[:, 0], label='source')
        ax[1].plot(noise[:, 1, 0], label='noise')
        ax[2].plot(data[:, 1, 0], label='mixture')
        ax[0].legend()
        ax[1].legend()
        ax[2].legend()
        plt.show()

    return data, source
Ejemplo n.º 5
0
def create_data(n_times,
                n_chans=10,
                n_trials=20,
                freq=12,
                sfreq=250,
                noise_dim=8,
                SNR=.8,
                t0=100,
                show=False):
    """Create synthetic data.

    Simulate trials containing a sinusoid at `freq` Hz (sampled at `sfreq`)
    that starts at sample `t0`, mixed into all channels with random weights
    and added to spatially-correlated noise at the requested SNR.

    Returns
    -------
    noisy_data : array, shape=(n_times, n_chans, n_trials)
        Simulated data with an oscillatory component starting at t0.
    signal : array, shape=(n_times, n_chans, n_trials)
        The noise-free (RMS-normalized, SNR-scaled) signal part.
    """
    # oscillatory source, active from t0 onwards, mixed into channels
    source = np.sin(2 * np.pi * freq * np.arange(n_times - t0) / sfreq)[None].T
    mixed = source * np.random.randn(1, n_chans)
    signal = np.zeros((n_times, n_chans, n_trials))
    signal[t0:, :, :] = np.tile(mixed[:, :, np.newaxis], (1, 1, n_trials))

    # spatially-correlated noise of rank `noise_dim`
    noise = fold(
        np.dot(unfold(np.random.randn(n_times, noise_dim, n_trials)),
               np.random.randn(noise_dim, n_chans)),
        n_times)

    # normalize both parts by their RMS, then mix at the requested SNR
    signal = SNR * signal / rms(signal.flatten())
    noise = noise / rms(noise.flatten())
    noisy_data = signal + noise

    if show:
        f, ax = plt.subplots(3)
        ax[0].plot(signal[:, 0, 0], label='source')
        ax[1].plot(noise[:, 1, 0], label='noise')
        ax[2].plot(noisy_data[:, 1, 0], label='mixture')
        ax[0].legend()
        ax[1].legend()
        ax[2].legend()
        plt.show()

    return noisy_data, signal
Ejemplo n.º 6
0
def test_tspca_sns_dss():  # TODO
    """Test TSPCA, SNS, DSS.

    Requires data stored in a time X channels X trials matrix.

    Remove environmental noise with TSPCA (shifts=-50:50).
    Remove sensor noise with SNS.
    Remove non-repeatable components with DSS.
    """
    # Random data and reference channels (time*chans*trials), demeaned
    noisy_data = demean(np.random.random((800, 102, 200)))
    noisy_ref = demean(np.random.random((800, 3, 200)))

    # Apply TSPCA
    # -------------------------------------------------------------------------
    # Currently disabled; data passes through unchanged.
    # shifts = np.arange(-50, 51)
    # print('TSPCA...')
    # y_tspca, idx = tspca.tsr(noisy_data, noisy_ref, shifts)[0:2]
    # print('\b OK!')
    y_tspca = noisy_data

    # Apply SNS
    # -------------------------------------------------------------------------
    print('SNS...')
    y_tspca_sns, r = sns.sns(y_tspca, 10)  # 10 nearest neighbors
    print('\b OK!')

    # Apply DSS
    # -------------------------------------------------------------------------
    print('DSS...')
    # Keep all PC components
    y_tspca_sns = demean(y_tspca_sns)
    print(y_tspca_sns.shape)
    todss, fromdss, _, _ = dss.dss1(y_tspca_sns)
    print('\b OK!')

    # Project data onto the DSS components
    y_tspca_sns_dss = fold(np.dot(unfold(y_tspca_sns), todss),
                           y_tspca_sns.shape[0])

    return y_tspca, y_tspca_sns, y_tspca_sns_dss
Ejemplo n.º 7
0
def _stim_data(n_times, n_chans, n_trials, noise_dim, SNR=1, t0=100):
    """Create synthetic data.

    Simulate trials with a half-cycle sinusoid starting at sample `t0`,
    mixed into all channels with random weights and summed with correlated
    noise. Returns (noisy_data, signal).
    """
    # source: half a sine cycle, active from t0 onwards
    burst = np.sin(2 * np.pi * np.linspace(0, .5, n_times - t0))[np.newaxis].T
    mixed = burst * np.random.randn(1, n_chans)
    signal = np.zeros((n_times, n_chans, n_trials))
    signal[t0:, :, :] = np.tile(mixed[:, :, np.newaxis], (1, 1, n_trials))

    # spatially-correlated noise of rank `noise_dim`
    noise = fold(
        np.dot(unfold(np.random.randn(n_times, noise_dim, n_trials)),
               np.random.randn(noise_dim, n_chans)),
        n_times)

    # normalize each part by its RMS, then mix at the requested SNR
    signal = SNR * signal / rms(signal.flatten())
    noise = noise / rms(noise.flatten())
    return signal + noise, signal
Ejemplo n.º 8
0
n_trials = 100
noise_dim = 20  # dimensionality of noise

# Source signal: a single sine-wave burst occupying the middle third of the
# epoch (n_samples and n_chans are defined earlier in the script — confirm)
source = np.hstack(
    (np.zeros((n_samples // 3, )),
     np.sin(2 * np.pi * np.arange(n_samples // 3) / (n_samples / 3)).T,
     np.zeros((n_samples // 3, ))))[np.newaxis].T
s = source * np.random.randn(1, n_chans)  # mix into channels, e.g. 300 * 30
s = s[:, :, np.newaxis]
# NOTE(review): tiles over a hard-coded 100 trials; should presumably use
# n_trials (equal to 100 here, so behavior is unaffected)
s = np.tile(s, (1, 1, 100))

# Noise: random time series of rank `noise_dim` projected onto the channels
noise = np.dot(unfold(np.random.randn(n_samples, noise_dim, n_trials)),
               np.random.randn(noise_dim, n_chans))
noise = fold(noise, n_samples)

# Mix signal and noise, each normalized by its RMS
SNR = 0.1
data = noise / rms(noise.flatten()) + SNR * s / rms(s.flatten())

###############################################################################
# Apply DSS to clean them
# -----------------------------------------------------------------------------

# Compute original and biased covariance matrices
c0, _ = tscov(data)

# In this case the biased covariance is simply the covariance of the mean over
# trials
c1, _ = tscov(np.mean(data, 2))
Ejemplo n.º 9
0
noise_dim = 8  # dimensionality (rank) of the simulated noise
SNR = .2  # signal-to-noise ratio of the mixture
t0 = 100  # sample index at which the oscillation starts

# source: sinusoid at `target` Hz, active from t0 onwards (target, n_times,
# sfreq, n_chans and n_trials are defined earlier in the script — confirm)
source = np.sin(2 * np.pi * target * np.arange(n_times - t0) / sfreq)[None].T
s = source * np.random.randn(1, n_chans)  # mix into channels, random weights
s = s[:, :, np.newaxis]
s = np.tile(s, (1, 1, n_trials))  # identical signal in every trial
signal = np.zeros((n_times, n_chans, n_trials))
signal[t0:, :, :] = s  # silence before t0, oscillation afterwards

# noise: random time series of rank `noise_dim` projected onto the channels
noise = np.dot(unfold(np.random.randn(n_times, noise_dim, n_trials)),
               np.random.randn(noise_dim, n_chans))
noise = fold(noise, n_times)

# mix signal and noise, each normalized by its RMS
signal = SNR * signal / rms(signal.flatten())
noise = noise / rms(noise.flatten())
data = signal + noise

# Plot source, noise and mixture for one channel of the first trial
f, ax = plt.subplots(3)
ax[0].plot(signal[:, 0, 0], c='C0', label='source')
ax[1].plot(noise[:, 1, 0], c='C1', label='noise')
ax[2].plot(data[:, 1, 0], c='C2', label='mixture')
ax[0].legend()
ax[1].legend()
ax[2].legend()