Esempio n. 1
0
def test_unsupervised_spatial_filter():
    """Test unsupervised spatial filter."""
    from sklearn.decomposition import PCA
    from sklearn.kernel_ridge import KernelRidge

    # Load the raw file and keep only a handful of MEG channels so the
    # test stays fast.
    raw = io.read_raw_fif(raw_fname)
    events = read_events(event_name)
    meg_picks = pick_types(raw.info, meg=True, stim=False, ecg=False,
                           eog=False, exclude='bads')[1:13:3]
    epochs = Epochs(raw, events, event_id, tmin, tmax, picks=meg_picks,
                    baseline=None, preload=True, verbose=False)

    # An estimator without fit/transform semantics must be rejected.
    assert_raises(ValueError, UnsupervisedSpatialFilter, KernelRidge(2))

    data = epochs.get_data()
    n_comp = 4

    # fit() followed by transform() must match fit_transform() on a
    # freshly-constructed instance.
    fitted = UnsupervisedSpatialFilter(PCA(n_comp))
    fitted.fit(data)
    fresh = UnsupervisedSpatialFilter(PCA(n_comp))

    transformed = fitted.transform(data)
    # Output keeps the 3D epochs layout ...
    assert_equal(transformed.ndim, 3)
    # ... agrees with fit_transform ...
    assert_array_almost_equal(transformed, fresh.fit_transform(data))
    # ... and its channel axis has n_comp components.
    assert_equal(transformed.shape[1], n_comp)

    # average=True path runs; a non-boolean 'average' raises.
    avg_filter = UnsupervisedSpatialFilter(PCA(4), average=True)
    avg_filter.fit_transform(data)
    assert_raises(ValueError, UnsupervisedSpatialFilter, PCA(4), 2)
Esempio n. 2
0
def test_unsupervised_spatial_filter():
    """Test unsupervised spatial filter."""
    from sklearn.decomposition import PCA
    from sklearn.kernel_ridge import KernelRidge

    # Small channel subset keeps the fixture cheap.
    raw = io.read_raw_fif(raw_fname)
    events = read_events(event_name)
    meg_picks = pick_types(raw.info, meg=True, stim=False, ecg=False,
                           eog=False, exclude='bads')[1:13:3]
    epochs = Epochs(raw, events, event_id, tmin, tmax, picks=meg_picks,
                    baseline=None, preload=True, verbose=False)

    # Constructor must reject estimators that are not decompositions.
    assert_raises(ValueError, UnsupervisedSpatialFilter, KernelRidge(2))

    data = epochs.get_data()
    n_comp = 4

    # Separate fit()+transform() must equal fit_transform() on a new
    # instance with the same PCA configuration.
    fitted = UnsupervisedSpatialFilter(PCA(n_comp))
    fitted.fit(data)
    fresh = UnsupervisedSpatialFilter(PCA(n_comp))

    transformed = fitted.transform(data)
    assert_equal(transformed.ndim, 3)
    assert_array_almost_equal(transformed, fresh.fit_transform(data))
    assert_equal(transformed.shape[1], n_comp)
    # Round-trip through inverse_transform recovers the original data.
    assert_array_almost_equal(fitted.inverse_transform(transformed), data)

    # average=True path runs; a non-boolean 'average' raises.
    avg_filter = UnsupervisedSpatialFilter(PCA(4), average=True)
    avg_filter.fit_transform(data)
    assert_raises(ValueError, UnsupervisedSpatialFilter, PCA(4), 2)
Esempio n. 3
0
# Epoch the raw data around the two event codes of interest
# ("target" = 1, "not-target" = 2), loading everything into memory.
epochs = mne.Epochs(p25_dat,
                    events, {
                        "target": 1,
                        "not-target": 2
                    },
                    preload=True)

# Target-condition data: epochs x channels x time points.
X = epochs["target"].get_data()
# Spatially unmix each epoch with ICA.
# NOTE(review): average=False presumably fits the filter on the epoch data
# rather than on the evoked average -- confirm against the
# UnsupervisedSpatialFilter documentation.
ica = UnsupervisedSpatialFilter(FastICA(), average=False)
ica_data = ica.fit_transform(X)
# Average the ICA components across epochs and wrap the result in an
# EvokedArray with 32 synthetic 'eeg' channels so it can be plotted like an
# evoked response.
ev2 = mne.EvokedArray(
    np.mean(ica_data, axis=0),
    mne.create_info(32, epochs.info['sfreq'], ch_types='eeg'))

ev2.plot(show=False)

# ICA

# explore difference in components based on "epoch", seems to be a split after
# epoch ~30, is this a different test? Group by
ica = ICA(n_components=.99, method='fastica')

# Fit ICA separately on each condition and plot the resulting components
# (the fluent fit(...).plot_components(...) refits the same ICA object).
ica.fit(epochs["target"]).plot_components(inst=epochs["target"])

ica.fit(epochs["not-target"]).plot_components(inst=epochs["not-target"])

from mne.io.pick import _pick_data_channels
# look at target where user got correct answer vs not-target...
# Indices of the data channels, excluding bads and reference MEG channels.
picks = _pick_data_channels(epochs.info, exclude='bads', with_ref_meg=False)
Esempio n. 4
0
##normalizing ICA data -- dead code kept for reference; divides each
##channel's time course by the epoch-wide sum (not currently applied)
#for epochs_idx in range(len(ica_data)):
#    for channels_idx in range(14):
#        ica_data[epochs_idx,channels_idx] /= ica_data[epochs_idx].sum()

# Flatten each epoch's (channels x times) ICA matrix into one feature
# vector: result shape is (n_epochs, n_channels * n_times).
ica_data_reshape = ica_data.reshape(
    (ica_data.shape[0], ica_data.shape[1] * ica_data.shape[2]))

#------------------------------------------------------------------------------

#Checking ICA through plot

# Fixed random_state keeps the ICA decomposition reproducible across runs.
method = 'fastica'
random_state = 42
ica = ICA(n_components=13, method=method, random_state=random_state)
ica.fit(epochs)
ica.plot_components()
# Detailed diagnostics for the first component only (picks=0).
ica.plot_properties(epochs, picks=0)
# Overlay the evoked signal before/after ICA cleaning.
ica.plot_overlay(evoked, title='Plot Overlay', show=True)

#------------------------------------------------------------------------------
#------------------------------------------------------------------------------
#------------------------------------------------------------------------------

from PIL import Image
import os.path, sys

#zero-produceERP-Image
steps = np.arange(9.5, 20.5, 0.001)
for step in steps:
    evoked_zero.plot_topomap(times=step,
Esempio n. 5
0
File: rsa.py Progetto: Fosca/umne
def gen_observed_dissimilarity(epochs0,
                               epochs1,
                               n_pca=30,
                               metric='spearmanr',
                               sliding_window_size=None,
                               sliding_window_step=None,
                               sliding_window_min_size=None,
                               debug=None):
    """
    Generate the observed dissimilarity matrix between two sets of epochs.

    :param epochs0: Epochs, averaged over the relevant parameters
    :param epochs1: Epochs, averaged over the relevant parameters
    :param n_pca: the number of PCA components. Forced to None (with a
            warning) when metric='mahalanobis'.
    :param metric: The metric to use when calculating distance between
            instances in a feature array, for non-Riemannian dissimilarity.
            If metric is a string, it must be one of the options allowed by
            scipy.spatial.distance.pdist for its metric parameter, or a
            metric listed in pairwise.PAIRWISE_DISTANCE_FUNCTIONS.
            If metric is precomputed, X is assumed to be a distance matrix.
            Alternatively, if metric is a callable function, it is called on
            each pair of instances (rows) and the resulting value recorded.
            The callable should take two arrays from X as input and return a
            value indicating the distance between them.
    :param sliding_window_size: If specified (!= None), the data will be
            averaged using a sliding window before computing dissimilarity.
            This parameter is the number of time points included in each window.
    :param sliding_window_step: The number of time points for sliding the
            window on each step.
    :param sliding_window_min_size: The minimal number of time points
            acceptable in the last step of the sliding window.
            If None: min_window_size will be the same as window_size.
    :param debug: True, or a set of debug-topic names ('zscore', 'dissim').
    :return: DissimilarityMatrix wrapping one matrix per time point
    """
    # -- Window size and step must be given together (or omitted together).
    assert (sliding_window_size is None) == (sliding_window_step is None), \
        "Either sliding_window_size and sliding_window_step are both None, or they are both not None"
    if not debug:
        debug = set()

    # -- PCA is incompatible with the Mahalanobis metric: drop it.
    if n_pca is not None and metric == 'mahalanobis':
        print(
            'WARNING: PCA should not be used for metric=mahalanobis, ignoring this parameter'
        )
        n_pca = None

    # -- Original data: #epochs x Channels x TimePoints
    x0 = epochs0.get_data()
    x1 = epochs1.get_data()

    # -- Per-channel z-scoring; dimensions are unchanged.
    zscore_debug = debug is True or 'zscore' in debug
    x0 = transformers.ZScoreEachChannel(debug=zscore_debug).fit_transform(x0)
    x1 = transformers.ZScoreEachChannel(debug=zscore_debug).fit_transform(x1)

    # -- PCA fitted jointly on both datasets.
    # -- Resulting data: Epochs x PCA-Components x TimePoints
    if n_pca is not None:
        spatial_pca = UnsupervisedSpatialFilter(PCA(n_pca), average=False)
        spatial_pca.fit(np.vstack([x0, x1]))
        x0 = spatial_pca.transform(x0)
        x1 = spatial_pca.transform(x1)

    # -- Optionally average over a sliding window; 'timepoints' holds the
    # -- time stamp associated with each (possibly windowed) sample.
    # -- Result in non-Riemann mode: epochs x Channels/components x TimePoints
    # -- todo: Result in Riemann: TimeWindows x Stimuli x Channels/components x TimePoints-within-one-window; will require SlidingWindow(average=False)
    if sliding_window_size is not None:
        window = transformers.SlidingWindow(
            window_size=sliding_window_size,
            step=sliding_window_step,
            min_window_size=sliding_window_min_size)
        x0 = window.fit_transform(x0)
        x1 = window.fit_transform(x1)

        # Each window is represented by the time stamp at its center.
        center_inds = window.start_window_inds(
            len(epochs0.times)) + round(sliding_window_size / 2)
        timepoints = epochs0.times[center_inds]
    else:
        timepoints = epochs0.times

    # -- Dissimilarity matrices, one per time point: Time x epochs1 x epochs2
    dissim = _compute_dissimilarity(x0, x1, metric,
                                    debug is True or 'dissim' in debug)
    # todo in Riemann: xformer = RiemannDissimilarity(metric=riemann_metric, debug=debug is True or 'dissim' in debug)

    assert len(dissim) == len(timepoints), \
        "There are {} dissimilarity matrices but {} times".format(
            len(dissim), len(timepoints))

    return DissimilarityMatrix(dissim,
                               epochs0.metadata,
                               epochs1.metadata,
                               times=timepoints,
                               epochs0_info=epochs0.info,
                               epochs1_info=epochs1.info)