def test_TangentSpace_inversetransform_without_fit():
    """Check inverse_transform on an unfitted TangentSpace recovers the input.

    Vectors produced with the 'identity' metric are mapped back through an
    unfitted 'riemann'-metric instance; the original matrices must come back
    (to numerical precision).
    """
    covs = generate_cov(10, 3)
    tangent_vectors = TangentSpace(metric='identity').fit_transform(covs)
    unfitted = TangentSpace(metric='riemann')
    reconstructed = unfitted.inverse_transform(tangent_vectors)
    assert_array_almost_equal(covs, reconstructed)
def test_TangentSpace_inversetransform():
    """Check that a fitted transform/inverse_transform round-trip is identity."""
    covs = generate_cov(10, 3)
    tspace = TangentSpace(metric='riemann')
    tspace.fit(covs)
    # Project to the tangent space and back; matrices must be unchanged.
    reconstructed = tspace.inverse_transform(tspace.transform(covs))
    assert_array_almost_equal(covs, reconstructed)
# Run the (previously constructed) permutation test and time it.
# NOTE(review): `p_test`, `t_init`, `epochs_data`, `labels` and `n_perms`
# are defined earlier in the file, outside this chunk.
p, F = p_test.test(epochs_data, labels)
duration = time() - t_init

# Plot the permutation distribution with the elapsed time in the title.
fig, axes = plt.subplots(1, 1, figsize=[6, 3], sharey=True)
p_test.plot(nbins=10, axes=axes)
plt.title('Pairwise distance - %.2f sec.' % duration)
print('p-value: %.3f' % p)
sns.despine()
plt.tight_layout()
plt.show()

###############################################################################
# Classification based permutation test
###############################################################################

# Pipeline: spatially-filtered covariances -> tangent-space projection
# (log-Euclidean metric) -> logistic regression.
clf = make_pipeline(XdawnCovariances(2),
                    TangentSpace('logeuclid'),
                    LogisticRegression())

t_init = time()
# Permutation test driven by cross-validated classification score.
p_test = PermutationModel(n_perms, model=clf, cv=3)
p, F = p_test.test(epochs_data, labels)
duration = time() - t_init

# Same visualisation as above, for the classification-based test.
fig, axes = plt.subplots(1, 1, figsize=[6, 3], sharey=True)
p_test.plot(nbins=10, axes=axes)
plt.title('Classification - %.2f sec.' % duration)
print('p-value: %.3f' % p)
sns.despine()
plt.tight_layout()
plt.show()
def test_TangentSpace_transform_with_ts_update():
    """Smoke test: transform with tsupdate=True runs without raising."""
    covs = generate_cov(10, 3)
    tspace = TangentSpace(metric='riemann', tsupdate=True)
    tspace.fit(covs)
    tspace.transform(covs)
def test_TangentSpace_transform_without_fit():
    """Smoke test: transform on an unfitted TangentSpace runs without raising."""
    covs = generate_cov(10, 3)
    TangentSpace(metric='riemann').transform(covs)
def test_TangentSpace_transform():
    """Smoke test: fit followed by transform completes without raising."""
    covs = generate_cov(10, 3)
    tspace = TangentSpace(metric='riemann')
    tspace.fit(covs)
    tspace.transform(covs)
def test_TangentSpace_fit():
    """Smoke test: fitting on a set of covariance matrices completes."""
    covs = generate_cov(10, 3)
    TangentSpace(metric='riemann').fit(covs)
def test_TangentSpace_init():
    """Test init of Tangent Space.

    The original test only constructed the estimator and could never fail.
    Per the scikit-learn estimator convention, ``__init__`` must store its
    parameters verbatim, so we assert the ``metric`` attribute round-trips.
    """
    ts = TangentSpace(metric='riemann')
    assert ts.metric == 'riemann'
# NOTE(review): `verbose=False)` closes a call (presumably an mne.Epochs
# construction) that starts before this chunk — confirm against the full file.
                    verbose=False)
labels = epochs.events[:, -1]
evoked = epochs.average()

###############################################################################
# Decoding in tangent space with a logistic regression

n_components = 2  # pick some components

# Define a monte-carlo cross-validation generator (reduce variance):
cv = KFold(n_splits=10, shuffle=True, random_state=42)
epochs_data = epochs.get_data()

# Pipeline: Xdawn spatial filtering -> covariance estimation ->
# Riemannian tangent-space projection -> logistic regression.
clf = make_pipeline(XdawnCovariances(n_components),
                    TangentSpace(metric='riemann'),
                    LogisticRegression())

preds = np.zeros(len(labels))

# Cross-validated predictions: fit on each training fold, predict its
# held-out fold so every epoch receives exactly one prediction.
for train_idx, test_idx in cv.split(epochs_data):
    y_train, y_test = labels[train_idx], labels[test_idx]
    clf.fit(epochs_data[train_idx], y_train)
    preds[test_idx] = clf.predict(epochs_data[test_idx])

# Printing the results
acc = np.mean(preds == labels)
print("Classification accuracy: %f " % (acc))

names = ['audio left', 'audio right', 'vis left', 'vis right']
plot_confusion_matrix(preds, labels, names)