def test_Classifier_predict_ReturnsIntegerArray():
    """Classifier predictions are a numpy array of integer-valued class labels."""
    X = np.array([[1], [2], [3]])
    Y = np.array([0, 0, 1])
    elm = ELMClassifier().fit(X, Y)
    Yh = elm.predict(X)
    assert isinstance(Yh, np.ndarray)
    # `np.int` was removed in NumPy 1.24; the builtin `int` is the supported spelling
    assert Yh == approx(Yh.astype(int))
def test_Classifier_predict_ReturnsIntegerArray(self):
    """Classifier predictions are a numpy array of integer-valued class labels."""
    X = np.array([[1], [2], [3]])
    Y = np.array([0, 0, 1])
    elm = ELMClassifier()
    elm.fit(X, Y)
    Yh = elm.predict(X)
    self.assertIsInstance(Yh, np.ndarray)
    # `np.int` was removed in NumPy 1.24; the builtin `int` is the supported spelling
    assert_allclose(Yh, Yh.astype(int))
def test_ELMClassifier_MultilabelClassification_Works(self):
    """Multi-label targets: classifier output equals the thresholded regressor output."""
    features, targets = self.data_ml
    clf = ELMClassifier(random_state=0).fit(features, targets)
    reg = ELMRegressor(random_state=0).fit(features, targets)
    predicted_labels = clf.predict(features)
    thresholded = (reg.predict(features) >= 0.5).astype(int)
    assert_allclose(predicted_labels, thresholded)
def test_ELMClassifier_MultilabelClassification_Works(data_ml):
    """Multi-label targets: classifier output equals the thresholded regressor output."""
    X, Y = data_ml
    elm_c = ELMClassifier(random_state=0).fit(X, Y)
    elm_r = ELMRegressor(random_state=0).fit(X, Y)
    Yc_hat = elm_c.predict(X)
    # `np.int` was removed in NumPy 1.24; the builtin `int` is the supported spelling
    Yr_hat = (elm_r.predict(X) >= 0.5).astype(int)
    assert Yc_hat == approx(Yr_hat)
def test_Classifier_PartialFit(data_class):
    """Two half-batch partial fits must reproduce a single full fit exactly."""
    X, T = data_class
    full_model = ELMClassifier(n_neurons=4, alpha=1, random_state=0)
    batched_model = ELMClassifier(n_neurons=4, alpha=1, random_state=0)
    full_model.fit(X, T)
    batched_model.partial_fit(X[::2], T[::2])
    batched_model.partial_fit(X[1::2], T[1::2])
    assert full_model.solver_.coef_ == approx(batched_model.solver_.coef_)
def test_Classifier_PartialFit(self):
    """Two half-batch partial fits must reproduce a single full fit exactly."""
    X, T = self.data_class
    full_model = ELMClassifier(n_neurons=4, alpha=1, random_state=0)
    batched_model = ELMClassifier(n_neurons=4, alpha=1, random_state=0)
    full_model.fit(X, T)
    batched_model.partial_fit(X[::2], T[::2])
    batched_model.partial_fit(X[1::2], T[1::2])
    assert_allclose(full_model.solver_.coef_, batched_model.solver_.coef_)
def test_ELMClassifier_ReportedScore_ActuallyIsClassificationScore(data_class):
    """Classifier labels coincide with argmax over a one-hot-trained regressor."""
    X, Y = data_class
    one_hot = np.vstack((Y == 0, Y == 1, Y == 2)).T
    clf = ELMClassifier(random_state=0).fit(X, Y)
    reg = ELMRegressor(random_state=0).fit(X, one_hot)
    labels_from_clf = clf.predict(X)
    labels_from_reg = reg.predict(X).argmax(1)
    assert labels_from_clf == approx(labels_from_reg)
def test_Xor_OneNeuron_Solved(self):
    """ELM should be able to solve XOR problem."""
    inputs = np.array([[0, 0], [1, 1], [1, 0], [0, 1]])
    labels = np.array([1, 1, -1, -1])
    model = ELMClassifier(n_neurons=3, random_state=0)
    model.fit(inputs, labels)
    outputs = model.predict(inputs)
    # same-class pairs (0,0)/(1,1) map positive, (1,0)/(0,1) map negative
    for idx in (0, 1):
        self.assertGreater(outputs[idx], 0)
    for idx in (2, 3):
        self.assertLess(outputs[idx], 0)
def test_Xor_OneNeuron_Solved():
    """ELM should be able to solve XOR problem."""
    inputs = np.array([[0, 0], [1, 1], [1, 0], [0, 1]])
    labels = np.array([1, 1, -1, -1])
    model = ELMClassifier(n_neurons=3, random_state=0)
    model.fit(inputs, labels)
    predictions = model.predict(inputs)
    # same-class pairs (0,0)/(1,1) map positive, (1,0)/(0,1) map negative
    assert predictions[0] > 0
    assert predictions[1] > 0
    assert predictions[2] < 0
    assert predictions[3] < 0
def test_IterativeSolver_SkipIntermediateSolution(self):
    """Deferring output weights until the final batch still yields all three classes."""
    X, T = self.data_class
    model = ELMClassifier(classes=[0, 1, 2], n_neurons=10, alpha=1)
    # feed the first two classes without solving for output weights
    for label in (0, 1):
        mask = T == label
        model.partial_fit(X[mask], T[mask], compute_output_weights=False)
    # last batch triggers the actual solution
    last = T == 2
    model.partial_fit(X[last], T[last])
    predicted = model.predict(X)
    self.assertEqual(set(predicted), {0, 1, 2})
def test_Serialize_ContinueTraining(data_class):
    """partial_fit after a pickle round-trip matches training in a single pass."""
    X, Y = data_class
    even_x, even_y = X[0::2], Y[0::2]
    odd_x, odd_y = X[1::2], Y[1::2]

    reference = ELMClassifier(n_neurons=10, random_state=0)
    reference.fit(X, Y)
    expected = reference.predict(X)

    partial = ELMClassifier(n_neurons=10, random_state=0)
    partial.fit(even_x, even_y)
    payload = pickle.dumps(partial, pickle.HIGHEST_PROTOCOL)
    del partial

    restored = pickle.loads(payload)
    restored.partial_fit(odd_x, odd_y)
    actual = restored.predict(X)
    assert expected == pytest.approx(actual)
def test_IterativeClassification_FeedClassesOneByOne(self):
    """Classes introduced one batch at a time with update_classes=True become predictable."""
    X, T = self.data_class
    # start with placeholder classes; 1 and 2 are introduced via update_classes
    model = ELMClassifier(classes=[0, -1, -2], n_neurons=10, alpha=1)
    initial = T == 0
    model.partial_fit(X[initial], T[initial])
    for label in (1, 2):
        mask = T == label
        model.partial_fit(X[mask], T[mask], update_classes=True)
    predicted = model.predict(X)
    self.assertEqual(set(predicted), {0, 1, 2})
def test_SerializeToFile_ContinueTraining(data_class):
    """Pickle to a temp file, reload, continue with partial_fit: same result as one fit."""
    X, Y = data_class
    first_x, first_y = X[0::2], Y[0::2]
    second_x, second_y = X[1::2], Y[1::2]

    reference = ELMClassifier(n_neurons=10, random_state=0)
    reference.fit(X, Y)
    expected = reference.predict(X)

    partial = ELMClassifier(n_neurons=10, random_state=0)
    partial.fit(first_x, first_y)
    with tempfile.TemporaryFile() as storage:
        pickle.dump(partial, storage, pickle.HIGHEST_PROTOCOL)
        del partial
        storage.seek(0)
        restored = pickle.load(storage)
    restored.partial_fit(second_x, second_y)
    actual = restored.predict(X)
    assert expected == pytest.approx(actual)
def test_Serialize_HiddenLayer(data_class):
    """Hidden layers survive an individual pickle round-trip without changing predictions."""
    X, Y = data_class
    model = ELMClassifier(n_neurons=(5, 6, 7),
                          ufunc=('tanh', None, 'sigm'),
                          density=(None, None, 0.5),
                          pairwise_metric=(None, 'l1', None),
                          random_state=0)
    model.fit(X, Y)
    before = model.predict(X)
    serialized_layers = [pickle.dumps(layer, pickle.HIGHEST_PROTOCOL)
                         for layer in model.hidden_layers_]
    del model.hidden_layers_
    model.hidden_layers_ = [pickle.loads(blob) for blob in serialized_layers]
    after = model.predict(X)
    assert before == pytest.approx(after)
def test_Serialize_ELM(data_class):
    """A pickled and restored ELM must give the same predictions as the original."""
    X, Y = data_class
    elm = ELMClassifier(n_neurons=(5, 6, 7),
                        ufunc=('tanh', None, 'sigm'),
                        density=(None, None, 0.5),
                        pairwise_metric=(None, 'l1', None),
                        random_state=0)
    elm.fit(X, Y)
    Yh1 = elm.predict(X)
    elm_data = pickle.dumps(elm, pickle.HIGHEST_PROTOCOL)
    elm2 = pickle.loads(elm_data)
    # BUG FIX: predict with the deserialized model (was `elm.predict(X)`),
    # otherwise the pickle round-trip is never actually exercised.
    Yh2 = elm2.predict(X)
    assert Yh1 == pytest.approx(Yh2)
def test_Classifier_WrongNumberOfFeatures_RaisesError(self):
    """Predicting with fewer features than at fit time must raise ValueError."""
    X, T = self.data_class
    model = ELMClassifier()
    model.fit(X, T)
    truncated = X[:, 1:]
    with self.assertRaises(ValueError):
        model.predict(truncated)
def test_Classifier_WrongNumberOfFeatures_RaisesError(data_class):
    """Predicting with fewer features than at fit time must raise ValueError."""
    X, T = data_class
    model = ELMClassifier()
    model.fit(X, T)
    truncated = X[:, 1:]
    with pytest.raises(ValueError):
        model.predict(truncated)
def test_Classifier_SetClasses_IgnoresOther(self):
    """Explicit `classes` restricts predictions to exactly those labels."""
    X, T = self.data_class
    model = ELMClassifier(classes=[0, 1])
    model.fit(X, T)
    predicted = model.predict(X)
    self.assertEqual(set(predicted), {0, 1})
def test_RegularizationAlpha_NegativeValue_Raises(data_class):
    """A negative regularization strength is invalid and must raise at fit time."""
    X, Y = data_class
    model = ELMClassifier(alpha=-1)
    with pytest.raises(ValueError):
        model.fit(X, Y)
def test_Classifier_SetClasses_IgnoresOther(data_class):
    """Explicit `classes` restricts predictions to exactly those labels."""
    X, T = data_class
    predicted = ELMClassifier(classes=[0, 1]).fit(X, T).predict(X)
    assert {0, 1} == set(predicted)
def test_RegularizationAlpha_NegativeValue_Raises(self):
    """A negative regularization strength is invalid and must raise at fit time."""
    X, Y = self.data_class
    model = ELMClassifier(alpha=-1)
    with self.assertRaises(ValueError):
        model.fit(X, Y)
""" ============================ Plotting Template Classifier ============================ An example plot of :class:`skelm.template.TemplateClassifier` """ import numpy as np from matplotlib import pyplot as plt from skelm import ELMClassifier X = [[0, 0], [1, 1]] y = [0, 1] clf = ELMClassifier() clf.fit(X, y) rng = np.random.RandomState(13) X_test = rng.rand(500, 2) y_pred = clf.predict(X_test) X_0 = X_test[y_pred == 0] X_1 = X_test[y_pred == 1] p0 = plt.scatter(0, 0, c='red', s=100) p1 = plt.scatter(1, 1, c='blue', s=100) ax0 = plt.scatter(X_0[:, 0], X_0[:, 1], c='crimson', s=50) ax1 = plt.scatter(X_1[:, 0], X_1[:, 1], c='deepskyblue', s=50) leg = plt.legend([p0, p1, ax0, ax1], ['Point 0', 'Point 1', 'Class 0', 'Class 1'],
def test_Classifier_Multilabel(self):
    """Fitting on a two-column (multi-label) target must not raise."""
    X, T = self.data_class
    multilabel_target = np.ones((T.shape[0], 2))
    multilabel_target[:, 0] = T
    ELMClassifier().fit(X, multilabel_target)
def test_Classification_Iris_BetterThanNaive(data_class):
    """Training accuracy must beat a naive 3-class random guess (1/3)."""
    X, Y = data_class
    model = ELMClassifier(random_state=0)
    model.fit(X, Y)
    accuracy = model.score(X, Y)
    assert accuracy > 0.33