def test_Serialize_ELM(data_class):
    """Pickling and unpickling a fitted ELM must preserve its predictions."""
    X, Y = data_class
    elm = ELMClassifier(n_neurons=(5, 6, 7),
                        ufunc=('tanh', None, 'sigm'),
                        density=(None, None, 0.5),
                        pairwise_metric=(None, 'l1', None),
                        random_state=0)
    elm.fit(X, Y)
    Yh1 = elm.predict(X)

    elm_data = pickle.dumps(elm, pickle.HIGHEST_PROTOCOL)
    elm2 = pickle.loads(elm_data)

    # Bug fix: predict with the deserialized model `elm2` (was `elm`),
    # otherwise the round-trip is never actually verified.
    Yh2 = elm2.predict(X)
    assert Yh1 == pytest.approx(Yh2)
# Example 2
def test_Classifier_predict_ReturnsIntegerArray():
    """predict() returns an ndarray whose values are whole class labels."""
    X = np.array([[1], [2], [3]])
    Y = np.array([0, 0, 1])
    elm = ELMClassifier().fit(X, Y)
    Yh = elm.predict(X)
    assert isinstance(Yh, np.ndarray)
    # `np.int` was removed in NumPy 1.24; the builtin `int` is the
    # documented replacement and behaves identically here.
    assert Yh == approx(Yh.astype(int))
# Example 3
 def test_Classifier_predict_ReturnsIntegerArray(self):
     """predict() returns an ndarray whose values are whole class labels."""
     X = np.array([[1], [2], [3]])
     Y = np.array([0, 0, 1])
     elm = ELMClassifier()
     elm.fit(X, Y)
     Yh = elm.predict(X)
     self.assertIsInstance(Yh, np.ndarray)
     # `np.int` was removed in NumPy 1.24; the builtin `int` is the
     # documented replacement and behaves identically here.
     assert_allclose(Yh, Yh.astype(int))
# Example 4
def test_ELMClassifier_MultilabelClassification_Works(data_ml):
    """Multi-label classifier output matches a 0.5-thresholded regressor."""
    X, Y = data_ml
    elm_c = ELMClassifier(random_state=0).fit(X, Y)
    elm_r = ELMRegressor(random_state=0).fit(X, Y)

    Yc_hat = elm_c.predict(X)
    # `np.int` was removed in NumPy 1.24; builtin `int` is the replacement
    # (and matches the unittest variant of this test elsewhere in the file).
    Yr_hat = (elm_r.predict(X) >= 0.5).astype(int)

    assert Yc_hat == approx(Yr_hat)
def test_Serialize_HiddenLayer(data_class):
    """Hidden layers pickled one by one and restored leave predictions intact."""
    X, Y = data_class
    elm = ELMClassifier(n_neurons=(5, 6, 7),
                        ufunc=('tanh', None, 'sigm'),
                        density=(None, None, 0.5),
                        pairwise_metric=(None, 'l1', None),
                        random_state=0)
    elm.fit(X, Y)
    expected = elm.predict(X)

    # Serialize every hidden layer separately, then drop them from the model.
    blobs = []
    for layer in elm.hidden_layers_:
        blobs.append(pickle.dumps(layer, pickle.HIGHEST_PROTOCOL))
    del elm.hidden_layers_

    # Reattach freshly deserialized layers and compare predictions.
    elm.hidden_layers_ = [pickle.loads(blob) for blob in blobs]
    restored_predictions = elm.predict(X)
    assert expected == pytest.approx(restored_predictions)
# Example 6
    def test_ELMClassifier_MultilabelClassification_Works(self):
        """Multi-label classifier output matches a 0.5-thresholded regressor."""
        X, Y = self.data_ml
        classifier = ELMClassifier(random_state=0).fit(X, Y)
        regressor = ELMRegressor(random_state=0).fit(X, Y)

        predicted_labels = classifier.predict(X)
        thresholded = (regressor.predict(X) >= 0.5).astype(int)

        assert_allclose(predicted_labels, thresholded)
# Example 7
def test_ELMClassifier_ReportedScore_ActuallyIsClassificationScore(data_class):
    """Classifier predictions equal the argmax of a one-hot-target regression."""
    X, Y = data_class
    # Build a one-hot target matrix with one column per class label.
    one_hot = np.vstack([Y == label for label in (0, 1, 2)]).T

    classifier = ELMClassifier(random_state=0).fit(X, Y)
    regressor = ELMRegressor(random_state=0).fit(X, one_hot)

    predicted = classifier.predict(X)
    argmax_of_regression = regressor.predict(X).argmax(1)

    assert predicted == approx(argmax_of_regression)
# Example 8
def test_Xor_OneNeuron_Solved():
    """ELM should be able to solve XOR problem.
    """
    inputs = np.array([[0, 0], [1, 1], [1, 0], [0, 1]])
    targets = np.array([1, 1, -1, -1])

    model = ELMClassifier(n_neurons=3, random_state=0)
    model.fit(inputs, targets)
    outputs = model.predict(inputs)
    # First two samples are the positive class, last two the negative one.
    assert outputs[0] > 0 and outputs[1] > 0
    assert outputs[2] < 0 and outputs[3] < 0
# Example 9
    def test_Xor_OneNeuron_Solved(self):
        """ELM should be able to solve XOR problem.
        """
        inputs = np.array([[0, 0], [1, 1], [1, 0], [0, 1]])
        targets = np.array([1, 1, -1, -1])

        model = ELMClassifier(n_neurons=3, random_state=0)
        model.fit(inputs, targets)
        outputs = model.predict(inputs)
        # First two samples are the positive class, last two the negative one.
        self.assertGreater(outputs[0], 0)
        self.assertGreater(outputs[1], 0)
        self.assertLess(outputs[2], 0)
        self.assertLess(outputs[3], 0)
# Example 10
def test_Serialize_ContinueTraining(data_class):
    """A pickled half-trained model can resume training via partial_fit."""
    X, Y = data_class
    even_X, even_Y = X[0::2], Y[0::2]
    odd_X, odd_Y = X[1::2], Y[1::2]

    # Reference model trained on the full data in one go.
    reference = ELMClassifier(n_neurons=10, random_state=0)
    reference.fit(X, Y)
    expected = reference.predict(X)

    # Train on half the data, round-trip through pickle, then finish training.
    partial = ELMClassifier(n_neurons=10, random_state=0)
    partial.fit(even_X, even_Y)
    payload = pickle.dumps(partial, pickle.HIGHEST_PROTOCOL)
    del partial
    resumed = pickle.loads(payload)
    resumed.partial_fit(odd_X, odd_Y)
    actual = resumed.predict(X)

    assert expected == pytest.approx(actual)
# Example 11
    def test_IterativeSolver_SkipIntermediateSolution(self):
        """Output weights may be deferred until the final partial_fit call."""
        X, T = self.data_class
        elm = ELMClassifier(classes=[0, 1, 2], n_neurons=10, alpha=1)

        # Feed one class at a time, skipping the solver for all but the last batch.
        for label in (0, 1):
            mask = T == label
            elm.partial_fit(X[mask], T[mask], compute_output_weights=False)

        last = T == 2
        elm.partial_fit(X[last], T[last])

        predicted = elm.predict(X)
        self.assertEqual(set(predicted), {0, 1, 2})
# Example 12
    def test_IterativeClassification_FeedClassesOneByOne(self):
        """New labels can be incorporated on the fly with update_classes=True."""
        X, T = self.data_class
        # Deliberately declare classes that do not match the data (labels 1 and 2
        # arrive later and must be picked up via update_classes=True).
        elm = ELMClassifier(classes=[0, -1, -2], n_neurons=10, alpha=1)

        first = T == 0
        elm.partial_fit(X[first], T[first])

        for label in (1, 2):
            mask = T == label
            elm.partial_fit(X[mask], T[mask], update_classes=True)

        predicted = elm.predict(X)
        self.assertEqual(set(predicted), {0, 1, 2})
# Example 13
def test_SerializeToFile_ContinueTraining(data_class):
    """Round-trip through a temporary file, then resume with partial_fit."""
    X, Y = data_class
    even_X, even_Y = X[0::2], Y[0::2]
    odd_X, odd_Y = X[1::2], Y[1::2]

    # Reference model trained on all samples at once.
    reference = ELMClassifier(n_neurons=10, random_state=0)
    reference.fit(X, Y)
    expected = reference.predict(X)

    # Train on half the data, persist to a temp file, reload, finish training.
    half_trained = ELMClassifier(n_neurons=10, random_state=0)
    half_trained.fit(even_X, even_Y)
    with tempfile.TemporaryFile() as storage:
        pickle.dump(half_trained, storage, pickle.HIGHEST_PROTOCOL)
        del half_trained
        storage.seek(0)
        restored = pickle.load(storage)

    restored.partial_fit(odd_X, odd_Y)
    actual = restored.predict(X)

    assert expected == pytest.approx(actual)
# Example 14
 def test_Classifier_WrongNumberOfFeatures_RaisesError(self):
     """predict() must reject input with fewer features than seen at fit time."""
     X, T = self.data_class
     model = ELMClassifier()
     model.fit(X, T)
     # Drop the first feature column: predict must notice the mismatch.
     with self.assertRaises(ValueError):
         model.predict(X[:, 1:])
# Example 15
============================

An example plot of :class:`skelm.ELMClassifier`
"""
import numpy as np
from matplotlib import pyplot as plt
from skelm import ELMClassifier

# Fit on two labelled anchor points, one per class.
X = [[0, 0], [1, 1]]
y = [0, 1]
clf = ELMClassifier()
clf.fit(X, y)

# Classify 500 random points in the unit square with a fixed seed
# so the plot is reproducible.
rng = np.random.RandomState(13)
X_test = rng.rand(500, 2)
y_pred = clf.predict(X_test)

# Split the test points by predicted class for separate scatter styling.
X_0 = X_test[y_pred == 0]
X_1 = X_test[y_pred == 1]

# Large markers for the two training anchor points.
p0 = plt.scatter(0, 0, c='red', s=100)
p1 = plt.scatter(1, 1, c='blue', s=100)

# Smaller markers for the predicted regions of each class.
ax0 = plt.scatter(X_0[:, 0], X_0[:, 1], c='crimson', s=50)
ax1 = plt.scatter(X_1[:, 0], X_1[:, 1], c='deepskyblue', s=50)

leg = plt.legend([p0, p1, ax0, ax1],
                 ['Point 0', 'Point 1', 'Class 0', 'Class 1'],
                 loc='upper left',
                 fancybox=True,
                 scatterpoints=1)
# Example 16
def test_Classifier_WrongNumberOfFeatures_RaisesError(data_class):
    """predict() must reject input with fewer features than seen at fit time."""
    X, T = data_class
    model = ELMClassifier()
    model.fit(X, T)
    # Drop the first feature column: predict must notice the mismatch.
    narrow_X = X[:, 1:]
    with pytest.raises(ValueError):
        model.predict(narrow_X)