Example #1
def test_MultipleHiddenLayers(data_reg):
    X, Y = data_reg
    elm = ELMRegressor(n_neurons=[2, 3],
                       ufunc=['tanh', 'sigm'],
                       density=[None, None],
                       pairwise_metric=[None, None])
    elm.fit(X, Y)
    assert len(elm.hidden_layers_) == 2
Example #2
def test_MultipleHiddenLayers(self):
    X, Y = self.data_reg
    elm = ELMRegressor(n_neurons=[2, 3],
                       ufunc=['tanh', 'sigm'],
                       density=[None, None],
                       pairwise_metric=[None, None])
    elm.fit(X, Y)
    self.assertEqual(len(elm.SLFNs_), 2)
Example #3
def test_MultipleHiddenLayers_MoreCombinations(data_reg):
    X, Y = data_reg
    elm = ELMRegressor(n_neurons=[1, 1, 1, 1, 1],
                       ufunc=['relu', 'sigm', np.sin, None, None],
                       density=[None, 0.5, 0.8, None, None],
                       pairwise_metric=[None, None, None, 'l1', 'chebyshev'])
    elm.fit(X, Y)
    assert len(elm.hidden_layers_) == 5
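In these multi-layer examples the parameter lists are positional: element i of n_neurons, ufunc, density and pairwise_metric together configure hidden layer i. A minimal standalone sketch of the same idea, using random data in place of the data_reg fixture (the layer sizes here are arbitrary):

import numpy as np
from skelm import ELMRegressor

rng = np.random.RandomState(0)
X = rng.randn(100, 4)
Y = X.sum(axis=1)

# first layer: 10 dense tanh neurons; second layer: 5 sigmoid neurons at 50% density
elm = ELMRegressor(n_neurons=[10, 5], ufunc=['tanh', 'sigm'], density=[None, 0.5])
elm.fit(X, Y)
print(len(elm.hidden_layers_))  # one entry per configured hidden layer -> 2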
Example #4
def test_ELMClassifier_MultilabelClassification_Works(data_ml):
    X, Y = data_ml
    elm_c = ELMClassifier(random_state=0).fit(X, Y)
    elm_r = ELMRegressor(random_state=0).fit(X, Y)

    Yc_hat = elm_c.predict(X)
    Yr_hat = (elm_r.predict(X) >= 0.5).astype(int)

    assert Yc_hat == approx(Yr_hat)
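The data_ml fixture itself is not shown in these snippets. For experimentation, a multilabel indicator matrix of the shape this test expects (one 0/1 column per label) can be generated with scikit-learn; this is only a hypothetical stand-in for the fixture:

from sklearn.datasets import make_multilabel_classification
from skelm import ELMClassifier

# stand-in for data_ml: Y is a binary indicator matrix of shape (n_samples, n_classes)
X, Y = make_multilabel_classification(n_samples=100, n_features=10, n_classes=3, random_state=0)

elm_c = ELMClassifier(random_state=0).fit(X, Y)
print(elm_c.predict(X).shape)  # expected to match Y.shape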
Example #5
def test_ELMClassifier_MultilabelClassification_Works(self):
    X, Y = self.data_ml
    elm_c = ELMClassifier(random_state=0).fit(X, Y)
    elm_r = ELMRegressor(random_state=0).fit(X, Y)

    Yc_hat = elm_c.predict(X)
    Yr_hat = (elm_r.predict(X) >= 0.5).astype(int)

    assert_allclose(Yc_hat, Yr_hat)
Example #6
def test_MultipleHiddenLayers_MoreCombinations(self):
    X, Y = self.data_reg
    elm = ELMRegressor(
        n_neurons=[1, 1, 1, 1, 1],
        ufunc=['relu', 'sigm', np.sin, None, None],
        density=[None, 0.5, 0.8, None, None],
        pairwise_metric=[None, None, None, 'l1', 'chebyshev'])
    elm.fit(X, Y)
    self.assertEqual(len(elm.SLFNs_), 5)
Example #7
def test_ELMClassifier_ReportedScore_ActuallyIsClassificationScore(data_class):
    X, Y = data_class
    Yr = np.vstack((Y == 0, Y == 1, Y == 2)).T

    elm_c = ELMClassifier(random_state=0).fit(X, Y)
    elm_r = ELMRegressor(random_state=0).fit(X, Yr)

    Yc_hat = elm_c.predict(X)
    Yr_hat = elm_r.predict(X).argmax(1)

    assert Yc_hat == approx(Yr_hat)
Example #8
def test_SineWave_Solves():
    """A highly non-linear regression problem, with added strong noise.
    """
    X = np.linspace(-1, 1, num=1000)[:, None]
    Y = np.sin(16 * X) * X + 0.2 * np.random.randn(1000)[:, None]

    elm = ELMRegressor(random_state=0)
    elm.fit(X, Y)
    Yt = elm.predict(X)

    MSE = np.mean((Y - Yt)**2)
    assert MSE < 0.3
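A quick visual check of the same fit, outside the test suite and assuming matplotlib is available (as in the plotting example further below), could look like this:

import numpy as np
from matplotlib import pyplot as plt
from skelm import ELMRegressor

X = np.linspace(-1, 1, num=1000)[:, None]
Y = np.sin(16 * X) * X + 0.2 * np.random.randn(1000)[:, None]

elm = ELMRegressor(random_state=0).fit(X, Y)
plt.scatter(X, Y, s=2, label='noisy samples')
plt.plot(X, elm.predict(X), color='red', label='ELM prediction')
plt.legend()
plt.show()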
Example #9
def test_Default_SetNumberOfNeurons(self):
    X, y = self.data_reg
    elm5 = ELMRegressor(n_neurons=5, random_state=0).fit(X, y)
    elm50 = ELMRegressor(n_neurons=50, random_state=0).fit(X, y)
    score5 = elm5.score(X, y)
    score50 = elm50.score(X, y)
    self.assertGreater(score50, score5)
    self.assertGreater(score50, 0.33)
Example #10
def test_Serialize_Solver(data_reg):
    X, Y = data_reg
    elm = ELMRegressor(random_state=0)
    elm.fit(X, Y)
    Yh1 = elm.predict(X)

    solver_data = pickle.dumps(elm.solver_, pickle.HIGHEST_PROTOCOL)
    del elm.solver_

    elm.solver_ = pickle.loads(solver_data)
    Yh2 = elm.predict(X)
    assert Yh1 == pytest.approx(Yh2)
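Beyond pickling only the solver_ attribute, the whole fitted estimator can usually be serialized the same way, since skelm follows the scikit-learn estimator conventions; the following is a sketch rather than one of the original tests:

import pickle
import numpy as np
from skelm import ELMRegressor

rng = np.random.RandomState(0)
X = rng.randn(50, 3)
Y = X @ np.array([1.0, 2.0, -1.0])

elm = ELMRegressor(random_state=0).fit(X, Y)
blob = pickle.dumps(elm, pickle.HIGHEST_PROTOCOL)  # serialize the entire fitted model
elm_restored = pickle.loads(blob)

# predictions should survive the pickle round-trip
assert np.allclose(elm.predict(X), elm_restored.predict(X))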
Example #11
def test_MultipleHL_WrongDimensions_Raises(data_reg):
    X, Y = data_reg
    elm = ELMRegressor(n_neurons=[1, 2, 3, 4], ufunc=['relu', 'sigm'])
    with pytest.raises(ValueError):
        elm.fit(X, Y)
Example #12
def test_Regressor_WrongNumberOfFeatures_RaisesError(self):
    X, T = self.data_reg
    elm = ELMRegressor()
    elm.fit(X, T)
    with self.assertRaises(ValueError):
        elm.predict(X[:, 1:])
Example #13
def test_MultipleHL_WrongDimensions_Raises(self):
    X, Y = self.data_reg
    elm = ELMRegressor(n_neurons=[1, 2, 3, 4], ufunc=['relu', 'sigm'])
    with self.assertRaises(ValueError):
        elm.fit(X, Y)
Example #14
def test_MultipleHL_Pairwise_SingleValue(self):
    X, Y = self.data_reg
    elm = ELMRegressor(n_neurons=[2, 3], pairwise_metric='l2')
    elm.fit(X, Y)
Example #15
def test_MultipleHL_Density_SingleValue(self):
    X, Y = self.data_reg
    elm = ELMRegressor(n_neurons=[2, 3], density=0.7)
    elm.fit(X, Y)
Example #16
def test_Regressor_WrongNumberOfFeatures_RaisesError(data_reg):
    X, T = data_reg
    elm = ELMRegressor().fit(X, T)
    with pytest.raises(ValueError):
        elm.predict(X[:, 1:])
Example #17
"""
===========================
Plotting Template Estimator
===========================

An example plot of :class:`skelm.template.TemplateEstimator`
"""
import numpy as np
from matplotlib import pyplot as plt
from skelm import ELMRegressor

X = np.arange(100).reshape(100, 1)
y = np.zeros((100, ))
estimator = ELMRegressor()
estimator.fit(X, y)
plt.plot(estimator.predict(X))
plt.show()
Example #18
def test_RegularizationL2_DifferentValue_ChangesPrediction(self):
    X, Y = self.data_reg
    Yh_1 = ELMRegressor(alpha=1e-7, random_state=0).fit(X, Y).predict(X)
    Yh_2 = ELMRegressor(alpha=1e+3, random_state=0).fit(X, Y).predict(X)

    self.assertFalse(np.allclose(Yh_1, Yh_2))
Example #19
def test_MultipleHL_DefaultValues(self):
    X, Y = self.data_reg
    elm = ELMRegressor(n_neurons=[2, 3])
    elm.fit(X, Y)
Example #20
def test_MultipleHL_Ufunc_SingleValue(self):
    X, Y = self.data_reg
    elm = ELMRegressor(n_neurons=[2, 3], ufunc='sigm')
    elm.fit(X, Y)
Example #21
def test_RegularizationL2_DifferentValue_ChangesPrediction(data_reg):
    X, Y = data_reg
    Yh_1 = ELMRegressor(alpha=1e-7, random_state=0).fit(X, Y).predict(X)
    Yh_2 = ELMRegressor(alpha=1e+3, random_state=0).fit(X, Y).predict(X)

    assert Yh_1 != approx(Yh_2)
Example #22
def test_Regression_Boston_BetterThanNaive(data_reg):
    elm = ELMRegressor(random_state=0)
    elm.fit(*data_reg)
    r2score = elm.score(*data_reg)
    assert r2score > 0.3
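The "better than naive" threshold refers to the R² score returned by score(): a naive baseline that always predicts the mean of the targets has R² = 0 by definition, so any positive score already beats it. A small illustration using sklearn's r2_score (the metric scikit-learn regressors use for score()):

import numpy as np
from sklearn.metrics import r2_score

rng = np.random.RandomState(0)
Y = rng.randn(200) * 3 + 10        # arbitrary regression targets
naive = np.full_like(Y, Y.mean())  # naive baseline: always predict the mean

print(r2_score(Y, naive))          # 0.0 -- the reference point for a naive model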