def test_MultipleHiddenLayers(data_reg):
    """Per-layer parameter lists create exactly one hidden layer per entry."""
    X, Y = data_reg
    model = ELMRegressor(
        n_neurons=[2, 3],
        ufunc=['tanh', 'sigm'],
        density=[None, None],
        pairwise_metric=[None, None],
    )
    model.fit(X, Y)
    assert len(model.hidden_layers_) == 2
def test_MultipleHiddenLayers(self):
    """Per-layer parameter lists produce a matching number of SLFNs."""
    X, Y = self.data_reg
    model = ELMRegressor(
        n_neurons=[2, 3],
        ufunc=['tanh', 'sigm'],
        density=[None, None],
        pairwise_metric=[None, None],
    )
    model.fit(X, Y)
    self.assertEqual(len(model.SLFNs_), 2)
def test_MultipleHiddenLayers_MoreCombinations(data_reg):
    """Mixing ufunc strings, callables, densities and pairwise metrics works."""
    X, Y = data_reg
    model = ELMRegressor(
        n_neurons=[1, 1, 1, 1, 1],
        ufunc=['relu', 'sigm', np.sin, None, None],
        density=[None, 0.5, 0.8, None, None],
        pairwise_metric=[None, None, None, 'l1', 'chebyshev'],
    )
    model.fit(X, Y)
    assert len(model.hidden_layers_) == 5
def test_ELMClassifier_MultilabelClassification_Works(data_ml): X, Y = data_ml elm_c = ELMClassifier(random_state=0).fit(X, Y) elm_r = ELMRegressor(random_state=0).fit(X, Y) Yc_hat = elm_c.predict(X) Yr_hat = (elm_r.predict(X) >= 0.5).astype(np.int) assert Yc_hat == approx(Yr_hat)
def test_ELMClassifier_MultilabelClassification_Works(self):
    """Classifier output equals the regressor output thresholded at 0.5."""
    X, Y = self.data_ml
    classifier = ELMClassifier(random_state=0).fit(X, Y)
    regressor = ELMRegressor(random_state=0).fit(X, Y)
    thresholded = (regressor.predict(X) >= 0.5).astype(int)
    assert_allclose(classifier.predict(X), thresholded)
def test_MultipleHiddenLayers_MoreCombinations(self):
    """Five layers with mixed activation/density/metric settings all fit."""
    X, Y = self.data_reg
    model = ELMRegressor(
        n_neurons=[1, 1, 1, 1, 1],
        ufunc=['relu', 'sigm', np.sin, None, None],
        density=[None, 0.5, 0.8, None, None],
        pairwise_metric=[None, None, None, 'l1', 'chebyshev'],
    )
    model.fit(X, Y)
    self.assertEqual(len(model.SLFNs_), 5)
def test_ELMClassifier_ReportedScore_ActuallyIsClassificationScore(data_class):
    """Classifier predictions equal the argmax of one-hot regression output."""
    X, Y = data_class
    one_hot = np.vstack((Y == 0, Y == 1, Y == 2)).T
    classifier = ELMClassifier(random_state=0).fit(X, Y)
    regressor = ELMRegressor(random_state=0).fit(X, one_hot)
    predicted_labels = classifier.predict(X)
    argmax_labels = regressor.predict(X).argmax(1)
    assert predicted_labels == approx(argmax_labels)
def test_SineWave_Solves(): """A highly non-linear regression problem, with added strong noise. """ X = np.linspace(-1, 1, num=1000)[:, None] Y = np.sin(16 * X) * X + 0.2 * np.random.randn(1000)[:, None] elm = ELMRegressor(random_state=0) elm.fit(X, Y) Yt = elm.predict(X) MSE = np.mean((Y - Yt)**2) assert MSE < 0.3
def test_Default_SetNumberOfNeurons(self):
    """A larger hidden layer fits the regression data strictly better."""
    X, y = self.data_reg
    small = ELMRegressor(n_neurons=5, random_state=0).fit(X, y)
    large = ELMRegressor(n_neurons=50, random_state=0).fit(X, y)
    score_small = small.score(X, y)
    score_large = large.score(X, y)
    self.assertGreater(score_large, score_small)
    self.assertGreater(score_large, 0.33)
def test_Serialize_Solver(data_reg):
    """Predictions are unchanged after pickling and restoring the solver."""
    X, Y = data_reg
    elm = ELMRegressor(random_state=0)
    elm.fit(X, Y)
    predictions_before = elm.predict(X)
    payload = pickle.dumps(elm.solver_, pickle.HIGHEST_PROTOCOL)
    del elm.solver_
    elm.solver_ = pickle.loads(payload)
    predictions_after = elm.predict(X)
    assert predictions_before == pytest.approx(predictions_after)
def test_MultipleHL_WrongDimensions_Raises(data_reg):
    """Mismatched lengths of n_neurons and ufunc lists raise ValueError."""
    X, Y = data_reg
    model = ELMRegressor(n_neurons=[1, 2, 3, 4], ufunc=['relu', 'sigm'])
    with pytest.raises(ValueError):
        model.fit(X, Y)
def test_Regressor_WrongNumberOfFeatures_RaisesError(self):
    """Predicting with fewer features than seen at fit time raises."""
    X, T = self.data_reg
    model = ELMRegressor()
    model.fit(X, T)
    with self.assertRaises(ValueError):
        model.predict(X[:, 1:])
def test_MultipleHL_WrongDimensions_Raises(self):
    """Mismatched lengths of n_neurons and ufunc lists raise ValueError."""
    X, Y = self.data_reg
    model = ELMRegressor(n_neurons=[1, 2, 3, 4], ufunc=['relu', 'sigm'])
    with self.assertRaises(ValueError):
        model.fit(X, Y)
def test_MultipleHL_Pairwise_SingleValue(self):
    """A scalar pairwise_metric is broadcast across all hidden layers."""
    X, Y = self.data_reg
    model = ELMRegressor(n_neurons=[2, 3], pairwise_metric='l2')
    model.fit(X, Y)
def test_MultipleHL_Density_SingleValue(self):
    """A scalar density is broadcast across all hidden layers."""
    X, Y = self.data_reg
    model = ELMRegressor(n_neurons=[2, 3], density=0.7)
    model.fit(X, Y)
def test_Regressor_WrongNumberOfFeatures_RaisesError(data_reg):
    """Predicting with fewer features than seen at fit time raises."""
    X, T = data_reg
    model = ELMRegressor().fit(X, T)
    with pytest.raises(ValueError):
        model.predict(X[:, 1:])
""" =========================== Plotting Template Estimator =========================== An example plot of :class:`skelm.template.TemplateEstimator` """ import numpy as np from matplotlib import pyplot as plt from skelm import ELMRegressor X = np.arange(100).reshape(100, 1) y = np.zeros((100, )) estimator = ELMRegressor() estimator.fit(X, y) plt.plot(estimator.predict(X)) plt.show()
def test_RegularizationL2_DifferentValue_ChangesPrediction(self):
    """Very different alpha values must produce different predictions."""
    X, Y = self.data_reg
    weak = ELMRegressor(alpha=1e-7, random_state=0).fit(X, Y).predict(X)
    strong = ELMRegressor(alpha=1e+3, random_state=0).fit(X, Y).predict(X)
    self.assertFalse(np.allclose(weak, strong))
def test_MultipleHL_DefaultValues(self):
    """Multiple hidden layers fit with all other parameters at defaults."""
    X, Y = self.data_reg
    model = ELMRegressor(n_neurons=[2, 3])
    model.fit(X, Y)
def test_MultipleHL_Ufunc_SingleValue(self):
    """A scalar ufunc is broadcast across all hidden layers."""
    X, Y = self.data_reg
    model = ELMRegressor(n_neurons=[2, 3], ufunc='sigm')
    model.fit(X, Y)
def test_RegularizationL2_DifferentValue_ChangesPrediction(data_reg):
    """Very different alpha values must produce different predictions."""
    X, Y = data_reg
    weak = ELMRegressor(alpha=1e-7, random_state=0).fit(X, Y).predict(X)
    strong = ELMRegressor(alpha=1e+3, random_state=0).fit(X, Y).predict(X)
    assert weak != approx(strong)
def test_Regression_Boston_BetterThanNaive(data_reg):
    """ELM reaches an R^2 noticeably above the naive mean predictor (R^2=0)."""
    X, Y = data_reg
    elm = ELMRegressor(random_state=0).fit(X, Y)
    assert elm.score(X, Y) > 0.3