import warnings
from unittest import TestCase

import numpy as np

# assuming the EZyRB package-level exports for these classes
from ezyrb import KNeighborsRegressor, POD, Database, ReducedOrderModel


class TestKNeighborsRegressor(TestCase):
    def test_predict(self):
        reg = KNeighborsRegressor(n_neighbors=1)
        reg.fit([[1, 2], [6, 7], [8, 9]], [[1, 0], [20, 5], [8, 6]])
        pred = reg.regressor.predict([[1, 2], [8, 9], [6, 7]])
        assert (pred[0] == [1, 0]).all()
        assert (pred[1] == [8, 6]).all()
        assert (pred[2] == [20, 5]).all()

    def test_kneighbors(self):
        reg = KNeighborsRegressor(n_neighbors=2)
        reg.fit([[1, 2], [6, 7], [8, 9]], [[1, 0], [20, 5], [8, 6]])
        neigh_idx = reg.regressor.kneighbors([[6, 6]],
                                             return_distance=False)[0]
        # neighbor indices are returned sorted by distance:
        # [6, 7] (index 1) first, then [8, 9] (index 2)
        assert neigh_idx[0] == 1
        assert neigh_idx[1] == 2
        assert len(neigh_idx) == 2

    def test_wrong2(self):
        # wrong number of output values for the given input points
        with warnings.catch_warnings():
            # NumPy >= 2.0 exposes this as np.exceptions.VisibleDeprecationWarning
            warnings.filterwarnings("ignore",
                                    category=np.VisibleDeprecationWarning)
            with self.assertRaises(Exception):
                reg = KNeighborsRegressor()
                reg.fit([[1, 2], [6], [8, 9]], [[20, 5], [8, 6]])

    def test_with_db_predict(self):
        reg = KNeighborsRegressor(n_neighbors=1)
        pod = POD()
        db = Database(
            np.array([1, 2, 3])[:, None],
            np.array([1, 5, 3])[:, None])
        rom = ReducedOrderModel(db, pod, reg)

        rom.fit()
        assert rom.predict([1]) == 1
        assert rom.predict([2]) == 5
        assert rom.predict([3]) == 3

    def test_params(self):
        reg = KNeighborsRegressor(n_neighbors=20, algorithm='kd_tree')
        assert reg.regressor.get_params()['n_neighbors'] == 20
        assert reg.regressor.get_params()['algorithm'] == 'kd_tree'

    def test_fit_biparam_bifunc(self):
        reg = KNeighborsRegressor()
        reg.fit([[1, 2], [6, 7], [8, 9]], [[1, 0], [20, 5], [8, 6]])
        assert reg.regressor.n_samples_fit_ == 3

    def test_fit_biparam_scalarfunc(self):
        reg = KNeighborsRegressor()
        reg.fit([[1, 2], [6, 7], [8, 9]], [1, 5, 6])
        assert reg.regressor.n_samples_fit_ == 3

    def test_fit_scalarparam_scalarfunc(self):
        reg = KNeighborsRegressor()
        reg.fit([1, 2, 5, 7, 2], [2, 5, 7, 83, 3])
        assert reg.regressor.n_samples_fit_ == 5

    def test_fit_onescalarparam_scalarfunc(self):
        reg = KNeighborsRegressor()
        reg.fit([1], [20])
        assert reg.regressor.n_samples_fit_ == 1
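
# A minimal sketch for running the test class above directly (assuming it is
# saved as its own standalone module; any runner such as pytest works too):
if __name__ == '__main__':
    import unittest
    unittest.main()
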
# Moreover, new state-of-the-art methods are on their way, so we invite you to read the [documentation](https://mathlab.github.io/EZyRB/) for the complete list of all the possibilities!
#
# In the next cell, we create two dictionaries with these objects, so that we can easily test every combination with simple `for` loops. **WARNING**: since several methods require solving an optimization problem (e.g. GPR, ANN, AE), the cell may take a few minutes to run.

# In[9]:

reductions = {
    'POD': POD('svd', rank=10),
    'AE': AE([200, 100, 10], [10, 100, 200], nn.Tanh(), nn.Tanh(), 10),
}

approximations = {
    #    'Linear': Linear(),
    'RBF': RBF(),
    'GPR': GPR(),
    'KNeighbors': KNeighborsRegressor(),
    'RadiusNeighbors': RadiusNeighborsRegressor(),
    'ANN': ANN([20, 20], nn.Tanh(), 10),
}
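
# Each (reduction, approximation) pair defines a ROM; a minimal sketch of a
# single combination before sweeping them all (this assumes `db` and the `ROM`
# alias from the earlier cells; `new_param` below is a hypothetical parameter
# value for the problem at hand):

single_rom = ROM(db, reductions['POD'], approximations['RBF'])
single_rom.fit()
# prediction = single_rom.predict(new_param)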

header = '{:10s}'.format('')
for name in approximations:
    header += ' {:>15s}'.format(name)

print(header)
for redname, redclass in reductions.items():
    row = '{:10s}'.format(redname)
    for approxname, approxclass in approximations.items():
        rom = ROM(db, redclass, approxclass)
        rom.fit()
        row += ' {:15e}'.format(rom.kfold_cv_error(n_splits=5).mean())
    print(row)
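
# Besides the k-fold cross validation above, a leave-one-out estimate can be
# computed with the same sweep; a minimal sketch, assuming the `loo_error()`
# method exposed by EZyRB's `ReducedOrderModel`:
for redname, redclass in reductions.items():
    for approxname, approxclass in approximations.items():
        rom = ROM(db, redclass, approxclass)
        rom.fit()
        print(redname, approxname, rom.loo_error().mean())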