def test_LFM_gradient(artificial_data, models):
    """Gradient of the LFM model matches the true model and a finite-difference check.

    Both regressors are evaluated at the same free-parameter vector; their
    analytical gradients must agree with each other and with the numerical
    gradient of the log-posterior computed by numdifftools.
    """
    truth = Regressor(ss=models[0])
    lfm = Regressor(ss=models[1])
    # Disable the prior penalty and enable analytical jacobians on both models.
    for regressor in (truth, lfm):
        regressor._use_penalty = False
        regressor._use_jacobian = True

    dt, u, u1, y, *_ = truth._prepare_data(artificial_data, ['To', 'Qh'], 'Ti')

    eta_truth = deepcopy(truth.ss.parameters.eta_free)
    eta_lfm = deepcopy(lfm.ss.parameters.eta_free)

    g_truth = truth._eval_dlog_posterior(eta_truth, dt, u, u1, y)[1]
    g_lfm = lfm._eval_dlog_posterior(eta_lfm, dt, u, u1, y)[1]
    g_fd = nd.Gradient(truth._eval_log_posterior)(eta_truth, dt, u, u1, y)

    # Both models must start from identical free parameters.
    assert np.all(eta_truth == eta_lfm)
    # Normalized Euclidean distance between gradients is tiny...
    assert ned(g_truth, g_lfm) < 1e-7
    assert ned(g_truth, g_fd) < 1e-7
    # ...and every component agrees in sign and relative magnitude.
    assert np.all(np.sign(g_truth) == np.sign(g_fd))
    assert np.all(np.sign(g_truth) == np.sign(g_lfm))
    assert g_truth == pytest.approx(g_fd, rel=1e-6)
    assert g_truth == pytest.approx(g_lfm, rel=1e-6)
def check_grad_fd(data, reg):
    """Assert the analytical gradient of *reg* agrees with finite differences.

    Configures the regressor (no penalty, analytical jacobian, 'mfd' method),
    then compares its gradient of the log-posterior against the numerical
    gradient from numdifftools at the current free parameters.
    """
    reg._use_penalty = False
    reg._use_jacobian = True
    reg.ss.method = 'mfd'

    dt, u, u1, y, _ = reg._prepare_data(data, None, 'y')

    analytic = reg._eval_dlog_posterior(
        reg.ss.parameters.eta_free, dt, u, u1, y
    )[1]
    numeric = nd.Gradient(reg._eval_log_posterior)(
        reg.ss.parameters.eta_free, dt, u, u1, y
    )

    assert ned(analytic, numeric) < 1e-7
    assert np.all(np.sign(analytic) == np.sign(numeric))
    assert analytic == pytest.approx(numeric, rel=1e-6)
def test_gradient_RCModel(artificial_data_rc, reg, inputs, outputs):
    """Compare regressor gradient with numerical differentiation"""
    reg._use_penalty = False
    reg._use_jacobian = True

    dt, u, u1, y, *_ = reg._prepare_data(artificial_data_rc, inputs, outputs)
    # Evaluate both gradients at the same copy of the free parameters.
    theta = reg.ss.parameters.eta_free.copy()

    g_exact = reg._eval_dlog_posterior(theta, dt, u, u1, y)[1]
    g_numeric = nd.Gradient(reg._eval_log_posterior)(theta, dt, u, u1, y)

    assert ned(g_exact, g_numeric) < 1e-7
    assert np.all(np.sign(g_exact) == np.sign(g_numeric))
    assert g_exact == pytest.approx(g_numeric, rel=1e-6)
def check_grad(data, reg1, reg2):
    """Assert that two regressors produce matching analytical gradients on *data*.

    Both regressors are configured identically (no penalty, analytical
    jacobian, 'mfd' method); their gradients of the log-posterior must agree
    component-wise at their respective free parameters.
    """
    for regressor in (reg1, reg2):
        regressor._use_penalty = False
        regressor._use_jacobian = True
        regressor.ss.method = 'mfd'

    dt, u, u1, y, _ = reg1._prepare_data(data, None, 'y')

    g1 = reg1._eval_dlog_posterior(reg1.ss.parameters.eta_free, dt, u, u1, y)[1]
    g2 = reg2._eval_dlog_posterior(reg2.ss.parameters.eta_free, dt, u, u1, y)[1]

    assert ned(g1, g2) < 1e-7
    assert np.all(np.sign(g1) == np.sign(g2))
    assert g1 == pytest.approx(g2, rel=1e-6)