def test_univariate_scalar_adagrad_optimization(self):
    def objective_func(x):
        return x * np.log(x)

    var_init = np.array([2])
    optimizer = Optimizer(objective_func, var_init)
    min_value, var_value = optimizer.adagrad_optimize(
        tolerance=None, num_iterations=100000)
    self.assertAlmostEqual(min_value, -1 / np.e, places=3)
    self.assertAlmostEqual(var_value[0], 1 / np.e, places=3)
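
# Note on the expected values in the test above (analytic, independent of the
# optimizer): for f(x) = x * ln(x), f'(x) = ln(x) + 1 vanishes at x = 1/e, and
# f''(x) = 1/x > 0 there, so the minimum is f(1/e) = -1/e, as asserted.
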
def test_multivariate_vector_adagrad_optimization(self):
    def objective_func(x):
        return x[0]**2 + x[0] * x[1] + x[1]**2

    var_init = np.array([0.2, 0.5])
    optimizer = Optimizer(objective_func, var_init, scalar=False)
    min_value, var_value = optimizer.adagrad_optimize(
        tolerance=None, num_iterations=10000)
    self.assertAlmostEqual(min_value, 0, places=4)
    self.assertAlmostEqual(var_value[0], 0, places=4)
    self.assertAlmostEqual(var_value[1], 0, places=4)
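
# Note on the expected values in the test above: f(x) = x0^2 + x0*x1 + x1^2 is
# a positive-definite quadratic form (its Hessian [[2, 1], [1, 2]] has
# eigenvalues 1 and 3), so its unique minimum is f(0, 0) = 0 at the origin.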