Example #1
  def test_apply_gradient(self):
    # Assumes: import numpy as np, from flax import optim, and
    # _AdagradParamState from flax's Adagrad module.
    optimizer_def = optim.Adagrad(learning_rate=0.1, eps=0.01)
    params = np.array([1.])
    state = optim.OptimizerState(1, _AdagradParamState(np.array([0.1])))
    grads = np.array([4.])
    new_params, new_state = optimizer_def.apply_gradient(
        optimizer_def.hyper_params, params, state, grads)
    # One step: accumulator 0.1 + 4.0**2 = 16.1, step count 1 -> 2.
    expected_new_state = optim.OptimizerState(
        2, _AdagradParamState(np.array([16.1])))
    expected_new_params = np.array([0.9005588])
    np.testing.assert_allclose(new_params, expected_new_params)
    self.assertEqual(new_state, expected_new_state)
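The expected values above follow the standard Adagrad update: the squared-gradient accumulator G becomes G + g**2, and the parameter moves by learning_rate * g / (sqrt(G) + eps). A minimal plain-numpy sketch reproducing the expected numbers (assuming flax's Adagrad adds eps outside the square root, which the expected value 0.9005588 is consistent with):

import numpy as np

lr, eps = 0.1, 0.01
param, accum, grad = 1.0, 0.1, 4.0

new_accum = accum + grad ** 2  # 0.1 + 16.0 = 16.1
new_param = param - lr * grad / (np.sqrt(new_accum) + eps)
print(new_accum)  # 16.1
print(new_param)  # 0.9005588...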
Example #2
  def test_init_state(self):
    # Assumes: import numpy as onp, from flax import optim, and the
    # _AdagradHyperParams / _AdagradParamState classes from flax's
    # Adagrad module.
    params = onp.zeros((1,))
    optimizer_def = optim.Adagrad(learning_rate=0.1, eps=0.01)
    state = optimizer_def.init_state(params)

    expected_hyper_params = _AdagradHyperParams(0.1, 0.01)
    self.assertEqual(optimizer_def.hyper_params, expected_hyper_params)
    # Fresh state: step count 0 and a zero accumulator shaped like params.
    expected_state = optim.OptimizerState(
        0, _AdagradParamState(onp.zeros((1,))))
    self.assertEqual(state, expected_state)
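For context, init_state is expected to return a step count of 0 and a squared-gradient accumulator zero-initialized to the shape of params (here onp is numpy imported under another name). A minimal sketch of that expectation in plain numpy, assuming _AdagradParamState simply wraps the accumulator array:

import numpy as onp

params = onp.zeros((1,))
step = 0                                 # no gradient steps taken yet
grad_sq_accum = onp.zeros_like(params)   # Adagrad accumulator starts at zero
print(step, grad_sq_accum)               # 0 [0.]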