# Example 1
    def test_init_state(self):
        """Adadelta init_state stores the hyper-params and zeroed slot arrays."""
        opt_def = optim.Adadelta(
            learning_rate=0.1, rho=0.9, eps=1e-6, weight_decay=0.1)
        params = np.zeros((1, ))
        state = opt_def.init_state(params)

        # Hyper-parameters are recorded exactly as passed to the constructor.
        self.assertEqual(opt_def.hyper_params,
                         _AdadeltaHyperParams(0.1, 0.9, 1e-6, 0.1))
        # Fresh state: step count 0, both per-parameter accumulators all-zero.
        self.assertEqual(
            state,
            optim.OptimizerState(
                0, _AdadeltaParamState(np.zeros((1, )), np.zeros((1, )))))
# Example 2
 def test_apply_gradient(self):
     """A single Adadelta step updates params, accumulators, and step count."""
     opt_def = optim.Adadelta(
         learning_rate=0.1, rho=0.9, eps=1e-6, weight_decay=0.1)
     params = np.array([1.])
     grads = np.array([1.])
     # Start at step 1 with zeroed accumulators.
     state = optim.OptimizerState(
         1, _AdadeltaParamState(np.zeros((1, )), np.zeros((1, ))))

     new_param, new_state = opt_def.apply_gradient(
         opt_def.hyper_params, params, state, grads)

     # Expected values precomputed for one update with these hyper-params.
     np.testing.assert_allclose(new_param, np.array([0.9896838]))
     self.assertEqual(
         new_state,
         optim.OptimizerState(
             2, _AdadeltaParamState(np.array([0.1]), np.array([9.999902e-7]))))