Exemplo n.º 1
0
 def test_default_params(self):
     """RMSProp should come up with its documented default hyperparameters."""
     opt, data = util.build_rosen('rmsprop')
     expected = [
         ('learning_rate', 1e-4),
         ('ewma', np.exp(-np.log(2) / 14)),
         ('epsilon', 1e-8),
     ]
     for _ in opt.iterate(data):
         for attr, value in expected:
             assert np.allclose(getattr(opt, attr).eval(), value)
         break
Exemplo n.º 2
0
 def test_default_params(self):
     """Check RMSProp's default learning rate, EWMA decay, and regularizer."""
     opt, data = util.build_rosen('rmsprop')
     for _ in opt.iterate(data):
         halflife_14 = np.exp(-np.log(2) / 14)
         assert np.allclose(opt.learning_rate.eval(), 1e-4)
         assert np.allclose(opt.ewma.eval(), halflife_14)
         assert np.allclose(opt.epsilon.eval(), 1e-8)
         break
Exemplo n.º 3
0
 def test_default_params(self):
     """Adam defaults: 1e-4 learning rate, half-life betas, 1e-8 epsilon."""
     opt, data = util.build_rosen('adam')
     for _ in opt.iterate(data):
         checks = [
             (opt.learning_rate, 1e-4),
             (opt.beta1, np.exp(-np.log(2) / 7)),
             (opt.beta2, np.exp(-np.log(2) / 69)),
             (opt.epsilon, 1e-8),
         ]
         for shared, value in checks:
             assert np.allclose(shared.eval(), value)
         break
Exemplo n.º 4
0
 def test_params(self):
     """ADADELTA: rms_halflife and rms_regularizer map onto ewma/epsilon."""
     opt, data = util.build_rosen('adadelta')
     overrides = dict(rms_halflife=10, rms_regularizer=20)
     for _ in opt.iterate(data, **overrides):
         assert np.allclose(opt.ewma.eval(), np.exp(-np.log(2) / 10))
         assert np.allclose(opt.epsilon.eval(), 20)
         break
Exemplo n.º 5
0
 def test_default_params(self):
     """RProp should expose its documented default step parameters."""
     opt, data = util.build_rosen('rprop')
     defaults = [
         ('learning_rate', 1e-4),
         ('step_increase', 1.01),
         ('step_decrease', 0.99),
         ('min_step', 0),
         ('max_step', 100),
     ]
     for _ in opt.iterate(data):
         for attr, value in defaults:
             assert np.allclose(getattr(opt, attr).eval(), value)
         break
Exemplo n.º 6
0
 def test_default_params(self):
     """Check RProp defaults once the first update has been produced."""
     opt, data = util.build_rosen('rprop')

     def value(name):
         # All hyperparameters are shared variables; read them via eval().
         return getattr(opt, name).eval()

     for _ in opt.iterate(data):
         assert np.allclose(value('learning_rate'), 1e-4)
         assert np.allclose(value('step_increase'), 1.01)
         assert np.allclose(value('step_decrease'), 0.99)
         assert np.allclose(value('min_step'), 0)
         assert np.allclose(value('max_step'), 100)
         break
Exemplo n.º 7
0
 def test_default_params(self):
     """Adam defaults, including the beta1 decay schedule constant."""
     opt, data = util.build_rosen('adam')
     expected = [
         ('learning_rate', 1e-4),
         ('beta1_decay', 1 - 1e-6),
         ('beta1', np.exp(-np.log(2) / 7)),
         ('beta2', np.exp(-np.log(2) / 69)),
         ('epsilon', 1e-8),
     ]
     for _ in opt.iterate(data):
         for attr, value in expected:
             assert np.allclose(getattr(opt, attr).eval(), value)
         break
Exemplo n.º 8
0
 def test_params(self):
     """ESGD: iterate() kwargs override learning rate, ewma, and epsilon."""
     opt, data = util.build_rosen('esgd')
     overrides = dict(learning_rate=0.3, rms_halflife=10, rms_regularizer=20)
     for _ in opt.iterate(data, **overrides):
         assert np.allclose(opt.learning_rate.eval(), 0.3)
         assert np.allclose(opt.ewma.eval(), np.exp(-np.log(2) / 10))
         assert np.allclose(opt.epsilon.eval(), 20)
         break
Exemplo n.º 9
0
    def test_rosen(self):
        """Training monitors should dominate validation monitors each step."""
        opt, train = util.build_rosen('tester')
        assert isinstance(opt, Tester)

        # Run for at most three updates; at every step the monitored x and y
        # values from training must be at least the validation values.
        for step, (train_monitors, valid_monitors) in enumerate(
                opt.iterate(train, max_updates=3)):
            assert train_monitors['x'] >= valid_monitors['x']
            assert train_monitors['y'] >= valid_monitors['y']
            assert step < 3
Exemplo n.º 10
0
    def test_rosen(self):
        """The iterator honors max_updates and yields ordered monitor pairs."""
        opt, train = util.build_rosen('tester')
        assert isinstance(opt, Tester)

        # count starts at 1; there must never be more than three yields, and
        # each yield's training monitors dominate its validation monitors.
        for count, (tm, vm) in enumerate(opt.iterate(train, max_updates=3), 1):
            assert tm['x'] >= vm['x']
            assert tm['y'] >= vm['y']
            assert count <= 3
Exemplo n.º 11
0
 def test_params(self):
     """RMSProp: kwargs to iterate() override the default hyperparameters."""
     opt, data = util.build_rosen('rmsprop')
     params = {'learning_rate': 0.3, 'rms_halflife': 10, 'rms_regularizer': 20}
     for _ in opt.iterate(data, **params):
         assert np.allclose(opt.learning_rate.eval(), 0.3)
         assert np.allclose(opt.ewma.eval(), np.exp(-np.log(2) / 10))
         assert np.allclose(opt.epsilon.eval(), 20)
         break
Exemplo n.º 12
0
 def test_gradient_clip(self):
     """All three clipping keyword spellings set max_gradient_elem."""
     opt, data = util.build_rosen('tester')
     for kwarg, value in (('gradient_clip', 1),
                          ('max_gradient_clip', 2),
                          ('max_gradient_elem', 3)):
         for _ in opt.iterate(data, **{kwarg: value}):
             assert opt.max_gradient_elem == value
             break
Exemplo n.º 13
0
 def test_params(self):
     """ESGD with the 'lop' Hessian-vector method still honors kwargs."""
     opt, data = util.build_rosen('esgd')
     opt.hv_method = 'lop'  # TODO(leif): incorporate into downhill.build()?
     overrides = dict(learning_rate=0.3, rms_halflife=10, rms_regularizer=20)
     for _ in opt.iterate(data, **overrides):
         assert np.allclose(opt.learning_rate.eval(), 0.3)
         assert np.allclose(opt.ewma.eval(), np.exp(-np.log(2) / 10))
         assert np.allclose(opt.epsilon.eval(), 20)
         break
Exemplo n.º 14
0
 def test_default_params(self):
     """NAG defaults: nesterov on, tiny learning rate, zeroed clipping.

     Checks every hyperparameter the optimizer exposes after one update.
     """
     opt, data = util.build_rosen('nag')
     for _ in opt.iterate(data):
         # PEP 8 / E712: compare booleans with ``is``, not ``==``.
         assert opt.nesterov is True
         assert np.allclose(opt.learning_rate.eval(), 1e-4)
         assert np.allclose(opt.momentum, 0)
         assert np.allclose(opt.patience, 5)
         assert np.allclose(opt.min_improvement, 0)
         assert np.allclose(opt.max_gradient_norm, 0)
         assert np.allclose(opt.max_gradient_elem, 0)
         break
Exemplo n.º 15
0
 def test_gradient_clip(self):
     """gradient_clip, max_gradient_clip, max_gradient_elem all clip elements."""
     opt, data = util.build_rosen('tester')

     def check(expected, **kwargs):
         # Start one iteration with the given kwarg, then verify the clip value.
         for _ in opt.iterate(data, **kwargs):
             assert opt.max_gradient_elem == expected
             break

     check(1, gradient_clip=1)
     check(2, max_gradient_clip=2)
     check(3, max_gradient_elem=3)
Exemplo n.º 16
0
 def test_default_params(self):
     """Verify NAG's default hyperparameters after a single update."""
     opt, data = util.build_rosen('nag')
     for _ in opt.iterate(data):
         # PEP 8 / E712: boolean comparison uses ``is``, not ``==``.
         assert opt.nesterov is True
         assert np.allclose(opt.learning_rate.eval(), 1e-4)
         assert np.allclose(opt.momentum, 0)
         assert np.allclose(opt.patience, 5)
         assert np.allclose(opt.min_improvement, 0)
         assert np.allclose(opt.max_gradient_norm, 0)
         assert np.allclose(opt.max_gradient_elem, 0)
         break
Exemplo n.º 17
0
 def test_params(self):
     """Adam: halflife kwargs become beta decay rates; regularizer is epsilon."""
     opt, data = util.build_rosen('adam')
     for _ in opt.iterate(data,
                          learning_rate=0.3,
                          beta1_halflife=10,
                          beta2_halflife=20,
                          rms_regularizer=11):
         checks = [
             (opt.learning_rate, 0.3),
             (opt.beta1, np.exp(-np.log(2) / 10)),
             (opt.beta2, np.exp(-np.log(2) / 20)),
             (opt.epsilon, 11),
         ]
         for shared, value in checks:
             assert np.allclose(shared.eval(), value)
         break
Exemplo n.º 18
0
    def test_rosen_unnamed(self):
        """With name=False, exactly one monitor key contains 'unnamed'."""
        opt, train = util.build_rosen('tester', name=False, monitor_gradients=True)
        assert isinstance(opt, Tester)

        # Run at most three updates; training monitors dominate validation,
        # and the auto-generated parameter name shows up exactly once.
        for step, (tm, vm) in enumerate(opt.iterate(train, max_updates=3)):
            assert tm['x'] >= vm['x']
            assert tm['y'] >= vm['y']
            unnamed_keys = [key for key in tm if 'unnamed' in key]
            assert len(unnamed_keys) == 1, tm
            assert step < 3
Exemplo n.º 19
0
    def test_rosen_unnamed(self):
        """An unnamed parameter produces one auto-named gradient monitor."""
        opt, train = util.build_rosen('tester',
                                      name=False,
                                      monitor_gradients=True)
        assert isinstance(opt, Tester)

        # Run for at most three updates, checking monitor ordering and the
        # presence of exactly one auto-generated 'unnamed' monitor key.
        for step, monitors in enumerate(opt.iterate(train, max_updates=3)):
            train_monitors, valid_monitors = monitors
            assert train_monitors['x'] >= valid_monitors['x']
            assert train_monitors['y'] >= valid_monitors['y']
            unnamed = sum(1 for key in train_monitors if 'unnamed' in key)
            assert unnamed == 1, train_monitors
            assert step < 3
Exemplo n.º 20
0
 def test_params(self):
     """RProp: step-size kwargs are stored verbatim on the optimizer."""
     opt, data = util.build_rosen('rprop')
     overrides = dict(learning_rate=0.3,
                      rprop_increase=22,
                      rprop_decrease=101,
                      rprop_min_step=50,
                      rprop_max_step=-10)
     for _ in opt.iterate(data, **overrides):
         assert np.allclose(opt.learning_rate.eval(), 0.3)
         assert np.allclose(opt.step_increase.eval(), 22)
         assert np.allclose(opt.step_decrease.eval(), 101)
         assert np.allclose(opt.min_step.eval(), 50)
         assert np.allclose(opt.max_step.eval(), -10)
         break
Exemplo n.º 21
0
 def test_params(self):
     """Adam: beta1_decay and the halflife kwargs all reach the optimizer."""
     opt, data = util.build_rosen('adam')
     overrides = dict(learning_rate=0.3,
                      beta1_decay=0.5,
                      beta1_halflife=10,
                      beta2_halflife=20,
                      rms_regularizer=11)
     for _ in opt.iterate(data, **overrides):
         assert np.allclose(opt.learning_rate.eval(), 0.3)
         assert np.allclose(opt.beta1_decay.eval(), 0.5)
         assert np.allclose(opt.beta1.eval(), np.exp(-np.log(2) / 10))
         assert np.allclose(opt.beta2.eval(), np.exp(-np.log(2) / 20))
         assert np.allclose(opt.epsilon.eval(), 11)
         break
Exemplo n.º 22
0
 def test_params(self):
     """RProp overrides: each rprop_* kwarg lands on the matching attribute."""
     opt, data = util.build_rosen('rprop')
     for _ in opt.iterate(data,
                          learning_rate=0.3,
                          rprop_increase=22,
                          rprop_decrease=101,
                          rprop_min_step=50,
                          rprop_max_step=-10):
         expected = [
             ('learning_rate', 0.3),
             ('step_increase', 22),
             ('step_decrease', 101),
             ('min_step', 50),
             ('max_step', -10),
         ]
         for attr, value in expected:
             assert np.allclose(getattr(opt, attr).eval(), value)
         break
Exemplo n.º 23
0
 def test_params(self):
     """NAG honors iterate() overrides, but the nesterov flag stays forced on.

     Passing nesterov=False must be ignored: NAG is Nesterov momentum by
     definition.
     """
     opt, data = util.build_rosen('nag')
     for _ in opt.iterate(data,
                          learning_rate=0.3,
                          momentum=10,
                          patience=20,
                          min_improvement=0.1,
                          max_gradient_elem=4,
                          max_gradient_norm=5,
                          nesterov=False):
         # PEP 8 / E712: use ``is`` for booleans; nesterov always True for NAG.
         assert opt.nesterov is True
         assert np.allclose(opt.learning_rate.eval(), 0.3)
         assert np.allclose(opt.momentum, 10)
         assert np.allclose(opt.patience, 20)
         assert np.allclose(opt.min_improvement, 0.1)
         assert np.allclose(opt.max_gradient_norm, 5)
         assert np.allclose(opt.max_gradient_elem, 4)
         break
Exemplo n.º 24
0
 def test_params(self):
     """NAG applies iterate() overrides; nesterov=False is deliberately ignored."""
     opt, data = util.build_rosen('nag')
     for _ in opt.iterate(data,
                          learning_rate=0.3,
                          momentum=10,
                          patience=20,
                          min_improvement=0.1,
                          max_gradient_elem=4,
                          max_gradient_norm=5,
                          nesterov=False):
         # PEP 8 / E712: boolean comparison via ``is``; always True for NAG.
         assert opt.nesterov is True
         assert np.allclose(opt.learning_rate.eval(), 0.3)
         assert np.allclose(opt.momentum, 10)
         assert np.allclose(opt.patience, 20)
         assert np.allclose(opt.min_improvement, 0.1)
         assert np.allclose(opt.max_gradient_norm, 5)
         assert np.allclose(opt.max_gradient_elem, 4)
         break
Exemplo n.º 25
0
 def test_sgd(self):
     """The 'sgd' name builds downhill.SGD for both test problems."""
     rosen_opt, _ = util.build_rosen('sgd')
     assert isinstance(rosen_opt, downhill.SGD)
     factor_opt = util.build_factor('sgd')[0]
     assert isinstance(factor_opt, downhill.SGD)
Exemplo n.º 26
0
 def test_set_best_params(self):
     """set_params('best') restores the stored best parameter values."""
     opt, _ = util.build_rosen('tester')
     opt._best_params = [[1, 2]]
     opt.set_params('best')
     restored = opt._params[0].get_value()
     assert np.allclose(restored, [1, 2])
Exemplo n.º 27
0
 def test_adam(self):
     """Optimizer lookup is case-insensitive: 'Adam' builds downhill.Adam."""
     built, _ = util.build_rosen('Adam')
     assert isinstance(built, downhill.Adam)
Exemplo n.º 28
0
 def test_default_params(self):
     """ADAGRAD defaults to a 1e-4 learning rate and 1e-8 regularizer."""
     optimizer, dataset = util.build_rosen('adagrad')
     for _ in optimizer.iterate(dataset):
         assert np.allclose(optimizer.learning_rate.eval(), 1e-4)
         assert np.allclose(optimizer.epsilon.eval(), 1e-8)
         break
Exemplo n.º 29
0
 def test_params(self):
     """ADAGRAD: rms_regularizer passed to iterate() becomes epsilon."""
     optimizer, dataset = util.build_rosen('adagrad')
     for _ in optimizer.iterate(dataset, rms_regularizer=0.1):
         assert np.allclose(optimizer.learning_rate.eval(), 1e-4)
         assert np.allclose(optimizer.epsilon.eval(), 0.1)
         break
Exemplo n.º 30
0
 def test_esgd(self):
     """Optimizer lookup ignores case: 'EsGd' builds downhill.ESGD."""
     built, _ = util.build_rosen('EsGd')
     assert isinstance(built, downhill.ESGD)
Exemplo n.º 31
0
 def test_nag(self):
     """'nag' builds a downhill.NAG optimizer instance."""
     optimizer = util.build_rosen('nag')[0]
     assert isinstance(optimizer, downhill.NAG)
Exemplo n.º 32
0
 def test_params(self):
     """ADAGRAD: the regularizer kwarg sets epsilon; learning rate unchanged."""
     # NOTE(review): this passes ``regularizer`` while a sibling test passes
     # ``rms_regularizer`` — confirm which spelling this downhill version uses.
     optimizer, dataset = util.build_rosen('adagrad')
     for _ in optimizer.iterate(dataset, regularizer=0.1):
         assert np.allclose(optimizer.learning_rate.eval(), 1e-4)
         assert np.allclose(optimizer.epsilon.eval(), 0.1)
         break
Exemplo n.º 33
0
 def test_set_params(self):
     """set_params() with explicit values writes them into the shared params."""
     opt, _ = util.build_rosen('straight')
     opt.set_params([[1, 2]])
     stored = opt._params[0].get_value()
     assert np.allclose(stored, [1, 2])
Exemplo n.º 34
0
 def test_rosen(self):
     """ADAGRAD should make progress on the Rosenbrock problem."""
     opt, data = util.build_rosen('adagrad')
     util.assert_progress(opt, data)
Exemplo n.º 35
0
 def test_adam(self):
     """'Adam' (mixed case) resolves to the downhill.Adam optimizer."""
     optimizer = util.build_rosen('Adam')[0]
     assert isinstance(optimizer, downhill.Adam)
Exemplo n.º 36
0
 def test_adadelta(self):
     """'ADADELTA' resolves to the downhill.ADADELTA optimizer."""
     built, _ = util.build_rosen('ADADELTA')
     assert isinstance(built, downhill.ADADELTA)
Exemplo n.º 37
0
 def test_nag(self):
     """Building with 'nag' yields a downhill.NAG instance."""
     built, _ = util.build_rosen('nag')
     assert isinstance(built, downhill.NAG)
Exemplo n.º 38
0
 def test_rosen(self):
     """NAG should make progress on the Rosenbrock problem."""
     opt, data = util.build_rosen('nag')
     util.assert_progress(opt, data)
Exemplo n.º 39
0
 def test_rmsprop(self):
     """'RmsProp' (mixed case) resolves to downhill.RMSProp."""
     built, _ = util.build_rosen('RmsProp')
     assert isinstance(built, downhill.RMSProp)
Exemplo n.º 40
0
 def test_rosen(self):
     """SGD makes progress on Rosenbrock while monitoring gradients."""
     opt, data = util.build_rosen('sgd')
     util.assert_progress(opt, data, monitor_gradients=True)
Exemplo n.º 41
0
 def test_adadelta(self):
     """Upper-case 'ADADELTA' builds the downhill.ADADELTA optimizer."""
     optimizer = util.build_rosen('ADADELTA')[0]
     assert isinstance(optimizer, downhill.ADADELTA)
Exemplo n.º 42
0
 def test_rosen(self):
     """ESGD needs a small learning rate to make progress on Rosenbrock."""
     opt, data = util.build_rosen('esgd')
     util.assert_progress(opt, data, learning_rate=1e-6)
Exemplo n.º 43
0
 def test_rosen(self):
     """RMSProp should make progress on the Rosenbrock problem."""
     opt, data = util.build_rosen('rmsprop')
     util.assert_progress(opt, data)
Exemplo n.º 44
0
 def test_rmsprop(self):
     """Mixed-case 'RmsProp' resolves to the downhill.RMSProp class."""
     optimizer = util.build_rosen('RmsProp')[0]
     assert isinstance(optimizer, downhill.RMSProp)
Exemplo n.º 45
0
 def test_rosen(self):
     """The NAG optimizer makes measurable progress on Rosenbrock."""
     optimizer, dataset = util.build_rosen('nag')
     util.assert_progress(optimizer, dataset)
Exemplo n.º 46
0
 def test_esgd(self):
     """Mixed-case 'EsGd' resolves to the downhill.ESGD class."""
     optimizer = util.build_rosen('EsGd')[0]
     assert isinstance(optimizer, downhill.ESGD)
Exemplo n.º 47
0
 def test_rosen(self):
     """SGD with gradient monitoring still makes progress on Rosenbrock."""
     opt, data = util.build_rosen('sgd')
     util.assert_progress(opt, data, monitor_gradients=True)
Exemplo n.º 48
0
 def test_rosen(self):
     """The RMSProp optimizer makes measurable progress on Rosenbrock."""
     optimizer, dataset = util.build_rosen('rmsprop')
     util.assert_progress(optimizer, dataset)
Exemplo n.º 49
0
 def test_gradient_clip(self):
     """max_gradient_elem passed to iterate() is stored on the optimizer."""
     opt, data = util.build_rosen('straight')
     iterator = opt.iterate(data, max_gradient_elem=3)
     for _ in iterator:
         assert opt.max_gradient_elem == 3
         break
Exemplo n.º 50
0
 def test_sgd(self):
     """'sgd' resolves to downhill.SGD on the Rosenbrock and factor problems."""
     for builder in (util.build_rosen, util.build_factor):
         assert isinstance(builder('sgd')[0], downhill.SGD)
Exemplo n.º 51
0
 def test_rosen(self):
     """ADADELTA should make progress on the Rosenbrock problem."""
     opt, data = util.build_rosen('adadelta')
     util.assert_progress(opt, data)
Exemplo n.º 52
0
 def test_set_best_params(self):
     """The 'best' sentinel copies _best_params into the live parameters."""
     opt, _ = util.build_rosen('tester')
     best = [[1, 2]]
     opt._best_params = best
     opt.set_params('best')
     assert np.allclose(opt._params[0].get_value(), best[0])
Exemplo n.º 53
0
 def test_rosen(self):
     """ESGD makes progress on Rosenbrock given a reduced learning rate."""
     optimizer, dataset = util.build_rosen('esgd')
     util.assert_progress(optimizer, dataset, learning_rate=1e-6)