示例#1
0
 def test_default_params(self):
     """RMSProp defaults: lr=1e-4, ewma from a 14-step halflife, eps=1e-8."""
     opt, data = util.build_rosen('rmsprop')
     for _step in opt.iterate(data):
         defaults = [(opt.learning_rate, 1e-4),
                     (opt.ewma, np.exp(-np.log(2) / 14)),
                     (opt.epsilon, 1e-8)]
         for shared, expected in defaults:
             assert np.allclose(shared.eval(), expected)
         break  # one update is enough to inspect the defaults
示例#2
0
 def test_default_params(self):
     """RMSProp should come up with its documented default hyperparameters."""
     opt, data = util.build_rosen('rmsprop')
     for _step in opt.iterate(data):
         halflife_ewma = np.exp(-np.log(2) / 14)
         assert np.allclose(opt.learning_rate.eval(), 1e-4)
         assert np.allclose(opt.ewma.eval(), halflife_ewma)
         assert np.allclose(opt.epsilon.eval(), 1e-8)
         break  # defaults are fixed; a single step suffices
示例#3
0
 def test_default_params(self):
     """Adam defaults: lr=1e-4, beta1/beta2 from 7- and 69-step halflives."""
     opt, data = util.build_rosen('adam')
     for _step in opt.iterate(data):
         defaults = [(opt.learning_rate, 1e-4),
                     (opt.beta1, np.exp(-np.log(2) / 7)),
                     (opt.beta2, np.exp(-np.log(2) / 69)),
                     (opt.epsilon, 1e-8)]
         for shared, expected in defaults:
             assert np.allclose(shared.eval(), expected)
         break  # one update is enough to inspect the defaults
示例#4
0
 def test_params(self):
     """Custom rms_halflife / rms_regularizer must reach ADADELTA's shares."""
     opt, data = util.build_rosen('adadelta')
     overrides = dict(rms_halflife=10, rms_regularizer=20)
     for _step in opt.iterate(data, **overrides):
         assert np.allclose(opt.ewma.eval(), np.exp(-np.log(2) / 10))
         assert np.allclose(opt.epsilon.eval(), 20)
         break  # overrides are applied before the first yield
示例#5
0
 def test_default_params(self):
     """RProp defaults: lr, step increase/decrease factors, step bounds."""
     opt, data = util.build_rosen('rprop')
     for _step in opt.iterate(data):
         defaults = [(opt.learning_rate, 1e-4),
                     (opt.step_increase, 1.01),
                     (opt.step_decrease, 0.99),
                     (opt.min_step, 0),
                     (opt.max_step, 100)]
         for shared, expected in defaults:
             assert np.allclose(shared.eval(), expected)
         break  # one update is enough to inspect the defaults
示例#6
0
 def test_default_params(self):
     """RProp should come up with its documented default hyperparameters."""
     opt, data = util.build_rosen('rprop')
     for _step in opt.iterate(data):
         lr = opt.learning_rate.eval()
         assert np.allclose(lr, 1e-4)
         assert np.allclose(opt.step_increase.eval(), 1.01)
         assert np.allclose(opt.step_decrease.eval(), 0.99)
         assert np.allclose(opt.min_step.eval(), 0)
         assert np.allclose(opt.max_step.eval(), 100)
         break  # defaults are fixed; a single step suffices
示例#7
0
 def test_default_params(self):
     """Adam defaults including the beta1 decay schedule constant."""
     opt, data = util.build_rosen('adam')
     for _step in opt.iterate(data):
         defaults = [(opt.learning_rate, 1e-4),
                     (opt.beta1_decay, 1 - 1e-6),
                     (opt.beta1, np.exp(-np.log(2) / 7)),
                     (opt.beta2, np.exp(-np.log(2) / 69)),
                     (opt.epsilon, 1e-8)]
         for shared, expected in defaults:
             assert np.allclose(shared.eval(), expected)
         break  # one update is enough to inspect the defaults
示例#8
0
 def test_params(self):
     """Custom lr / rms_halflife / rms_regularizer must reach ESGD."""
     opt, data = util.build_rosen('esgd')
     overrides = dict(learning_rate=0.3, rms_halflife=10, rms_regularizer=20)
     for _step in opt.iterate(data, **overrides):
         assert np.allclose(opt.learning_rate.eval(), 0.3)
         assert np.allclose(opt.ewma.eval(), np.exp(-np.log(2) / 10))
         assert np.allclose(opt.epsilon.eval(), 20)
         break  # overrides are applied before the first yield
示例#9
0
    def test_rosen(self):
        """Builder returns a Tester whose train monitors dominate validation."""
        opt, train = util.build_rosen('tester')
        assert isinstance(opt, Tester)

        # Run at most three updates; at every step the monitored x and y from
        # the training set must be >= the validation values.
        for step, (train_mon, valid_mon) in enumerate(
                opt.iterate(train, max_updates=3)):
            assert train_mon['x'] >= valid_mon['x']
            assert train_mon['y'] >= valid_mon['y']
            assert step < 3
示例#10
0
    def test_rosen(self):
        """Optimizer built for the Rosenbrock problem behaves as a Tester."""
        opt, train = util.build_rosen('tester')
        assert isinstance(opt, Tester)

        # Limit the run to three updates and compare training vs. validation
        # monitors on every yielded pair.
        for step, monitors in enumerate(opt.iterate(train, max_updates=3)):
            train_mon, valid_mon = monitors
            assert train_mon['x'] >= valid_mon['x']
            assert train_mon['y'] >= valid_mon['y']
            assert step < 3
示例#11
0
 def test_params(self):
     """Custom lr / rms_halflife / rms_regularizer must reach RMSProp."""
     opt, data = util.build_rosen('rmsprop')
     overrides = dict(learning_rate=0.3, rms_halflife=10, rms_regularizer=20)
     for _step in opt.iterate(data, **overrides):
         assert np.allclose(opt.learning_rate.eval(), 0.3)
         assert np.allclose(opt.ewma.eval(), np.exp(-np.log(2) / 10))
         assert np.allclose(opt.epsilon.eval(), 20)
         break  # overrides are applied before the first yield
示例#12
0
 def test_gradient_clip(self):
     """All three clip-kwarg spellings should set ``max_gradient_elem``."""
     opt, data = util.build_rosen('tester')
     # Each (keyword, value) pair is passed to iterate() and must land on
     # opt.max_gradient_elem unchanged.
     for keyword, value in (('gradient_clip', 1),
                            ('max_gradient_clip', 2),
                            ('max_gradient_elem', 3)):
         for _step in opt.iterate(data, **{keyword: value}):
             assert opt.max_gradient_elem == value
             break
示例#13
0
 def test_params(self):
     """ESGD with the L-operator Hessian-vector method honors overrides."""
     opt, data = util.build_rosen('esgd')
     opt.hv_method = 'lop'  # TODO(leif): incorporate into downhill.build()?
     overrides = dict(learning_rate=0.3, rms_halflife=10, rms_regularizer=20)
     for _step in opt.iterate(data, **overrides):
         assert np.allclose(opt.learning_rate.eval(), 0.3)
         assert np.allclose(opt.ewma.eval(), np.exp(-np.log(2) / 10))
         assert np.allclose(opt.epsilon.eval(), 20)
         break  # overrides are applied before the first yield
示例#14
0
 def test_default_params(self):
     """NAG defaults: nesterov on, lr=1e-4, momentum/clip knobs zeroed.

     Fix: PEP 8 discourages equality comparison against ``True``; assert
     the attribute's truthiness directly instead of ``== True``.
     """
     opt, data = util.build_rosen('nag')
     for _ in opt.iterate(data):
         assert opt.nesterov  # NAG enables Nesterov momentum by default
         assert np.allclose(opt.learning_rate.eval(), 1e-4)
         assert np.allclose(opt.momentum, 0)
         assert np.allclose(opt.patience, 5)
         assert np.allclose(opt.min_improvement, 0)
         assert np.allclose(opt.max_gradient_norm, 0)
         assert np.allclose(opt.max_gradient_elem, 0)
         break  # one update is enough to inspect the defaults
示例#15
0
 def test_gradient_clip(self):
     """Each supported clip keyword must update ``max_gradient_elem``."""
     opt, data = util.build_rosen('tester')
     clip_cases = [('gradient_clip', 1),
                   ('max_gradient_clip', 2),
                   ('max_gradient_elem', 3)]
     for keyword, value in clip_cases:
         for _step in opt.iterate(data, **{keyword: value}):
             assert opt.max_gradient_elem == value
             break
示例#16
0
 def test_default_params(self):
     """NAG defaults: nesterov on, lr=1e-4, momentum/clip knobs zeroed.

     Fix: PEP 8 discourages equality comparison against ``True``; assert
     the attribute's truthiness directly instead of ``== True``.
     """
     opt, data = util.build_rosen('nag')
     for _ in opt.iterate(data):
         assert opt.nesterov  # NAG enables Nesterov momentum by default
         assert np.allclose(opt.learning_rate.eval(), 1e-4)
         assert np.allclose(opt.momentum, 0)
         assert np.allclose(opt.patience, 5)
         assert np.allclose(opt.min_improvement, 0)
         assert np.allclose(opt.max_gradient_norm, 0)
         assert np.allclose(opt.max_gradient_elem, 0)
         break  # one update is enough to inspect the defaults
示例#17
0
 def test_params(self):
     """Adam halflife / regularizer overrides must reach the shared vars."""
     opt, data = util.build_rosen('adam')
     overrides = dict(learning_rate=0.3,
                      beta1_halflife=10,
                      beta2_halflife=20,
                      rms_regularizer=11)
     for _step in opt.iterate(data, **overrides):
         assert np.allclose(opt.learning_rate.eval(), 0.3)
         assert np.allclose(opt.beta1.eval(), np.exp(-np.log(2) / 10))
         assert np.allclose(opt.beta2.eval(), np.exp(-np.log(2) / 20))
         assert np.allclose(opt.epsilon.eval(), 11)
         break  # overrides are applied before the first yield
示例#18
0
    def test_rosen_unnamed(self):
        """An unnamed parameter should appear as exactly one monitor key."""
        opt, train = util.build_rosen(
            'tester', name=False, monitor_gradients=True)
        assert isinstance(opt, Tester)

        # Run at most three updates; training monitors dominate validation,
        # and exactly one monitor key refers to the unnamed parameter.
        for step, (train_mon, valid_mon) in enumerate(
                opt.iterate(train, max_updates=3)):
            assert train_mon['x'] >= valid_mon['x']
            assert train_mon['y'] >= valid_mon['y']
            unnamed_keys = [k for k in train_mon if 'unnamed' in k]
            assert len(unnamed_keys) == 1, train_mon
            assert step < 3
示例#19
0
    def test_rosen_unnamed(self):
        """Gradient monitors of an unnamed parameter get an 'unnamed' key."""
        opt, train = util.build_rosen('tester',
                                      name=False,
                                      monitor_gradients=True)
        assert isinstance(opt, Tester)

        # Three updates at most; check monitor ordering and that exactly one
        # key in the training monitors mentions the unnamed parameter.
        for step, monitors in enumerate(opt.iterate(train, max_updates=3)):
            train_mon, valid_mon = monitors
            assert train_mon['x'] >= valid_mon['x']
            assert train_mon['y'] >= valid_mon['y']
            assert 1 == len([k for k in train_mon if 'unnamed' in k]), train_mon
            assert step < 3
示例#20
0
 def test_params(self):
     """Every rprop_* keyword must land on its corresponding shared var."""
     opt, data = util.build_rosen('rprop')
     overrides = dict(learning_rate=0.3,
                      rprop_increase=22,
                      rprop_decrease=101,
                      rprop_min_step=50,
                      rprop_max_step=-10)
     for _step in opt.iterate(data, **overrides):
         expected = [(opt.learning_rate, 0.3),
                     (opt.step_increase, 22),
                     (opt.step_decrease, 101),
                     (opt.min_step, 50),
                     (opt.max_step, -10)]
         for shared, value in expected:
             assert np.allclose(shared.eval(), value)
         break  # overrides are applied before the first yield
示例#21
0
 def test_params(self):
     """Adam overrides including beta1_decay must reach the shared vars."""
     opt, data = util.build_rosen('adam')
     overrides = dict(learning_rate=0.3,
                      beta1_decay=0.5,
                      beta1_halflife=10,
                      beta2_halflife=20,
                      rms_regularizer=11)
     for _step in opt.iterate(data, **overrides):
         assert np.allclose(opt.learning_rate.eval(), 0.3)
         assert np.allclose(opt.beta1_decay.eval(), 0.5)
         assert np.allclose(opt.beta1.eval(), np.exp(-np.log(2) / 10))
         assert np.allclose(opt.beta2.eval(), np.exp(-np.log(2) / 20))
         assert np.allclose(opt.epsilon.eval(), 11)
         break  # overrides are applied before the first yield
示例#22
0
 def test_params(self):
     """RProp keyword overrides must be reflected by the shared variables."""
     opt, data = util.build_rosen('rprop')
     for _step in opt.iterate(data,
                              learning_rate=0.3,
                              rprop_increase=22,
                              rprop_decrease=101,
                              rprop_min_step=50,
                              rprop_max_step=-10):
         lr = opt.learning_rate.eval()
         assert np.allclose(lr, 0.3)
         assert np.allclose(opt.step_increase.eval(), 22)
         assert np.allclose(opt.step_decrease.eval(), 101)
         assert np.allclose(opt.min_step.eval(), 50)
         assert np.allclose(opt.max_step.eval(), -10)
         break  # overrides are applied before the first yield
示例#23
0
 def test_params(self):
     """NAG overrides must stick, except nesterov, which stays enabled.

     Fix: PEP 8 discourages equality comparison against ``True``; assert
     the attribute's truthiness directly instead of ``== True``.
     """
     opt, data = util.build_rosen('nag')
     for _ in opt.iterate(data,
                          learning_rate=0.3,
                          momentum=10,
                          patience=20,
                          min_improvement=0.1,
                          max_gradient_elem=4,
                          max_gradient_norm=5,
                          nesterov=False):
         assert opt.nesterov  # nesterov always True for NAG
         assert np.allclose(opt.learning_rate.eval(), 0.3)
         assert np.allclose(opt.momentum, 10)
         assert np.allclose(opt.patience, 20)
         assert np.allclose(opt.min_improvement, 0.1)
         assert np.allclose(opt.max_gradient_norm, 5)
         assert np.allclose(opt.max_gradient_elem, 4)
         break  # overrides are applied before the first yield
示例#24
0
 def test_params(self):
     """NAG overrides must stick, except nesterov, which stays enabled.

     Fix: PEP 8 discourages equality comparison against ``True``; assert
     the attribute's truthiness directly instead of ``== True``.
     """
     opt, data = util.build_rosen('nag')
     for _ in opt.iterate(data,
                          learning_rate=0.3,
                          momentum=10,
                          patience=20,
                          min_improvement=0.1,
                          max_gradient_elem=4,
                          max_gradient_norm=5,
                          nesterov=False):
         assert opt.nesterov  # nesterov always True for NAG
         assert np.allclose(opt.learning_rate.eval(), 0.3)
         assert np.allclose(opt.momentum, 10)
         assert np.allclose(opt.patience, 20)
         assert np.allclose(opt.min_improvement, 0.1)
         assert np.allclose(opt.max_gradient_norm, 5)
         assert np.allclose(opt.max_gradient_elem, 4)
         break  # overrides are applied before the first yield
示例#25
0
 def test_sgd(self):
     """Both the rosen and factor builders resolve 'sgd' to downhill.SGD."""
     for builder in (util.build_rosen, util.build_factor):
         opt = builder('sgd')[0]
         assert isinstance(opt, downhill.SGD)
示例#26
0
 def test_set_best_params(self):
     """set_params('best') should restore the cached best parameter values."""
     opt, _ = util.build_rosen('tester')
     best = [[1, 2]]
     opt._best_params = best
     opt.set_params('best')
     assert np.allclose(opt._params[0].get_value(), best[0])
示例#27
0
 def test_adam(self):
     """Algorithm lookup is case-insensitive: 'Adam' -> downhill.Adam."""
     opt = util.build_rosen('Adam')[0]
     assert isinstance(opt, downhill.Adam)
示例#28
0
 def test_default_params(self):
     """ADAGRAD defaults: lr=1e-4 and eps=1e-8."""
     opt, data = util.build_rosen('adagrad')
     for _step in opt.iterate(data):
         for shared, expected in ((opt.learning_rate, 1e-4),
                                  (opt.epsilon, 1e-8)):
             assert np.allclose(shared.eval(), expected)
         break  # one update is enough to inspect the defaults
示例#29
0
 def test_params(self):
     """rms_regularizer should override ADAGRAD's epsilon only."""
     opt, data = util.build_rosen('adagrad')
     for _step in opt.iterate(data, rms_regularizer=0.1):
         for shared, expected in ((opt.learning_rate, 1e-4),
                                  (opt.epsilon, 0.1)):
             assert np.allclose(shared.eval(), expected)
         break  # override is applied before the first yield
示例#30
0
 def test_esgd(self):
     """Algorithm lookup is case-insensitive: 'EsGd' -> downhill.ESGD."""
     opt = util.build_rosen('EsGd')[0]
     assert isinstance(opt, downhill.ESGD)
示例#31
0
 def test_nag(self):
     """Building with 'nag' should yield a downhill.NAG optimizer."""
     opt = util.build_rosen('nag')[0]
     assert isinstance(opt, downhill.NAG)
示例#32
0
 def test_params(self):
     """The bare 'regularizer' keyword should also override epsilon."""
     opt, data = util.build_rosen('adagrad')
     for _step in opt.iterate(data, regularizer=0.1):
         for shared, expected in ((opt.learning_rate, 1e-4),
                                  (opt.epsilon, 0.1)):
             assert np.allclose(shared.eval(), expected)
         break  # override is applied before the first yield
示例#33
0
 def test_set_params(self):
     """Explicit values passed to set_params should land on the params."""
     opt, _ = util.build_rosen('straight')
     values = [[1, 2]]
     opt.set_params(values)
     assert np.allclose(opt._params[0].get_value(), values[0])
示例#34
0
 def test_rosen(self):
     """ADAGRAD should make measurable progress on the Rosenbrock problem."""
     opt, data = util.build_rosen('adagrad')
     util.assert_progress(opt, data)
示例#35
0
 def test_adam(self):
     """Mixed-case 'Adam' should still build a downhill.Adam optimizer."""
     built = util.build_rosen('Adam')
     assert isinstance(built[0], downhill.Adam)
示例#36
0
 def test_adadelta(self):
     """Upper-case 'ADADELTA' should build a downhill.ADADELTA optimizer."""
     opt = util.build_rosen('ADADELTA')[0]
     assert isinstance(opt, downhill.ADADELTA)
示例#37
0
 def test_nag(self):
     """'nag' should map to the downhill.NAG optimizer class."""
     built = util.build_rosen('nag')
     assert isinstance(built[0], downhill.NAG)
示例#38
0
 def test_rosen(self):
     """NAG should make measurable progress on the Rosenbrock problem."""
     opt, data = util.build_rosen('nag')
     util.assert_progress(opt, data)
示例#39
0
 def test_rmsprop(self):
     """Mixed-case 'RmsProp' should build a downhill.RMSProp optimizer."""
     opt = util.build_rosen('RmsProp')[0]
     assert isinstance(opt, downhill.RMSProp)
示例#40
0
 def test_rosen(self):
     """SGD should make progress while gradient monitors are enabled."""
     opt, data = util.build_rosen('sgd')
     util.assert_progress(opt, data, monitor_gradients=True)
示例#41
0
 def test_adadelta(self):
     """'ADADELTA' should map to the downhill.ADADELTA optimizer class."""
     built = util.build_rosen('ADADELTA')
     assert isinstance(built[0], downhill.ADADELTA)
示例#42
0
 def test_rosen(self):
     """ESGD needs a small learning rate to make progress on Rosenbrock."""
     opt, data = util.build_rosen('esgd')
     util.assert_progress(opt, data, learning_rate=1e-6)
示例#43
0
 def test_rosen(self):
     """RMSProp should make measurable progress on the Rosenbrock problem."""
     opt, data = util.build_rosen('rmsprop')
     util.assert_progress(opt, data)
示例#44
0
 def test_rmsprop(self):
     """'RmsProp' should map to the downhill.RMSProp optimizer class."""
     built = util.build_rosen('RmsProp')
     assert isinstance(built[0], downhill.RMSProp)
示例#45
0
 def test_rosen(self):
     """NAG should drive the Rosenbrock loss downward."""
     built = util.build_rosen('nag')
     util.assert_progress(*built)
示例#46
0
 def test_esgd(self):
     """'EsGd' should map to the downhill.ESGD optimizer class."""
     built = util.build_rosen('EsGd')
     assert isinstance(built[0], downhill.ESGD)
示例#47
0
 def test_rosen(self):
     """SGD should make progress with gradient monitoring switched on."""
     built = util.build_rosen('sgd')
     util.assert_progress(*built, monitor_gradients=True)
示例#48
0
 def test_rosen(self):
     """RMSProp should drive the Rosenbrock loss downward."""
     built = util.build_rosen('rmsprop')
     util.assert_progress(*built)
示例#49
0
 def test_gradient_clip(self):
     """max_gradient_elem passed to iterate() must land on the optimizer."""
     opt, data = util.build_rosen('straight')
     clip = 3
     for _step in opt.iterate(data, max_gradient_elem=clip):
         assert opt.max_gradient_elem == clip
         break  # the kwarg is applied before the first yield
示例#50
0
 def test_sgd(self):
     """'sgd' resolves to downhill.SGD for both problem builders."""
     rosen_opt = util.build_rosen('sgd')[0]
     assert isinstance(rosen_opt, downhill.SGD)
     factor_opt = util.build_factor('sgd')[0]
     assert isinstance(factor_opt, downhill.SGD)
示例#51
0
 def test_rosen(self):
     """ADADELTA should make measurable progress on the Rosenbrock problem."""
     opt, data = util.build_rosen('adadelta')
     util.assert_progress(opt, data)
示例#52
0
 def test_set_best_params(self):
     """'best' mode of set_params should copy the stored best values over."""
     opt, _ = util.build_rosen('tester')
     opt._best_params = [[1, 2]]
     opt.set_params('best')
     restored = opt._params[0].get_value()
     assert np.allclose(restored, [1, 2])
示例#53
0
 def test_rosen(self):
     """ESGD with a tiny learning rate should still reduce the loss."""
     built = util.build_rosen('esgd')
     util.assert_progress(*built, learning_rate=1e-6)