Example #1
 def test_default_params(self):
     opt, data = util.build_rosen('rmsprop')
     for _ in opt.iterate(data):
         assert np.allclose(opt.learning_rate.eval(), 1e-4)
         assert np.allclose(opt.ewma.eval(), np.exp(-np.log(2) / 14))
         assert np.allclose(opt.epsilon.eval(), 1e-8)
         break
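
All of the snippets in this listing come from the test suite of the downhill optimization library and lean on a shared util.build_rosen helper: it builds an optimizer for the two-dimensional Rosenbrock problem and returns it together with a dataset to iterate over. The helper itself is not shown here, so the following is only a minimal reconstruction based on downhill.build's documented signature; the initial point, the monitor expressions, and the empty-batch dataset are illustrative guesses, not the library's actual test code.

import downhill
import numpy as np
import theano

def build_rosen(algo, name=True, monitor_gradients=False):
    """Build an optimizer for the 2D Rosenbrock function (a sketch).

    Returns (optimizer, dataset). The dataset here is a single empty
    batch, since the loss depends only on the shared parameter vector.
    """
    x = theano.shared(np.array([-1.1, 0.9], 'f'), name='x' if name else None)
    # Rosenbrock loss, written over slices so it also works for
    # parameter vectors longer than two elements.
    loss = (100 * (x[1:] - x[:-1] ** 2) ** 2 + (1 - x[:-1]) ** 2).sum()
    return downhill.build(
        algo,
        loss=loss,
        params=[x],
        inputs=[],
        monitors=[('x', x[:-1].sum()), ('y', x[1:].sum())],
        monitor_gradients=monitor_gradients), [[]]

The string passed as algo selects the optimizer class, apparently case-insensitively (examples below build 'RmsProp', 'EsGd', and 'ADADELTA'), while hyperparameters such as learning_rate or rms_halflife are supplied later, to opt.iterate().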
Example #2
 def test_default_params(self):
     opt, data = util.build_rosen('adam')
     for _ in opt.iterate(data):
         assert np.allclose(opt.learning_rate.eval(), 1e-4)
         assert np.allclose(opt.beta1.eval(), np.exp(-np.log(2) / 7))
         assert np.allclose(opt.beta2.eval(), np.exp(-np.log(2) / 69))
         assert np.allclose(opt.epsilon.eval(), 1e-8)
         break
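
A note on the np.exp(-np.log(2) / h) expressions that recur in these assertions: the tests parameterize exponential-moving-average decay rates by half-life. A per-step decay rate beta has half-life h when applying it h times halves the weight of an old observation:

    \beta^{h} = \frac{1}{2} \quad\Longleftrightarrow\quad \beta = e^{-\ln 2 / h}

So the Adam defaults asserted above amount to half-lives of 7 steps for beta1 and 69 steps for beta2, and the RMSProp default in Example #1 to a half-life of 14 steps.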
Example #3
 def test_params(self):
     opt, data = util.build_rosen('adadelta')
     for _ in opt.iterate(data,
                          rms_halflife=10,
                          rms_regularizer=20):
         assert np.allclose(opt.ewma.eval(), np.exp(-np.log(2) / 10))
         assert np.allclose(opt.epsilon.eval(), 20)
         break
Example #4
 def test_default_params(self):
     opt, data = util.build_rosen('rprop')
     for _ in opt.iterate(data):
         assert np.allclose(opt.learning_rate.eval(), 1e-4)
         assert np.allclose(opt.step_increase.eval(), 1.01)
         assert np.allclose(opt.step_decrease.eval(), 0.99)
         assert np.allclose(opt.min_step.eval(), 0)
         assert np.allclose(opt.max_step.eval(), 100)
         break
Example #5
 def test_default_params(self):
     opt, data = util.build_rosen('adam')
     for _ in opt.iterate(data):
         assert np.allclose(opt.learning_rate.eval(), 1e-4)
         assert np.allclose(opt.beta1_decay.eval(), 1 - 1e-6)
         assert np.allclose(opt.beta1.eval(), np.exp(-np.log(2) / 7))
         assert np.allclose(opt.beta2.eval(), np.exp(-np.log(2) / 69))
         assert np.allclose(opt.epsilon.eval(), 1e-8)
         break
Example #6
 def test_params(self):
     opt, data = util.build_rosen('esgd')
     for _ in opt.iterate(data,
                          learning_rate=0.3,
                          rms_halflife=10,
                          rms_regularizer=20):
         assert np.allclose(opt.learning_rate.eval(), 0.3)
         assert np.allclose(opt.ewma.eval(), np.exp(-np.log(2) / 10))
         assert np.allclose(opt.epsilon.eval(), 20)
         break
Example #7
 def test_rosen(self):
     opt, train = util.build_rosen('tester')
     assert isinstance(opt, Tester)

     # run the optimizer for at most three iterations. at each iteration,
     # the monitored x and y values on the training set should be at least
     # their validation-set counterparts.
     for i, (tm, vm) in enumerate(opt.iterate(train, max_updates=3)):
         assert tm['x'] >= vm['x']
         assert tm['y'] >= vm['y']
         assert i < 3
Example #8
 def test_params(self):
     opt, data = util.build_rosen('rmsprop')
     for _ in opt.iterate(data,
                          learning_rate=0.3,
                          rms_halflife=10,
                          rms_regularizer=20):
         assert np.allclose(opt.learning_rate.eval(), 0.3)
         assert np.allclose(opt.ewma.eval(), np.exp(-np.log(2) / 10))
         assert np.allclose(opt.epsilon.eval(), 20)
         break
Example #9
 def test_gradient_clip(self):
     opt, data = util.build_rosen('tester')
     for _ in opt.iterate(data, gradient_clip=1):
         assert opt.max_gradient_elem == 1
         break
     for _ in opt.iterate(data, max_gradient_clip=2):
         assert opt.max_gradient_elem == 2
         break
     for _ in opt.iterate(data, max_gradient_elem=3):
         assert opt.max_gradient_elem == 3
         break
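
Note how all three keyword spellings land on the same attribute: gradient_clip and max_gradient_clip are evidently accepted as legacy aliases for max_gradient_elem.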
Example #10
 def test_params(self):
     opt, data = util.build_rosen('esgd')
     opt.hv_method = 'lop'  # TODO(leif): incorporate into downhill.build()?
     for _ in opt.iterate(data,
                          learning_rate=0.3,
                          rms_halflife=10,
                          rms_regularizer=20):
         assert np.allclose(opt.learning_rate.eval(), 0.3)
         assert np.allclose(opt.ewma.eval(), np.exp(-np.log(2) / 10))
         assert np.allclose(opt.epsilon.eval(), 20)
         break
Example #11
 def test_default_params(self):
     opt, data = util.build_rosen('nag')
     for _ in opt.iterate(data):
         assert opt.nesterov is True
         assert np.allclose(opt.learning_rate.eval(), 1e-4)
         assert np.allclose(opt.momentum, 0)
         assert np.allclose(opt.patience, 5)
         assert np.allclose(opt.min_improvement, 0)
         assert np.allclose(opt.max_gradient_norm, 0)
         assert np.allclose(opt.max_gradient_elem, 0)
         break
Example #12
 def test_params(self):
     opt, data = util.build_rosen('adam')
     for _ in opt.iterate(data,
                          learning_rate=0.3,
                          beta1_halflife=10,
                          beta2_halflife=20,
                          rms_regularizer=11):
         assert np.allclose(opt.learning_rate.eval(), 0.3)
         assert np.allclose(opt.beta1.eval(), np.exp(-np.log(2) / 10))
         assert np.allclose(opt.beta2.eval(), np.exp(-np.log(2) / 20))
         assert np.allclose(opt.epsilon.eval(), 11)
         break
Example #13
 def test_rosen_unnamed(self):
     opt, train = util.build_rosen('tester', name=False, monitor_gradients=True)
     assert isinstance(opt, Tester)

     # run the optimizer for at most three iterations, checking the
     # monitored x and y values as in test_rosen above.
     for i, (tm, vm) in enumerate(opt.iterate(train, max_updates=3)):
         assert tm['x'] >= vm['x']
         assert tm['y'] >= vm['y']
         # with name=False the parameter gets an automatic name, so exactly
         # one 'unnamed' entry should appear among the monitored keys.
         assert 1 == sum(1 for k in tm if 'unnamed' in k), tm
         assert i < 3
Example #14
 def test_params(self):
     opt, data = util.build_rosen('rprop')
     for _ in opt.iterate(data,
                          learning_rate=0.3,
                          rprop_increase=22,
                          rprop_decrease=101,
                          rprop_min_step=50,
                          rprop_max_step=-10):
         assert np.allclose(opt.learning_rate.eval(), 0.3)
         assert np.allclose(opt.step_increase.eval(), 22)
         assert np.allclose(opt.step_decrease.eval(), 101)
         assert np.allclose(opt.min_step.eval(), 50)
         assert np.allclose(opt.max_step.eval(), -10)
         break
Example #15
 def test_params(self):
     opt, data = util.build_rosen('adam')
     for _ in opt.iterate(data,
                          learning_rate=0.3,
                          beta1_decay=0.5,
                          beta1_halflife=10,
                          beta2_halflife=20,
                          rms_regularizer=11):
         assert np.allclose(opt.learning_rate.eval(), 0.3)
         assert np.allclose(opt.beta1_decay.eval(), 0.5)
         assert np.allclose(opt.beta1.eval(), np.exp(-np.log(2) / 10))
         assert np.allclose(opt.beta2.eval(), np.exp(-np.log(2) / 20))
         assert np.allclose(opt.epsilon.eval(), 11)
         break
Example #16
 def test_params(self):
     opt, data = util.build_rosen('nag')
     for _ in opt.iterate(data,
                          learning_rate=0.3,
                          momentum=10,
                          patience=20,
                          min_improvement=0.1,
                          max_gradient_elem=4,
                          max_gradient_norm=5,
                          nesterov=False):
         assert opt.nesterov is True  # NAG forces nesterov=True even when False is passed
         assert np.allclose(opt.learning_rate.eval(), 0.3)
         assert np.allclose(opt.momentum, 10)
         assert np.allclose(opt.patience, 20)
         assert np.allclose(opt.min_improvement, 0.1)
         assert np.allclose(opt.max_gradient_norm, 5)
         assert np.allclose(opt.max_gradient_elem, 4)
         break
Example #17
 def test_sgd(self):
     assert isinstance(util.build_rosen('sgd')[0], downhill.SGD)
     assert isinstance(util.build_factor('sgd')[0], downhill.SGD)
Example #18
 def test_set_best_params(self):
     opt, _ = util.build_rosen('tester')
     opt._best_params = [[1, 2]]
     opt.set_params('best')
     assert np.allclose(opt._params[0].get_value(), [1, 2])
Example #19
 def test_adam(self):
     assert isinstance(util.build_rosen('Adam')[0], downhill.Adam)
Example #20
 def test_default_params(self):
     opt, data = util.build_rosen('adagrad')
     for _ in opt.iterate(data):
         assert np.allclose(opt.learning_rate.eval(), 1e-4)
         assert np.allclose(opt.epsilon.eval(), 1e-8)
         break
Example #21
 def test_params(self):
     opt, data = util.build_rosen('adagrad')
     for _ in opt.iterate(data, rms_regularizer=0.1):
         assert np.allclose(opt.learning_rate.eval(), 1e-4)
         assert np.allclose(opt.epsilon.eval(), 0.1)
         break
Example #22
 def test_esgd(self):
     assert isinstance(util.build_rosen('EsGd')[0], downhill.ESGD)
Example #23
 def test_nag(self):
     assert isinstance(util.build_rosen('nag')[0], downhill.NAG)
Example #24
 def test_params(self):
     opt, data = util.build_rosen('adagrad')
     for _ in opt.iterate(data, regularizer=0.1):
         assert np.allclose(opt.learning_rate.eval(), 1e-4)
         assert np.allclose(opt.epsilon.eval(), 0.1)
         break
Example #25
 def test_set_params(self):
     opt, _ = util.build_rosen('straight')
     opt.set_params([[1, 2]])
     assert np.allclose(opt._params[0].get_value(), [1, 2])
Example #26
 def test_rosen(self):
     util.assert_progress(*util.build_rosen('adagrad'))
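
The test_rosen examples here and below delegate to util.assert_progress, which this listing also omits. Judging from its call sites, it receives the (optimizer, dataset) pair from build_rosen plus keyword arguments such as learning_rate and monitor_gradients that are forwarded to opt.iterate(), and it checks that the optimizer actually makes progress. A rough sketch under those assumptions (the 'loss' monitor key and the exact progress criterion are guesses):

import numpy as np

def assert_progress(opt, data, **kwargs):
    """Run a handful of updates and check that the loss decreases."""
    losses = []
    for train_monitors, _ in opt.iterate(data, **kwargs):
        losses.append(train_monitors['loss'])
        if len(losses) == 10:
            break
    # if the optimizer made progress, early losses should exceed late ones
    assert np.mean(losses[:5]) > np.mean(losses[5:])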
Example #27
 def test_adadelta(self):
     assert isinstance(util.build_rosen('ADADELTA')[0], downhill.ADADELTA)
Example #28
 def test_rosen(self):
     util.assert_progress(*util.build_rosen('nag'))
Example #29
 def test_rmsprop(self):
     assert isinstance(util.build_rosen('RmsProp')[0], downhill.RMSProp)
Example #30
 def test_rosen(self):
     util.assert_progress(
         *util.build_rosen('sgd'),
         monitor_gradients=True)
Example #31
 def test_rosen(self):
     util.assert_progress(*util.build_rosen('esgd'), learning_rate=1e-6)
Example #32
 def test_rosen(self):
     util.assert_progress(*util.build_rosen('rmsprop'))
Example #33
 def test_gradient_clip(self):
     opt, data = util.build_rosen('straight')
     for _ in opt.iterate(data, max_gradient_elem=3):
         assert opt.max_gradient_elem == 3
         break
Example #34
 def test_rosen(self):
     util.assert_progress(*util.build_rosen('adadelta'))