Example #1
def test_polynomial_decay():
    lr1 = dr.polynomial_decay_lr(learning_rate, end_learning_rate, total_step,
                                 step_per_epoch, decay_epoch, power)
    assert len(lr1) == total_step
    lr2 = dr.polynomial_decay_lr(learning_rate, end_learning_rate, total_step,
                                 step_per_epoch, decay_epoch, power, True)
    assert len(lr2) == total_step
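
Note: these snippets come from MindSpore's dynamic learning-rate tests, where dr is (per the calls) the mindspore.nn.dynamic_lr module and the fixture values (learning_rate, end_learning_rate, total_step, step_per_epoch, decay_epoch, power) are defined at module scope. A minimal self-contained version of Example #1, with assumed fixture values, might look like this:

# Self-contained sketch of Example #1; the fixture values below are
# assumptions, not necessarily the ones used in the original test module.
from mindspore.nn import dynamic_lr as dr

learning_rate = 0.1       # assumed initial learning rate
end_learning_rate = 0.01  # assumed final learning rate
total_step = 30           # total number of training steps
step_per_epoch = 3        # steps in one epoch
decay_epoch = 2           # epochs over which the rate decays
power = 0.5               # polynomial exponent

# polynomial_decay_lr returns a Python list with one rate per step;
# the trailing True enables the update_decay_epoch behavior.
lr1 = dr.polynomial_decay_lr(learning_rate, end_learning_rate, total_step,
                             step_per_epoch, decay_epoch, power)
lr2 = dr.polynomial_decay_lr(learning_rate, end_learning_rate, total_step,
                             step_per_epoch, decay_epoch, power, True)
assert len(lr1) == len(lr2) == total_step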
Example #2
    def test_learning_rate_value(self):
        lr = -1.0
        with pytest.raises(ValueError):
            dr.exponential_decay_lr(lr, decay_rate, total_step, step_per_epoch, decay_epoch)

        with pytest.raises(ValueError):
            dr.polynomial_decay_lr(lr, end_learning_rate, total_step, step_per_epoch, decay_epoch, power)
Example #3
    def test_total_step1(self):
        total_step1 = 2.0
        with pytest.raises(ValueError):
            dr.exponential_decay_lr(learning_rate, decay_rate, total_step1, step_per_epoch, decay_epoch)

        with pytest.raises(ValueError):
            dr.cosine_decay_lr(min_lr, max_lr, total_step1, step_per_epoch, decay_epoch)

        with pytest.raises(ValueError):
            dr.polynomial_decay_lr(learning_rate, end_learning_rate, total_step1, step_per_epoch, decay_epoch, power)
Example #4
    def test_decay_epoch1(self):
        decay_epoch1 = 'm'
        with pytest.raises(TypeError):
            dr.exponential_decay_lr(learning_rate, decay_rate, total_step,
                                    step_per_epoch, decay_epoch1)

        with pytest.raises(TypeError):
            dr.cosine_decay_lr(min_lr, max_lr, total_step, step_per_epoch,
                               decay_epoch1)

        with pytest.raises(TypeError):
            dr.polynomial_decay_lr(learning_rate, end_learning_rate,
                                   total_step, step_per_epoch, decay_epoch1,
                                   power)
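
Examples #2 through #4 exercise the argument validation of the same functions: a negative learning rate and a non-integer total_step raise ValueError, while a string decay_epoch raises TypeError. For context, the schedule that polynomial_decay_lr produces follows the formula in the MindSpore documentation; a plain-Python sketch of that formula (my reading of the docs with update_decay_epoch left at its default of False, not the library's actual source) is:

# Sketch of the documented polynomial decay, one value per step.
def polynomial_decay_sketch(learning_rate, end_learning_rate, total_step,
                            step_per_epoch, decay_epoch, power):
    lrs = []
    for i in range(total_step):
        current_epoch = i // step_per_epoch          # epoch of step i
        tmp_epoch = min(current_epoch, decay_epoch)  # clamp after decay_epoch
        lrs.append((learning_rate - end_learning_rate)
                   * (1 - tmp_epoch / decay_epoch) ** power
                   + end_learning_rate)
    return lrs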
Example #5
def test_adam_group1():
    """ test_adam_group_lr_and_weight_decay """
    inputs = Tensor(np.ones([1, 64]).astype(np.float32))
    label = Tensor(np.zeros([1, 10]).astype(np.float32))
    net = Net()
    net.set_train()
    loss = nn.SoftmaxCrossEntropyWithLogits()
    net_with_loss = WithLossCell(net, loss)
    all_params = net.trainable_params()

    poly_decay_lr = polynomial_decay_lr(0.01,
                                        0.0001,
                                        total_step=10,
                                        step_per_epoch=1,
                                        decay_epoch=3,
                                        power=1.0)

    group_params = [{
        'params': [all_params[0]],
        'lr': poly_decay_lr,
        'weight_decay': 0.9
    }, {
        'params': [all_params[1]]
    }]
    optimizer = nn.Adam(group_params, learning_rate=0.1)

    train_network = TrainOneStepCell(net_with_loss, optimizer)
    _executor.compile(train_network, inputs, label)
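
Here Net, WithLossCell, TrainOneStepCell and _executor come from the surrounding MindSpore test module; the point of the example is that the list returned by polynomial_decay_lr can serve as the per-group 'lr' in nn.Adam's group-parameter API, while the second group falls back to the optimizer-level learning_rate=0.1. With these arguments the list holds ten values that decay linearly from 0.01 to 0.0001 over the first three epochs and then stay flat (values derived from the formula sketched above, so treat them as an assumption):

# The schedule is a plain list with one entry per training step.
poly_decay_lr = polynomial_decay_lr(0.01, 0.0001, total_step=10,
                                    step_per_epoch=1, decay_epoch=3,
                                    power=1.0)
assert len(poly_decay_lr) == 10
assert abs(poly_decay_lr[0] - 0.01) < 1e-9    # step 0: initial rate
assert abs(poly_decay_lr[3] - 0.0001) < 1e-9  # decayed floor from step 3 on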
Example #6
    def test_end_learning_rate_type(self):
        lr = True
        with pytest.raises(TypeError):
            dr.polynomial_decay_lr(learning_rate, lr, total_step,
                                   step_per_epoch, decay_epoch, power)
Example #7
    def test_update_decay_epoch(self):
        update_decay_epoch = 1
        with pytest.raises(TypeError):
            dr.polynomial_decay_lr(learning_rate, end_learning_rate,
                                   total_step, step_per_epoch, decay_epoch,
                                   power, update_decay_epoch)
Example #8
    def test_power(self):
        power1 = True
        with pytest.raises(TypeError):
            dr.polynomial_decay_lr(learning_rate, end_learning_rate,
                                   total_step, step_per_epoch, decay_epoch,
                                   power1)
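
Examples #6 and #8 pass True where a float is expected, and Example #7 passes the int 1 where a bool is expected. These corner cases get their own tests because Python's numeric tower makes bool and int easy to conflate in type checks, so the validators must tell them apart explicitly:

# Why bool/int confusion needs dedicated tests in Python:
assert isinstance(True, int)        # bool is a subclass of int
assert not isinstance(1, bool)      # but an int is not a bool
assert not isinstance(True, float)  # and a bool is not a float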