Example 1
    def test_learning_rate_value(self):
        lr = -1.0
        with pytest.raises(ValueError):
            lr_schedules.ExponentialDecayLR(lr, decay_rate, decay_steps)

        with pytest.raises(ValueError):
            lr_schedules.PolynomialDecayLR(lr, end_learning_rate, decay_steps,
                                           power)
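
These snippets appear to come from a MindSpore learning-rate-schedule test suite and reference names (learning_rate, decay_rate, decay_steps, end_learning_rate, power, min_lr, max_lr, global_step) defined once at module level. A minimal sketch of those shared fixtures, assuming the schedules live in mindspore.nn.learning_rate_schedule; the concrete values here are assumptions, not the originals:

import numpy as np
import pytest
from mindspore import Tensor
import mindspore.common.dtype as mstype
import mindspore.nn.learning_rate_schedule as lr_schedules

# Assumed module-level fixtures shared by the snippets below;
# the values are illustrative.
learning_rate = 0.1
end_learning_rate = 0.01
decay_rate = 0.9
decay_steps = 4
power = 0.5
min_lr = 0.01
max_lr = 0.1
global_step = Tensor(2, mstype.int32)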
Example 2
    def test_learning_rate_type(self):
        lr = True
        with pytest.raises(TypeError):
            lr_schedules.ExponentialDecayLR(lr, decay_rate, decay_steps)

        with pytest.raises(TypeError):
            lr_schedules.PolynomialDecayLR(lr, end_learning_rate, decay_steps,
                                           power)
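
Note that lr = True raises TypeError even though bool is a subclass of int in Python: the constructors validate booleans explicitly. A hypothetical sketch of that validation pattern, not MindSpore's actual validator:

def check_positive_float(value, arg_name):
    # Reject bool first: isinstance(True, int) holds in Python, so a plain
    # numeric isinstance check alone would let booleans through.
    if isinstance(value, bool) or not isinstance(value, (int, float)):
        raise TypeError(f"{arg_name} must be a number, got {type(value).__name__}")
    if value <= 0:
        raise ValueError(f"{arg_name} must be positive, got {value}")
    return float(value)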
Example 3
    def __init__(self, decay_steps, warmup_steps=0, learning_rate=0.1, end_learning_rate=0.0001, power=1.0):
        super(BertLearningRate, self).__init__()
        self.warmup_lr = lr_schedules.WarmUpLR(learning_rate, warmup_steps)
        self.decay_lr = lr_schedules.PolynomialDecayLR(learning_rate, end_learning_rate, decay_steps, power)
        self.warmup_steps = Tensor(np.array([warmup_steps]).astype(np.float32))

        self.greater = P.Greater()
        self.one = Tensor(np.array([1.0]).astype(np.float32))
        self.cast = P.Cast()
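
The snippet shows only __init__; a BertLearningRate cell also needs a construct method that switches from the warmup schedule to the decay schedule. A sketch following the usual MindSpore BERT recipe (the original file's version may differ), assuming operations are imported as P from mindspore.ops and mstype from mindspore.common.dtype:

    def construct(self, global_step):
        # Use warmup_lr while global_step < warmup_steps, decay_lr afterwards.
        is_warmup = self.cast(self.greater(self.warmup_steps, global_step), mstype.float32)
        warmup_lr = self.warmup_lr(global_step)
        decay_lr = self.decay_lr(global_step)
        return (self.one - is_warmup) * decay_lr + is_warmup * warmup_lr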
Example 4
    def test_decay_steps_value(self):
        decay_steps_e = -2
        with pytest.raises(ValueError):
            lr_schedules.ExponentialDecayLR(learning_rate, decay_rate,
                                            decay_steps_e)

        with pytest.raises(ValueError):
            lr_schedules.CosineDecayLR(min_lr, max_lr, decay_steps_e)

        with pytest.raises(ValueError):
            lr_schedules.PolynomialDecayLR(learning_rate, end_learning_rate,
                                           decay_steps_e, power)
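
For reference, CosineDecayLR follows the documented formula lr = min_lr + 0.5 * (max_lr - min_lr) * (1 + cos(pi * current_step / decay_steps)). A plain-NumPy sketch of that formula, not the MindSpore implementation itself:

import numpy as np

def cosine_decay(min_lr, max_lr, decay_steps, current_step):
    # Documented CosineDecayLR formula as a pure-NumPy function.
    return min_lr + 0.5 * (max_lr - min_lr) * (
        1.0 + np.cos(np.pi * current_step / decay_steps))

# e.g. cosine_decay(0.01, 0.1, decay_steps=4, current_step=2) -> 0.055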
Example 5
def test_adamweightdecay_group():
    """ test_adam_group_lr_and_weight_decay """
    inputs = Tensor(np.ones([1, 64]).astype(np.float32))
    label = Tensor(np.zeros([1, 10]).astype(np.float32))
    net = Net()
    net.set_train()
    loss = nn.SoftmaxCrossEntropyWithLogits()
    net_with_loss = WithLossCell(net, loss)
    all_params = net.trainable_params()

    schedule_lr = lr_schedules.PolynomialDecayLR(0.01, 0.0001, 3, power=1.0)
    group_params = [{
        'params': [all_params[0]],
        'lr': 0.02,
        'weight_decay': 0.9
    }, {
        'params': [all_params[1]]
    }]
    optimizer = nn.AdamWeightDecay(group_params, learning_rate=schedule_lr)
    train_network = TrainOneStepCell(net_with_loss, optimizer)
    _executor.compile(train_network, inputs, label)
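
In this grouped setup the per-group keys override the optimizer-level defaults: all_params[0] trains with the fixed lr 0.02 and weight_decay 0.9 from its group dict, while all_params[1] falls back to the optimizer-level schedule_lr and AdamWeightDecay's default weight_decay (0.0 unless otherwise specified).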
Example 6
def test_polynomial_decay2():
    lr_schedule = lr_schedules.PolynomialDecayLR(learning_rate,
                                                 end_learning_rate,
                                                 decay_steps, power, True)
    _executor.compile(lr_schedule, global_step)
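
The trailing True is PolynomialDecayLR's update_decay_steps flag: instead of clamping the step at decay_steps, it stretches decay_steps to the next multiple of itself, so the rate keeps decaying in cycles. A plain-Python sketch of the documented formula, an approximation rather than the MindSpore kernel:

import numpy as np

def polynomial_decay(lr, end_lr, decay_steps, power, step, update_decay_steps=False):
    if update_decay_steps:
        # Stretch decay_steps to the smallest multiple >= step (guarding step 0).
        decay_steps = decay_steps * max(np.ceil(step / decay_steps), 1.0)
    else:
        step = min(step, decay_steps)
    return (lr - end_lr) * (1.0 - step / decay_steps) ** power + end_lr

# e.g. polynomial_decay(0.1, 0.01, 4, 0.5, step=2) -> ~0.0736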
Example 7
    def test_power(self):
        power1 = True
        with pytest.raises(TypeError):
            lr_schedules.PolynomialDecayLR(learning_rate, end_learning_rate,
                                           decay_steps, power1)
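
Finally, a minimal standalone usage sketch of PolynomialDecayLR outside the test harness, mirroring the call pattern above; the values are illustrative and the printed number indicative:

import mindspore.common.dtype as mstype
import mindspore.nn as nn
from mindspore import Tensor

poly_lr = nn.PolynomialDecayLR(learning_rate=0.1, end_learning_rate=0.01,
                               decay_steps=4, power=0.5)
global_step = Tensor(2, mstype.int32)
print(poly_lr(global_step))  # ~0.0736 with these values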