Example #1
    def test_lifecycle(
            self,
            modifier_lambda,
            model_lambda,
            optim_lambda,
            test_steps_per_epoch,  # noqa: F811
    ):
        modifier = modifier_lambda()
        model = model_lambda()
        optimizer = optim_lambda(model)
        self.initialize_helper(modifier, model)
        assert get_optim_learning_rate(optimizer) == INIT_LR

        for epoch in range(int(modifier.end_epoch) + 5):
            if epoch < modifier.start_epoch:
                expected = INIT_LR
            elif epoch < modifier.end_epoch or modifier.end_epoch == -1:
                expected = modifier.init_lr * (
                    modifier.lr_kwargs["gamma"]**math.floor(
                        (epoch - modifier.start_epoch) /
                        modifier.lr_kwargs["step_size"]))
            else:
                expected = modifier.init_lr * (modifier.lr_kwargs["gamma"]**(
                    math.floor((modifier.end_epoch - modifier.start_epoch) /
                               modifier.lr_kwargs["step_size"]) - 1))

            for step in range(test_steps_per_epoch):
                epoch_test = float(
                    epoch) + float(step) / float(test_steps_per_epoch)

                if epoch_test < modifier.start_epoch:
                    assert not modifier.update_ready(epoch_test,
                                                     test_steps_per_epoch)
                elif abs(epoch_test -
                         modifier.start_epoch) < sys.float_info.epsilon:
                    assert modifier.update_ready(epoch_test,
                                                 test_steps_per_epoch)
                    modifier.scheduled_update(model, optimizer, epoch_test,
                                              test_steps_per_epoch)
                elif epoch_test < modifier.end_epoch or modifier.end_epoch == -1:
                    assert modifier.update_ready(epoch_test,
                                                 test_steps_per_epoch)
                    modifier.scheduled_update(model, optimizer, epoch_test,
                                              test_steps_per_epoch)
                elif abs(epoch_test -
                         modifier.end_epoch) < sys.float_info.epsilon:
                    assert modifier.update_ready(epoch_test,
                                                 test_steps_per_epoch)
                    modifier.scheduled_update(model, optimizer, epoch_test,
                                              test_steps_per_epoch)
                else:
                    assert not modifier.update_ready(epoch_test,
                                                     test_steps_per_epoch)

                assert (abs(get_optim_learning_rate(optimizer) - expected) <
                        EPSILON), "Failed at epoch:{} step:{}".format(
                            epoch, step)
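
The expected value above is a step schedule: one factor of gamma is applied for every completed step_size window after start_epoch. A minimal standalone sketch of the same arithmetic, using hypothetical values for init_lr, gamma, step_size, and start_epoch:

import math

# Hypothetical schedule parameters, chosen only for illustration.
init_lr, gamma, step_size, start_epoch = 0.1, 0.1, 2.0, 1.0

for epoch in range(6):
    if epoch < start_epoch:
        lr = init_lr
    else:
        # Same formula as the test: gamma applied once per completed step_size window.
        lr = init_lr * gamma ** math.floor((epoch - start_epoch) / step_size)
    print(epoch, lr)
# epoch 0 -> 0.1; epochs 1-2 -> 0.1; epochs 3-4 -> 0.01; epoch 5 -> 0.001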
Example #2
    def test_lifecycle(
            self,
            modifier_lambda,
            model_lambda,
            optim_lambda,
            test_steps_per_epoch,  # noqa: F811
    ):
        modifier = modifier_lambda()
        model = model_lambda()
        optimizer = optim_lambda(model)
        self.initialize_helper(modifier, model)
        assert get_optim_learning_rate(optimizer) == INIT_LR

        for epoch in range(int(modifier.end_epoch) + 5):
            if epoch < modifier.start_epoch:
                expected = INIT_LR
            else:
                num_gammas = sum([
                    1 for mile in modifier.lr_kwargs["milestones"]
                    if epoch >= mile
                ])
                expected = modifier.init_lr * modifier.lr_kwargs[
                    "gamma"]**num_gammas

            for step in range(test_steps_per_epoch):
                epoch_test = float(
                    epoch) + float(step) / float(test_steps_per_epoch)

                if epoch_test < modifier.start_epoch:
                    assert not modifier.update_ready(epoch_test,
                                                     test_steps_per_epoch)
                elif abs(epoch_test -
                         modifier.start_epoch) < sys.float_info.epsilon:
                    assert modifier.update_ready(epoch_test,
                                                 test_steps_per_epoch)
                    modifier.scheduled_update(model, optimizer, epoch_test,
                                              test_steps_per_epoch)
                elif epoch_test < modifier.end_epoch or modifier.end_epoch == -1:
                    assert modifier.update_ready(epoch_test,
                                                 test_steps_per_epoch)
                    modifier.scheduled_update(model, optimizer, epoch_test,
                                              test_steps_per_epoch)
                elif abs(epoch_test -
                         modifier.end_epoch) < sys.float_info.epsilon:
                    assert modifier.update_ready(epoch_test,
                                                 test_steps_per_epoch)
                    modifier.scheduled_update(model, optimizer, epoch_test,
                                              test_steps_per_epoch)
                else:
                    assert not modifier.update_ready(epoch_test,
                                                     test_steps_per_epoch)

                optim_lr = get_optim_learning_rate(optimizer)
                assert (abs(optim_lr - expected) <
                        EPSILON), "Failed at epoch:{} step:{}".format(
                            epoch, step)
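
Here the expected value counts how many milestones the current epoch has passed and applies gamma that many times. A small standalone sketch with hypothetical milestones and gamma (the start_epoch gate from the test is omitted for brevity):

# Hypothetical values, chosen only for illustration.
init_lr, gamma, milestones = 0.1, 0.1, [2, 4]

for epoch in range(6):
    # One factor of gamma per milestone already reached, as in the test.
    num_gammas = sum(1 for mile in milestones if epoch >= mile)
    lr = init_lr * gamma ** num_gammas
    print(epoch, lr)
# epochs 0-1 -> 0.1; epochs 2-3 -> 0.01; epochs 4-5 -> 0.001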
Example #3
def test_get_set_optim_learning_rate():
    model = LinearNet()
    optim = SGD(model.parameters(), lr=0.01)

    check_lr = get_optim_learning_rate(optim)
    assert abs(check_lr - 0.01) < 1e-9

    set_optim_learning_rate(optim, 0.0001)

    check_lr = get_optim_learning_rate(optim)
    assert abs(check_lr - 0.0001) < 1e-9
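
This test exercises get_optim_learning_rate and set_optim_learning_rate directly. A minimal sketch of how such helpers are commonly written over torch param groups; this is an assumption about their behavior, not the library's actual source:

from torch.optim.optimizer import Optimizer

def get_optim_learning_rate(optim: Optimizer) -> float:
    # Assumed behavior: report the LR of the first param group.
    return optim.param_groups[0]["lr"]

def set_optim_learning_rate(optim: Optimizer, value: float) -> None:
    # Assumed behavior: write the new LR into every param group.
    for group in optim.param_groups:
        group["lr"] = value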
Example #4
    def test_lifecycle(
            self,
            modifier_lambda,
            model_lambda,
            optim_lambda,
            test_steps_per_epoch,  # noqa: F811
    ):
        modifier = modifier_lambda()
        model = model_lambda()
        optimizer = optim_lambda(model)
        self.initialize_helper(modifier, model)
        assert get_optim_learning_rate(optimizer) == INIT_LR
        last = modifier.final_lr

        for epoch in range(int(modifier.end_epoch)):
            for step in range(test_steps_per_epoch):
                epoch_test = float(
                    epoch) + float(step) / float(test_steps_per_epoch)
                if epoch_test < modifier.start_epoch:
                    expected = INIT_LR
                    assert not modifier.update_ready(epoch_test,
                                                     test_steps_per_epoch)
                elif epoch_test <= modifier.end_epoch:
                    end_step = modifier.end_epoch * test_steps_per_epoch
                    start_step = modifier.start_epoch * test_steps_per_epoch
                    cycle_steps = modifier.cycle_epochs * test_steps_per_epoch
                    current_step = (epoch_test - modifier.start_epoch
                                    ) * test_steps_per_epoch
                    if (current_step > int(
                        (end_step - start_step) / cycle_steps) * cycle_steps):
                        cycle_steps = (end_step - start_step) % cycle_steps
                    adjusted_step = current_step % cycle_steps
                    expected = modifier.init_lr - (adjusted_step /
                                                   (cycle_steps - 1)) * (
                                                       modifier.init_lr -
                                                       modifier.final_lr)

                    assert modifier.update_ready(epoch_test,
                                                 test_steps_per_epoch)
                    modifier.scheduled_update(model, optimizer, epoch_test,
                                              test_steps_per_epoch)
                    assert (expected == modifier.init_lr or expected <= last
                            ), f"Failed at epoch:{epoch} step:{step}"
                    last = expected
                else:
                    expected = modifier.final_lr
                    assert not modifier.update_ready(epoch_test,
                                                     test_steps_per_epoch)

                assert (abs(get_optim_learning_rate(optimizer) - expected) <
                        EPSILON), f"Failed at epoch:{epoch} step:{step}"
Example #5
    def test_lifecycle(
            self,
            modifier_lambda,
            model_lambda,
            optim_lambda,
            test_steps_per_epoch,  # noqa: F811
    ):
        modifier = modifier_lambda()
        model = model_lambda()
        optimizer = optim_lambda(model)
        self.initialize_helper(modifier, model)
        assert modifier.applied_learning_rate < 0
        assert get_optim_learning_rate(optimizer) == INIT_LR

        for epoch in range(int(modifier.start_epoch) + 10):
            expected = (INIT_LR if epoch < modifier.start_epoch else
                        modifier.learning_rate)

            for step in range(test_steps_per_epoch):
                epoch_test = float(
                    epoch) + float(step) / float(test_steps_per_epoch)

                if epoch < modifier.start_epoch:
                    assert not modifier.update_ready(epoch_test,
                                                     test_steps_per_epoch)
                elif (epoch == modifier.start_epoch or
                      (modifier.start_epoch == -1
                       and epoch == 0)) and step == 0:
                    assert modifier.update_ready(epoch_test,
                                                 test_steps_per_epoch)
                    modifier.scheduled_update(model, optimizer, epoch_test,
                                              test_steps_per_epoch)
                else:
                    assert not modifier.update_ready(epoch_test,
                                                     test_steps_per_epoch)

                if epoch >= modifier.start_epoch:
                    assert (abs(modifier.applied_learning_rate - expected) <
                            EPSILON), "Failed at epoch:{} step:{}".format(
                                epoch, step)
                else:
                    assert (abs(modifier.applied_learning_rate - -1.0) <
                            EPSILON), "Failed at epoch:{} step:{}".format(
                                epoch, step)

                assert (abs(get_optim_learning_rate(optimizer) - expected) <
                        EPSILON), "Failed at epoch:{} step:{}".format(
                            epoch, step)
Example #6
    def test_lifecycle(
            self,
            modifier_lambda,
            model_lambda,
            optim_lambda,
            test_steps_per_epoch,  # noqa: F811
    ):
        modifier = modifier_lambda()
        model = model_lambda()
        optimizer = optim_lambda(model)
        self.initialize_helper(modifier, model)
        assert get_optim_learning_rate(optimizer) == INIT_LR
        last = 1.0

        for epoch in range(int(modifier.end_epoch) + 5):
            for step in range(test_steps_per_epoch):
                epoch_test = float(
                    epoch) + float(step) / float(test_steps_per_epoch)

                if epoch_test < modifier.start_epoch:
                    expected = INIT_LR
                    assert not modifier.update_ready(epoch_test,
                                                     test_steps_per_epoch)
                elif epoch_test <= modifier.end_epoch:
                    expected = (math.cos(
                        ((epoch_test - modifier.start_epoch) /
                         (modifier.end_epoch - modifier.start_epoch)) *
                        math.pi) * (modifier.init_lr - modifier.final_lr) / 2 +
                                (modifier.init_lr - modifier.final_lr) / 2 +
                                modifier.final_lr)
                    assert modifier.update_ready(epoch_test,
                                                 test_steps_per_epoch)
                    modifier.scheduled_update(model, optimizer, epoch_test,
                                              test_steps_per_epoch)
                    assert expected <= last, f"Failed at epoch:{epoch} step:{step}"
                    last = expected
                else:
                    expected = modifier.final_lr
                    assert not modifier.update_ready(epoch_test,
                                                     test_steps_per_epoch)

                assert (abs(get_optim_learning_rate(optimizer) - expected) <
                        EPSILON), f"Failed at epoch:{epoch} step:{step}"
Example #7
    def test_lifecycle(
            self,
            modifier_lambda,
            model_lambda,
            optim_lambda,
            test_steps_per_epoch,  # noqa: F811
    ):
        modifier = modifier_lambda()
        model = model_lambda()
        optimizer = optim_lambda(model)
        self.initialize_helper(modifier, model)
        assert get_optim_learning_rate(optimizer) == INIT_LR

        for epoch in range(int(modifier.end_epoch) + 5):
            for step in range(test_steps_per_epoch):
                epoch_test = float(
                    epoch) + float(step) / float(test_steps_per_epoch)

                if epoch_test < modifier.start_epoch:  # noqa: F811
                    assert not modifier.update_ready(epoch_test,
                                                     test_steps_per_epoch)
                elif abs(epoch_test -
                         modifier.start_epoch) < sys.float_info.epsilon:
                    assert modifier.update_ready(epoch_test,
                                                 test_steps_per_epoch)
                    modifier.scheduled_update(
                        model, optimizer, epoch_test,
                        test_steps_per_epoch)  # noqa: F811
                elif epoch_test < modifier.end_epoch or modifier.end_epoch == -1:
                    assert modifier.update_ready(epoch_test,
                                                 test_steps_per_epoch)
                    modifier.scheduled_update(model, optimizer, epoch_test,
                                              test_steps_per_epoch)
                elif abs(epoch_test -
                         modifier.end_epoch) < sys.float_info.epsilon:
                    assert modifier.update_ready(epoch_test,
                                                 test_steps_per_epoch)
                    modifier.scheduled_update(model, optimizer, epoch_test,
                                              test_steps_per_epoch)
                else:
                    assert not modifier.update_ready(epoch_test,
                                                     test_steps_per_epoch)
Example #8
    def log_update(self, module: Module, optimizer: Optimizer, epoch: float,
                   steps_per_epoch: int):
        """
        Check whether to log an update for the learning rate of the modifier.
        If constant logging is enabled, this will always log;
        otherwise it logs only when the LR or the integer epoch has changed.

        :param module: module to modify
        :param optimizer: optimizer to modify
        :param epoch: current epoch and progress within the current epoch
        :param steps_per_epoch: number of steps taken within each epoch
            (calculate batch number using this and epoch)
        """
        super().log_update(module, optimizer, epoch, steps_per_epoch)
        current_lr = get_optim_learning_rate(optimizer)

        if (self._constant_logging or current_lr != self._last_logged_lr
                or math.floor(epoch) != self._last_logged_epoch):
            self._last_logged_lr = current_lr
            self._last_logged_epoch = math.floor(epoch)
            _log_lr(current_lr, self.loggers, epoch, steps_per_epoch)
Example #9
    def learning_rate(self) -> float:
        """
        :return: the learning rate of the first param group in the wrapped
            optimizer (convenience accessor)
        """
        return get_optim_learning_rate(self._wrapper.wrapped_optimizer)