Example 1
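This and the following examples are test functions exercising PyTorch-Ignite's
parameter schedulers, shown without their module header. A minimal sketch of the
imports they assume follows; note that the scheduler classes have lived under both
ignite.contrib.handlers.param_scheduler and, in newer releases,
ignite.handlers.param_scheduler, so the exact import path depends on the installed
Ignite version.

# Imports assumed by the examples below (a sketch; the param_scheduler
# module path varies across Ignite releases).
from unittest.mock import patch

import numpy as np
import pytest
import torch
from torch.optim.lr_scheduler import ExponentialLR, StepLR

from ignite.engine import Engine, Events
from ignite.handlers.param_scheduler import (
    ConcatScheduler,
    CosineAnnealingScheduler,
    LinearCyclicalScheduler,
    LRScheduler,
    PiecewiseLinear,
)
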
def test_concat_scheduler_asserts():

    tensor = torch.zeros([1], requires_grad=True)
    optimizer = torch.optim.SGD([tensor], lr=0)

    scheduler_1 = LinearCyclicalScheduler(optimizer, "lr", start_value=1.0, end_value=0.0, cycle_size=10)
    scheduler_2 = CosineAnnealingScheduler(optimizer, "lr", start_value=0.0, end_value=1.0, cycle_size=10)

    with pytest.raises(TypeError, match=r"Argument schedulers should be a sequence"):
        ConcatScheduler(schedulers=None, durations=[])

    with pytest.raises(ValueError, match=r"Argument schedulers should be of more than one parameter schedulers"):
        ConcatScheduler(schedulers=[], durations=[])

    with pytest.raises(ValueError, match=r"Argument schedulers should be of more than one parameter schedulers"):
        ConcatScheduler(schedulers=[scheduler_1], durations=[10])

    with pytest.raises(TypeError, match=r"Value at index 1 of schedulers should be a parameter scheduler"):
        ConcatScheduler(schedulers=[scheduler_1, 12], durations=[10])

    with pytest.raises(ValueError, match=r"Incorrect number schedulers or duration values"):
        ConcatScheduler(schedulers=[scheduler_1, scheduler_2], durations=[10, 5])

    with pytest.raises(ValueError, match=r"Argument durations should be list/tuple of integers"):
        ConcatScheduler(schedulers=[scheduler_1, scheduler_2, scheduler_2], durations=[15, 12.0])

    with pytest.raises(TypeError, match=r"Argument durations should be list/tuple"):
        ConcatScheduler(schedulers=[scheduler_1, scheduler_2], durations="abc")

    with pytest.raises(TypeError, match=r"Argument param_names should be list or tuple"):
        ConcatScheduler.simulate_values(
            num_events=123, schedulers=[scheduler_1, scheduler_2], durations=[15], param_names="abc"
        )

    with pytest.raises(ValueError, match=r"Argument param_names should be list or tuple of strings"):
        ConcatScheduler.simulate_values(
            num_events=123, schedulers=[scheduler_1, scheduler_2], durations=[15], param_names=[1]
        )

    optimizer_2 = torch.optim.SGD([tensor], lr=0)
    scheduler_3 = CosineAnnealingScheduler(optimizer_2, "lr", start_value=0.0, end_value=1.0, cycle_size=10)

    with pytest.raises(ValueError, match=r"schedulers should be related to same optimizer"):
        ConcatScheduler([scheduler_1, scheduler_3], durations=[30])

    scheduler_4 = CosineAnnealingScheduler(optimizer, "lr2", start_value=0.0, end_value=1.0, cycle_size=10)

    with pytest.raises(ValueError, match=r"schedulers should be related to same param_name"):
        ConcatScheduler([scheduler_1, scheduler_4], durations=[30])

    with pytest.raises(ValueError, match=r"schedulers should be related to same optimizer"):
        ConcatScheduler.simulate_values(3, [scheduler_1, scheduler_3], durations=[30])
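
For contrast with the failure cases above, a construction that passes every check
pairs schedulers that share an optimizer and a param_name, uses integer durations,
and supplies one duration fewer than the number of schedulers. A minimal sketch
with the schedulers defined above:

# Valid: same optimizer, same "lr" param_name, len(durations) == len(schedulers) - 1.
# scheduler_1 drives the first 10 events, then scheduler_2 takes over.
concat = ConcatScheduler(schedulers=[scheduler_1, scheduler_2], durations=[10])
values = ConcatScheduler.simulate_values(
    num_events=20, schedulers=[scheduler_1, scheduler_2], durations=[10]
)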
Example 2
def test_concat_scheduler_state_dict():
    tensor = torch.zeros([1], requires_grad=True)
    optimizer = torch.optim.SGD([tensor], lr=0)
    scheduler_1 = LinearCyclicalScheduler(optimizer, "lr", start_value=1.0, end_value=0.0, cycle_size=10)
    scheduler_2 = CosineAnnealingScheduler(optimizer, "lr", start_value=0.0, end_value=1.0, cycle_size=10)
    durations = [10]
    concat_scheduler = ConcatScheduler(schedulers=[scheduler_1, scheduler_2], durations=durations, save_history=False)
    state_dict = concat_scheduler.state_dict()

    assert state_dict["durations"] == durations
    assert state_dict["_current_duration"] == durations[0]
    assert state_dict["_scheduler_index"] == 0

    for _ in range(20):
        concat_scheduler(None, None)

    concat_scheduler.load_state_dict(state_dict)
    assert concat_scheduler.durations == durations
    assert concat_scheduler._current_duration == durations[0]
    assert id(concat_scheduler._current_scheduler) == id(scheduler_1)

    with pytest.raises(ValueError, match=r"Required state attribute 'schedulers' is absent in provided state_dict"):
        concat_scheduler.load_state_dict({"a": 1})

    with pytest.raises(ValueError, match=r"Input state_dict contains 0 state_dicts of concatenated schedulers"):
        concat_scheduler.load_state_dict({"schedulers": []})

    with pytest.raises(TypeError, match=r"Argument state_dict should be a dictionary, but given"):
        concat_scheduler.load_state_dict(None)
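
The same round-trip also works through a file, which is how the state would survive
a process restart. A minimal sketch, assuming the concat_scheduler above and an
illustrative checkpoint path:

# Persist the scheduler state and restore it later (the path is hypothetical).
torch.save(concat_scheduler.state_dict(), "concat_scheduler.pt")
concat_scheduler.load_state_dict(torch.load("concat_scheduler.pt"))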
Example 3
def test_cosine_annealing_scheduler():
    tensor = torch.zeros([1], requires_grad=True)
    optimizer = torch.optim.SGD([tensor], lr=0)

    scheduler = CosineAnnealingScheduler(optimizer, "lr", 0, 1, 10)
    state_dict = scheduler.state_dict()

    data = [0] * 9
    max_epochs = 2
    simulated_values = CosineAnnealingScheduler.simulate_values(
        num_events=len(data) * max_epochs, param_name="lr", start_value=0, end_value=1, cycle_size=10
    )

    def save_lr(engine):
        lrs.append(optimizer.param_groups[0]["lr"])

    trainer = Engine(lambda engine, batch: None)
    trainer.add_event_handler(Events.ITERATION_STARTED, scheduler)
    trainer.add_event_handler(Events.ITERATION_COMPLETED, save_lr)

    for _ in range(2):
        lrs = []
        trainer.run(data, max_epochs=max_epochs)

        assert lrs == list(
            map(
                pytest.approx,
                [
                    0.0,
                    0.02447174185242318,
                    0.09549150281252627,
                    0.20610737385376332,
                    0.3454915028125263,
                    0.5,
                    0.6545084971874737,
                    0.7938926261462365,
                    0.9045084971874737,
                    0.9755282581475768,
                    0.0,
                    0.02447174185242318,
                    0.09549150281252627,
                    0.20610737385376332,
                    0.3454915028125263,
                    0.5,
                    0.6545084971874737,
                    0.7938926261462365,  # 0.9045084971874737, 0.9755282581475768
                ],
            )
        )
        scheduler.load_state_dict(state_dict)

        assert lrs == pytest.approx([v for i, v in simulated_values])
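
The hard-coded expected values follow the cosine annealing curve: at event t of a
cycle, the value is start_value + (end_value - start_value) / 2 * (1 - cos(pi * t /
cycle_size)). A quick sketch checking the first few entries against that formula:

import math

def cosine_annealing(t, start_value=0.0, end_value=1.0, cycle_size=10):
    # Value applied at event t of the cycle (t = 0 .. cycle_size - 1).
    return start_value + (end_value - start_value) / 2 * (1 - math.cos(math.pi * t / cycle_size))

assert math.isclose(cosine_annealing(1), 0.02447174185242318)
assert math.isclose(cosine_annealing(2), 0.09549150281252627)
assert math.isclose(cosine_annealing(5), 0.5)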
Example 4
def test_scheduler_with_param_groups():
    def _test(lr_scheduler, optimizer):
        num_iterations = 10
        max_epochs = 20

        state_dict = lr_scheduler.state_dict()

        trainer = Engine(lambda engine, batch: None)

        @trainer.on(Events.ITERATION_COMPLETED)
        def save_lr():
            lrs.append((optimizer.param_groups[0]["lr"], optimizer.param_groups[1]["lr"]))

        trainer.add_event_handler(Events.ITERATION_STARTED, lr_scheduler)

        data = [0] * num_iterations

        for _ in range(2):
            lrs = []
            trainer.run(data, max_epochs=max_epochs)
            assert [lr[0] for lr in lrs] == pytest.approx([lr[1] for lr in lrs])
            lr_scheduler.load_state_dict(state_dict)

    t1 = torch.zeros([1], requires_grad=True)
    t2 = torch.zeros([1], requires_grad=True)
    optimizer = torch.optim.SGD([{"params": t1, "lr": 0.1}, {"params": t2, "lr": 0.1}])

    lr_scheduler = LinearCyclicalScheduler(optimizer, "lr", start_value=1.0, end_value=0.0, cycle_size=10)
    _test(lr_scheduler, optimizer)

    lr_scheduler = PiecewiseLinear(
        optimizer, "lr", milestones_values=[(5, 0.5), (15, 1.0), (25, 0.0), (35, 1.0), (40, 0.5)]
    )
    _test(lr_scheduler, optimizer)

    lr_scheduler = CosineAnnealingScheduler(optimizer, "lr", start_value=0.0, end_value=1.0, cycle_size=10)
    _test(lr_scheduler, optimizer)

    torch_lr_scheduler = ExponentialLR(optimizer, gamma=0.98)
    _test(LRScheduler(torch_lr_scheduler), optimizer)

    torch_lr_scheduler = StepLR(optimizer, step_size=50, gamma=0.5)
    _test(LRScheduler(torch_lr_scheduler), optimizer)
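
The assertion that both groups stay equal holds because, when no group is singled
out, the scheduler writes its value into every param group of the optimizer. To
schedule a single group instead, Ignite's schedulers accept a param_group_index
argument; a sketch, assuming the two-group optimizer above:

# Only param group 0 is scheduled; group 1 keeps its fixed lr of 0.1.
lr_scheduler = LinearCyclicalScheduler(
    optimizer, "lr", start_value=1.0, end_value=0.0, cycle_size=10, param_group_index=0
)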
Example 5
def test_simulate_and_plot_values():

    import matplotlib

    matplotlib.use("Agg")

    def _test(scheduler_cls, **scheduler_kwargs):

        optimizer = None
        event = Events.ITERATION_STARTED
        if scheduler_cls == LRScheduler:
            optimizer = scheduler_kwargs["lr_scheduler"].optimizer
            event = Events.ITERATION_COMPLETED
        elif scheduler_cls == ConcatScheduler:
            optimizer = scheduler_kwargs["optimizer"]
            del scheduler_kwargs["optimizer"]
        else:
            tensor = torch.zeros([1], requires_grad=True)
            scheduler_kwargs["optimizer"] = torch.optim.SGD([tensor], lr=0.1)
            optimizer = scheduler_kwargs["optimizer"]

        max_epochs = 2
        data = [0] * 10
        # simulated_values = scheduler_cls.simulate_values(num_events=len(data) * max_epochs, **scheduler_kwargs)

        scheduler = scheduler_cls(**scheduler_kwargs)

        lrs = []

        def save_lr(engine):
            lrs.append(optimizer.param_groups[0]["lr"])

        trainer = Engine(lambda engine, batch: None)
        trainer.add_event_handler(event, scheduler)
        trainer.add_event_handler(Events.ITERATION_STARTED, save_lr)
        trainer.run(data, max_epochs=max_epochs)

        # assert lrs == pytest.approx([v for i, v in simulated_values])

        if scheduler_cls == LRScheduler or scheduler_cls == ConcatScheduler:
            # As the internal state of the torch lr scheduler has been changed, the following checks would fail
            return

        # re-execute to check that there were no internal changes
        # simulated_values = scheduler_cls.simulate_values(num_events=len(data) * max_epochs,
        #                                                  save_history=True,  # this will be removed
        #                                                  **scheduler_kwargs)
        # assert lrs == pytest.approx([v for i, v in simulated_values])

        # launch plot values
        scheduler_cls.plot_values(num_events=len(data) * max_epochs, **scheduler_kwargs)

    # LinearCyclicalScheduler
    _test(LinearCyclicalScheduler, param_name="lr", start_value=1.0, end_value=0.0, cycle_size=10)

    # CosineAnnealingScheduler
    _test(CosineAnnealingScheduler, param_name="lr", start_value=1.0, end_value=0.0, cycle_size=10)

    # LRScheduler
    tensor = torch.zeros([1], requires_grad=True)
    optimizer = torch.optim.SGD([tensor], lr=0.1)
    torch_lr_scheduler = torch.optim.lr_scheduler.ExponentialLR(optimizer=optimizer, gamma=0.5)

    _test(LRScheduler, lr_scheduler=torch_lr_scheduler)

    # ConcatScheduler = [LinearCyclicalScheduler, CosineAnnealingScheduler]
    scheduler_1 = LinearCyclicalScheduler(optimizer, "lr", start_value=1.0, end_value=0.0, cycle_size=20)
    scheduler_2 = CosineAnnealingScheduler(optimizer, "lr", start_value=0.0, end_value=1.0, cycle_size=10)
    durations = [10]
    _test(ConcatScheduler, optimizer=optimizer, schedulers=[scheduler_1, scheduler_2], durations=durations)

    # ConcatScheduler = [LinearCyclicalScheduler, LRScheduler]
    tensor = torch.ones([1], requires_grad=True)
    optimizer = torch.optim.SGD([tensor], lr=0.001)
    torch_lr_scheduler = torch.optim.lr_scheduler.ExponentialLR(optimizer=optimizer, gamma=1.5)
    scheduler_1 = LRScheduler(torch_lr_scheduler)
    scheduler_2 = LinearCyclicalScheduler(optimizer, "lr", start_value=0.1, end_value=0.0, cycle_size=10)
    durations = [10]
    _test(ConcatScheduler, optimizer=optimizer, schedulers=[scheduler_1, scheduler_2], durations=durations)

    # PiecewiseLinear
    tensor = torch.ones([1], requires_grad=True)
    optimizer = torch.optim.SGD([tensor], lr=0.001)
    _test(
        PiecewiseLinear,
        optimizer=optimizer,
        param_name="lr",
        milestones_values=[(10, 0.5), (20, 0.45), (21, 0.3), (30, 0.1), (40, 0.1)],
    )

    with pytest.raises(RuntimeError, match=r"This method requires matplotlib to be installed."):
        with patch.dict("sys.modules", {"matplotlib.pyplot": None}):
            _test(
                PiecewiseLinear,
                optimizer=optimizer,
                param_name="lr",
                milestones_values=[(10, 0.5), (20, 0.45), (21, 0.3), (30, 0.1), (40, 0.1)],
            )
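
plot_values simulates the schedule and draws it with matplotlib, which is why the
test forces the Agg backend and why hiding matplotlib.pyplot raises the
RuntimeError. Outside a test, the resulting figure can be written to disk; a
minimal sketch reusing the PiecewiseLinear arguments above (the output path is
illustrative):

import matplotlib.pyplot as plt

# Simulate 50 events, plot the schedule, and save the figure.
PiecewiseLinear.plot_values(
    num_events=50,
    optimizer=optimizer,
    param_name="lr",
    milestones_values=[(10, 0.5), (20, 0.45), (21, 0.3), (30, 0.1), (40, 0.1)],
)
plt.savefig("lr_schedule.png")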
Example 6
@pytest.mark.parametrize("duration_vals_as_np_int", [False, True])  # decorator reconstructed from the parameter name
def test_concat_scheduler_two_schedulers(duration_vals_as_np_int):
    tensor = torch.zeros([1], requires_grad=True)
    optimizer = torch.optim.SGD([tensor], lr=0)

    scheduler_1 = LinearCyclicalScheduler(optimizer, "lr", start_value=1.0, end_value=0.0, cycle_size=10)
    scheduler_2 = CosineAnnealingScheduler(optimizer, "lr", start_value=0.0, end_value=1.0, cycle_size=10)

    durations = [10]
    if duration_vals_as_np_int:
        durations = [np.int64(t) for t in durations]

    concat_scheduler = ConcatScheduler(schedulers=[scheduler_1, scheduler_2], durations=durations, save_history=True)
    state_dict = concat_scheduler.state_dict()

    data = [0] * 10
    max_epochs = 2
    simulated_values = ConcatScheduler.simulate_values(
        num_events=len(data) * max_epochs, schedulers=[scheduler_1, scheduler_2], durations=durations
    )

    def save_lr(engine):
        lrs.append(optimizer.param_groups[0]["lr"])

    trainer = Engine(lambda engine, batch: None)
    trainer.add_event_handler(Events.ITERATION_STARTED, concat_scheduler)
    trainer.add_event_handler(Events.ITERATION_COMPLETED, save_lr)

    for _ in range(2):
        lrs = []
        trainer.run(data, max_epochs=max_epochs)

        assert lrs == list(
            map(
                pytest.approx,
                [
                    # Cycle 1 of the LinearCyclicalScheduler
                    1.0,
                    0.8,
                    0.6,
                    0.4,
                    0.2,
                    0.0,
                    0.2,
                    0.4,
                    0.6,
                    0.8,
                    # Cycle 1 of the CosineAnnealingScheduler
                    0.0,
                    0.02447174185242318,
                    0.09549150281252627,
                    0.20610737385376332,
                    0.3454915028125263,
                    0.5,
                    0.6545084971874737,
                    0.7938926261462365,
                    0.9045084971874737,
                    0.9755282581475768,
                ],
            )
        )

        state_lrs = trainer.state.param_history["lr"]
        assert len(state_lrs) == len(lrs)
        # Unpack singleton lists
        assert [group[0] for group in state_lrs] == lrs
        assert lrs == pytest.approx([v for i, v in simulated_values])
        concat_scheduler.load_state_dict(state_dict)

        trainer.state.param_history = None
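
The param_history checks work because the scheduler was created with
save_history=True: on every event the applied values are appended to
engine.state.param_history under the parameter name, one list per param group,
which is why the singleton lists are unpacked above. A sketch of reading the
history back after a run, assuming the trainer above:

# Each entry is a list with one value per param group (a single group here).
history = trainer.state.param_history["lr"]
applied_lrs = [group[0] for group in history]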