Example #1
    def test_optional_args(self):
        v = [1, 0.1, 0.01]
        s1 = MultiStepParamScheduler(v, num_updates=90, milestones=[30, 60])
        s2 = MultiStepParamScheduler(v, num_updates=90)
        s3 = MultiStepParamScheduler(v, milestones=[30, 60, 90])
        for i in range(10):
            k = i / 10
            self.assertEqual(s1(k), s2(k))
            self.assertEqual(s1(k), s3(k))
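For context, a minimal sketch of the call convention these assertions rely on, assuming the fvcore import path: a scheduler is queried with the fraction of training completed, in [0, 1), and num_updates maps that fraction back to an update index.

from fvcore.common.param_scheduler import MultiStepParamScheduler

s = MultiStepParamScheduler([1, 0.1, 0.01], num_updates=90, milestones=[30, 60])
print(s(0.0))  # 1    -> update 0, before the first milestone
print(s(0.5))  # 0.1  -> update 45, past the milestone at 30
print(s(0.9))  # 0.01 -> update 81, past the milestone at 60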
Example #2
    def test_warmup_multistep(self):
        p = nn.Parameter(torch.zeros(0))
        opt = torch.optim.SGD([p], lr=5)

        multiplier = WarmupParamScheduler(
            MultiStepParamScheduler(
                [1, 0.1, 0.01, 0.001],
                milestones=[10, 15, 20],
                num_updates=30,
            ),
            0.001,
            5 / 30,
        )
        sched = LRMultiplier(opt, multiplier, 30)
        # This is an equivalent of:
        # sched = WarmupMultiStepLR(
        #     opt, milestones=[10, 15, 20], gamma=0.1, warmup_factor=0.001, warmup_iters=5)

        p.sum().backward()
        opt.step()

        lrs = [0.005]
        for _ in range(30):
            sched.step()
            lrs.append(opt.param_groups[0]["lr"])
        self.assertTrue(
            np.allclose(lrs[:5], [0.005, 1.004, 2.003, 3.002, 4.001]))
        self.assertTrue(np.allclose(lrs[5:10], 5.0))
        self.assertTrue(np.allclose(lrs[10:15], 0.5))
        self.assertTrue(np.allclose(lrs[15:20], 0.05))
        self.assertTrue(np.allclose(lrs[20:], 0.005))
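The expected values above can be reproduced without an optimizer by evaluating the multiplier directly; a minimal sketch, assuming detectron2's import paths, where the base LR of 5 is scaled by the multiplier at where = iteration / max_iter:

from detectron2.solver import WarmupParamScheduler
from fvcore.common.param_scheduler import MultiStepParamScheduler

m = WarmupParamScheduler(
    MultiStepParamScheduler([1, 0.1, 0.01, 0.001], milestones=[10, 15, 20], num_updates=30),
    0.001,   # warmup_factor
    5 / 30,  # warmup_length, as a fraction of total training
)
for it in (0, 7, 12, 17, 25):
    print(it, 5 * m(it / 30))  # 0.005, 5.0, 0.5, 0.05, 0.005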
Example #3
def build_lr_scheduler(
    cfg: CfgNode, optimizer: torch.optim.Optimizer
) -> torch.optim.lr_scheduler._LRScheduler:
    """
    Build a LR scheduler from config.
    """
    name = cfg.SOLVER.LR_SCHEDULER_NAME

    if name == "WarmupMultiStepLR":
        steps = [x for x in cfg.SOLVER.STEPS if x <= cfg.SOLVER.MAX_ITER]
        if len(steps) != len(cfg.SOLVER.STEPS):
            logger = logging.getLogger(__name__)
            logger.warning(
                "SOLVER.STEPS contains values larger than SOLVER.MAX_ITER. "
                "These values will be ignored."
            )
        sched = MultiStepParamScheduler(
            values=[cfg.SOLVER.GAMMA ** k for k in range(len(steps) + 1)],
            milestones=steps,
            num_updates=cfg.SOLVER.MAX_ITER,
        )
    elif name == "WarmupCosineLR":
        sched = CosineParamScheduler(1, 0)
    else:
        raise ValueError("Unknown LR scheduler: {}".format(name))

    sched = WarmupParamScheduler(
        sched,
        cfg.SOLVER.WARMUP_FACTOR,
        cfg.SOLVER.WARMUP_ITERS / cfg.SOLVER.MAX_ITER,
        cfg.SOLVER.WARMUP_METHOD,
    )
    return LRMultiplier(optimizer, multiplier=sched, max_iter=cfg.SOLVER.MAX_ITER)
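A hypothetical invocation, assuming detectron2's default config keys (the exact SOLVER values here are illustrative):

from detectron2.config import get_cfg
import torch

cfg = get_cfg()
cfg.SOLVER.LR_SCHEDULER_NAME = "WarmupMultiStepLR"
cfg.SOLVER.MAX_ITER = 90000
cfg.SOLVER.STEPS = (60000, 80000)
cfg.SOLVER.GAMMA = 0.1

opt = torch.optim.SGD(torch.nn.Linear(2, 2).parameters(), lr=0.02)
sched = build_lr_scheduler(cfg, opt)  # LR drops to 0.002 at 60k and 0.0002 at 80k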
Example #4
    def _test_config_scheduler(self, config, expected_schedule):
        scheduler = MultiStepParamScheduler(**config)
        schedule = [
            scheduler(epoch_num / self._num_updates)
            for epoch_num in range(self._num_updates)
        ]
        self.assertEqual(schedule, expected_schedule)
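A hypothetical config/expected_schedule pair this helper would accept, assuming self._num_updates is 12:

config = {
    "values": [0.1, 0.01, 0.001],
    "milestones": [4, 8],
    "num_updates": 12,
}
# Each value holds until its milestone, so the per-update schedule is:
expected_schedule = [0.1] * 4 + [0.01] * 4 + [0.001] * 4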
Example #5
def build_lr_scheduler(
    cfg: CfgNode, optimizer: torch.optim.Optimizer
) -> torch.optim.lr_scheduler._LRScheduler:
    """
    Build a LR scheduler from config.
    """
    name = cfg.SOLVER.LR_SCHEDULER_NAME

    if name == "WarmupMultiStepLR":
        sched = MultiStepParamScheduler(
            values=[cfg.SOLVER.GAMMA ** k for k in range(len(cfg.SOLVER.STEPS) + 1)],
            milestones=cfg.SOLVER.STEPS,
            num_updates=cfg.SOLVER.MAX_ITER,
        )
    elif name == "WarmupCosineLR":
        sched = CosineParamScheduler(1, 0)
    else:
        raise ValueError("Unknown LR scheduler: {}".format(name))

    sched = WarmupParamScheduler(
        sched,
        cfg.SOLVER.WARMUP_FACTOR,
        cfg.SOLVER.WARMUP_ITERS / cfg.SOLVER.MAX_ITER,
        cfg.SOLVER.WARMUP_METHOD,
    )
    return LRMultiplier(optimizer, multiplier=sched, max_iter=cfg.SOLVER.MAX_ITER)
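Unlike Example #3, this variant passes cfg.SOLVER.STEPS to the scheduler unfiltered. With fvcore versions that validate milestones against num_updates (the behavior Example #6 exercises below), a step beyond MAX_ITER raises ValueError instead of being ignored; a minimal sketch:

from fvcore.common.param_scheduler import MultiStepParamScheduler

try:
    MultiStepParamScheduler([1, 0.1], milestones=[100], num_updates=50)
except ValueError as e:
    print(e)  # the milestone exceeds the total number of updates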
Example #6
    def test_invalid_config(self):
        # Invalid num_updates
        config = self._get_valid_config()

        bad_config = copy.deepcopy(config)
        bad_config["num_updates"] = -1
        with self.assertRaises(ValueError):
            MultiStepParamScheduler(**bad_config)

        bad_config["values"] = {"a": "b"}
        with self.assertRaises(ValueError):
            MultiStepParamScheduler(**bad_config)

        bad_config["values"] = []
        with self.assertRaises(ValueError):
            MultiStepParamScheduler(**bad_config)

        # Invalid milestones
        bad_config["values"] = config["values"]
        bad_config["milestones"] = {"a": "b"}
        with self.assertRaises(ValueError):
            MultiStepParamScheduler(**bad_config)

        # Too many milestones
        bad_config["milestones"] = [3, 6, 8, 12]
        with self.assertRaises(ValueError):
            MultiStepParamScheduler(**bad_config)

        # Too few milestones
        bad_config["milestones"] = [3, 6]
        with self.assertRaises(ValueError):
            MultiStepParamScheduler(**bad_config)

        # Exceeds num_updates
        bad_config["milestones"] = [3, 6, 12]
        with self.assertRaises(ValueError):
            MultiStepParamScheduler(**bad_config)

        # Out of order
        bad_config["milestones"] = [3, 8, 6]
        with self.assertRaises(ValueError):
            MultiStepParamScheduler(**bad_config)
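For reference, a hypothetical stand-in for the self._get_valid_config() used above, consistent with the failing milestone lists in the test: with num_updates given, four values require exactly three milestones, ascending and strictly below num_updates.

from fvcore.common.param_scheduler import MultiStepParamScheduler

# Hypothetical valid config: 4 values, 3 milestones, all < num_updates.
config = {
    "values": [0.1, 0.01, 0.001, 0.0001],
    "milestones": [4, 6, 8],
    "num_updates": 12,
}
MultiStepParamScheduler(**config)  # constructs without raising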
Example #7
def build_lr_scheduler(
    cfg: CfgNode, optimizer: torch.optim.Optimizer
) -> torch.optim.lr_scheduler._LRScheduler:
    """
    Build a LR scheduler from config.
    """
    name = cfg.SOLVER.LR_SCHEDULER_NAME

    if name == "WarmupMultiStepLR":
        sched = MultiStepParamScheduler(
            values=[
                cfg.SOLVER.GAMMA**k for k in range(len(cfg.SOLVER.STEPS) + 1)
            ],
            milestones=cfg.SOLVER.STEPS,
            num_updates=cfg.SOLVER.MAX_ITER,
        )
    elif name == "WarmupCosineLR":
        sched = CosineParamScheduler(1, 0)
    else:
        raise ValueError("Unknown LR scheduler: {}".format(name))

    # Add warmup
    warmup_method = cfg.SOLVER.WARMUP_METHOD
    if warmup_method == "constant":
        warmup = ConstantParamScheduler(cfg.SOLVER.WARMUP_FACTOR)
    elif warmup_method == "linear":
        warmup = LinearParamScheduler(cfg.SOLVER.WARMUP_FACTOR, 1.0)
    else:
        raise ValueError("Unknown warmup method: {}".format(warmup_method))
    warmup_ratio = cfg.SOLVER.WARMUP_ITERS / cfg.SOLVER.MAX_ITER
    sched = CompositeParamScheduler(
        [warmup, sched],
        interval_scaling=["rescaled", "fixed"],
        lengths=[warmup_ratio, 1 - warmup_ratio],
    )
    return LRMultiplier(optimizer,
                        multiplier=sched,
                        max_iter=cfg.SOLVER.MAX_ITER)
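A minimal sketch of how the composite behaves, assuming fvcore's API: the warmup is rescaled to fill its fraction of [0, 1), while the main schedule is evaluated at the global training fraction ("fixed"), so its milestones keep their absolute positions.

from fvcore.common.param_scheduler import (
    CompositeParamScheduler,
    LinearParamScheduler,
    MultiStepParamScheduler,
)

warmup = LinearParamScheduler(0.001, 1.0)
main = MultiStepParamScheduler([1, 0.1, 0.01], milestones=[60, 80], num_updates=100)
sched = CompositeParamScheduler(
    [warmup, main],
    interval_scaling=["rescaled", "fixed"],
    lengths=[0.1, 0.9],
)
print(sched(0.05))  # ~0.5 : halfway through warmup, rescaled into [0, 0.1)
print(sched(0.5))   # 1    : main schedule, before the milestone at 60
print(sched(0.7))   # 0.1  : past the 60-update milestone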