Example 1
import torch
from torch.optim.lr_scheduler import StepLR
# Optimizer is the wrapper class under test; import it from the package being
# tested (the import path depends on the project and is omitted here).

def test_update(self):
    params = [torch.nn.Parameter(torch.randn(2, 3, 4))]
    optimizer = Optimizer(torch.optim.Adam(params, lr=1), max_grad_norm=5)
    scheduler = StepLR(optimizer.optimizer, 1, gamma=0.1)
    optimizer.set_scheduler(scheduler)
    # After the second update the StepLR(step_size=1, gamma=0.1) schedule has
    # advanced one step, so the learning rate drops from 1 to 0.1.
    optimizer.update(10, 0)
    optimizer.update(10, 1)
    self.assertEqual(optimizer.optimizer.param_groups[0]['lr'], 0.1)
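
For reference, the learning-rate arithmetic the first test asserts can be reproduced with plain PyTorch, without the Optimizer wrapper. The short sketch below is an illustration (not part of the test suite): it steps a bare Adam optimizer once under StepLR and shows the learning rate dropping from 1 to 0.1.

import torch
from torch.optim.lr_scheduler import StepLR

# One parameter group with an initial lr of 1, as in the test above.
params = [torch.nn.Parameter(torch.randn(2, 3, 4))]
adam = torch.optim.Adam(params, lr=1)
scheduler = StepLR(adam, step_size=1, gamma=0.1)

print(adam.param_groups[0]['lr'])  # 1
adam.step()       # optimizer steps before the scheduler (PyTorch >= 1.1 ordering)
scheduler.step()  # one "epoch" elapsed: StepLR multiplies lr by gamma
print(adam.param_groups[0]['lr'])  # 0.1
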
def test_update(self):
    # Variant of the same test against a constructor-style API: the wrapper is
    # built from the optimizer class plus decay settings instead of a scheduler.
    optim = Optimizer(torch.optim.SGD,
                      lr=1,
                      decay_after_epoch=5,
                      lr_decay=0.5)
    params = [torch.nn.Parameter(torch.randn(2, 3, 4))]
    optim.set_parameters(params)
    # The assertion checks that, once the epoch passes decay_after_epoch=5,
    # the initial lr of 1 has been multiplied by lr_decay=0.5.
    optim.update(0, 10)
    self.assertEqual(optim.optimizer.param_groups[0]['lr'], 0.5)