def test_lr_injection(self):
    """End-to-end check of the LR-injection mechanism.

    Builds a dense model with ``allow_lr_injection=True``, verifies the
    injection value initializes to 1, that it can be overwritten via the
    module-level setter, and that after running the net the injected value
    propagates into the ``lr_multiplier`` blob.
    """
    model, perfect_model, data, label = self._createDense()
    opt = self.build_optimizer(model, max_gradient_norm=1, allow_lr_injection=True)

    # Feed one batch and materialize the nets in the global workspace.
    workspace.FeedBlob('data', data[0])
    workspace.FeedBlob('label', label[0])
    workspace.RunNetOnce(model.param_init_net)
    workspace.CreateNet(model.net, True)

    # Test LR injection initialized properly
    self.assertIsNotNone(opt._lr_multiplier)
    self.assertEqual(optimizer.get_lr_injection(), 1)

    # Test that we're able to modify the value of the lr_injection
    optimizer.set_lr_injection(0)
    self.assertEqual(optimizer.get_lr_injection(), 0)

    # Test that setting the lr_injector properly propagates to the
    # lr_multiplier. Here, we have both lr_injector and norm_ratio that
    # affect the lr_multiplier; with injection 0 the product must be 0.
    workspace.RunNet(model.net.Proto().name)
    self.assertEqual(workspace.FetchBlob('lr_multiplier'), 0)
def test_lr_injection(self):
    """Verify the LR-injection knob is created, writable, and reaches lr_multiplier.

    Starts from a dense model built with ``allow_lr_injection=True``:
    the injection value must default to 1, accept an override through the
    module-level setter, and — once the net runs — drive the
    ``lr_multiplier`` blob (alongside the gradient-norm ratio).
    """
    model, unused_perfect_model, data, label = self._createDense()
    built_opt = self.build_optimizer(
        model,
        max_gradient_norm=1,
        allow_lr_injection=True,
    )

    # Load a single batch into the workspace and instantiate the nets.
    for blob_name, blob_value in (('data', data[0]), ('label', label[0])):
        workspace.FeedBlob(blob_name, blob_value)
    workspace.RunNetOnce(model.param_init_net)
    workspace.CreateNet(model.net, True)

    # The optimizer must have wired up an lr_multiplier, and the
    # injection value starts at the identity value of 1.
    self.assertIsNotNone(built_opt._lr_multiplier)
    self.assertEqual(optimizer.get_lr_injection(), 1)

    # The injection value is mutable through the module-level setter.
    optimizer.set_lr_injection(0)
    self.assertEqual(optimizer.get_lr_injection(), 0)

    # After a run, both the injector and the norm ratio feed into
    # lr_multiplier; with the injector at 0 the result must be 0
    # regardless of the norm ratio.
    net_name = model.net.Proto().name
    workspace.RunNet(net_name)
    self.assertEqual(workspace.FetchBlob('lr_multiplier'), 0)