Example #1
0
    def test_get_optimizer(self):
        """Every name registered in AVAILABLE_OPTIMIZERS resolves via
        optim.get_optimizer() to a class that can be instantiated against a
        dummy model's parameters and is of the expected optimizer type.
        """
        model = TempModel()

        for opt_name in AVAILABLE_OPTIMIZERS:
            # fused_adam is a CUDA-only optimizer; instantiating it on a
            # CPU-only machine raises, so skip it when no GPU is present.
            if opt_name == 'fused_adam' and not torch.cuda.is_available():
                continue
            opt_cls = optim.get_optimizer(opt_name)
            opt = opt_cls(model.parameters(), lr=self.INITIAL_LR)

            assert isinstance(opt, AVAILABLE_OPTIMIZERS[opt_name])
Example #2
0
    def test_get_optimizer(self):
        """Check that each registered optimizer name resolves to a class that
        instantiates cleanly and matches the type recorded in
        AVAILABLE_OPTIMIZERS.
        """
        model = TempModel()

        for name, expected_cls in AVAILABLE_OPTIMIZERS.items():
            # fused_adam requires CUDA; skip it when no GPU is available.
            if name == 'fused_adam' and not torch.cuda.is_available():
                continue
            resolved_cls = optim.get_optimizer(name)
            instance = resolved_cls(model.parameters(), lr=self.INITIAL_LR)
            assert isinstance(instance, expected_cls)
Example #3
0
    def test_get_optimizer(self):
        """Resolve every registered optimizer name through
        optim.get_optimizer(), instantiate it against a dummy model, and
        verify the instance type matches AVAILABLE_OPTIMIZERS.
        """
        model = TempModel()

        for name, expected_cls in AVAILABLE_OPTIMIZERS.items():
            # fused_adam only runs on CUDA; skip when no GPU is present.
            if name == 'fused_adam' and not torch.cuda.is_available():
                continue
            resolved_cls = optim.get_optimizer(name)
            # Adafactor's default mode uses relative_step without any lr,
            # so it is the one optimizer constructed without an lr kwarg.
            ctor_kwargs = {} if name == 'adafactor' else {'lr': self.INITIAL_LR}
            instance = resolved_cls(model.parameters(), **ctor_kwargs)
            assert isinstance(instance, expected_cls)