Example #1
    # Assumes module-level imports:
    #   import torch
    #   from torch.optim.lr_scheduler import CyclicLR
    def get_optimizer(self):
        if self.optimizer_name == 'Adam-Exp':
            # Adam with step decay: the LR is multiplied by 0.9 every 2 scheduler steps.
            opt = torch.optim.Adam(self.parameters(), lr=self.lr)
            sch = torch.optim.lr_scheduler.StepLR(opt, step_size=2, gamma=0.9)
            return opt, sch
        elif self.optimizer_name == 'Adam-Tri':
            opt = torch.optim.Adam(self.parameters(), lr=self.lr)
            sch = CyclicLR(opt,
                           base_lr=5e-5,
                           max_lr=2e-4,
                           step_size_up=200,       # batches per rising half-cycle
                           mode='triangular',
                           cycle_momentum=False)   # required: Adam has no momentum
            return opt, sch
        elif self.optimizer_name == 'SGD':
            opt = torch.optim.SGD(self.parameters(), lr=self.lr)
            sch = CyclicLR(opt,
                           base_lr=1e-4,
                           max_lr=5e-4,
                           step_size_up=20,
                           mode='triangular',
                           cycle_momentum=False)   # SGD here is built without momentum
            return opt, sch
        else:
            raise NotImplementedError(self.optimizer_name)
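
The returned pair is consumed in an ordinary training loop. Note that CyclicLR is meant to be stepped once per batch (its step_size_up is counted in batches), whereas the StepLR branch would normally be stepped once per epoch. Below is a minimal, self-contained sketch of the per-batch pattern; the TinyNet class, dummy data, and loop are illustrative assumptions, not part of the original snippet:

import torch
import torch.nn as nn
from torch.optim.lr_scheduler import CyclicLR

class TinyNet(nn.Module):
    # Hypothetical module mirroring the interface get_optimizer() relies on.
    def __init__(self, optimizer_name, lr):
        super().__init__()
        self.optimizer_name = optimizer_name
        self.lr = lr
        self.fc = nn.Linear(8, 1)

    def forward(self, x):
        return self.fc(x)

    def get_optimizer(self):
        # Same branching as Example #1, reduced to the SGD case.
        if self.optimizer_name == 'SGD':
            opt = torch.optim.SGD(self.parameters(), lr=self.lr)
            sch = CyclicLR(opt, base_lr=1e-4, max_lr=5e-4,
                           step_size_up=20, mode='triangular',
                           cycle_momentum=False)
            return opt, sch
        raise NotImplementedError(self.optimizer_name)

model = TinyNet('SGD', lr=1e-4)
opt, sch = model.get_optimizer()
x, y = torch.randn(32, 8), torch.randn(32, 1)   # dummy batch

for step in range(100):
    opt.zero_grad()
    loss = nn.functional.mse_loss(model(x), y)
    loss.backward()
    opt.step()
    sch.step()   # advance CyclicLR once per batch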
Example #2
    # Assumes module-level imports:
    #   import torch
    #   from torch.optim.lr_scheduler import CyclicLR
    def get_optimizer(self):
        if self.optimizer_name == 'Adam':
            opt = torch.optim.Adam(self.parameters(), lr=self.lr)
            sch = CyclicLR(opt,
                           base_lr=1e-4,
                           max_lr=5e-4,
                           step_size_up=200,
                           mode='triangular',
                           cycle_momentum=False)   # required: Adam has no momentum
            return opt, sch
        elif self.optimizer_name == 'SGD':
            opt = torch.optim.SGD(self.parameters(), lr=self.lr)
            sch = CyclicLR(opt,
                           base_lr=1e-4,
                           max_lr=5e-4,
                           step_size_up=20,
                           mode='triangular',
                           cycle_momentum=False)   # SGD here is built without momentum
            return opt, sch
        else:
            raise NotImplementedError(self.optimizer_name)
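
The only knob that differs between the two branches is step_size_up: 200 batches for Adam versus 20 for SGD, i.e. how many batches the LR takes to climb from base_lr to max_lr before descending symmetrically. A quick standalone probe of what 'triangular' mode with step_size_up=20 produces (for illustration only; the throwaway parameter exists just so the optimizer has something to manage):

import torch
from torch.optim.lr_scheduler import CyclicLR

params = [torch.nn.Parameter(torch.zeros(1))]
opt = torch.optim.SGD(params, lr=1e-4)
sch = CyclicLR(opt, base_lr=1e-4, max_lr=5e-4,
               step_size_up=20, mode='triangular', cycle_momentum=False)

for step in range(41):
    if step % 10 == 0:
        print(step, sch.get_last_lr()[0])
    opt.step()   # PyTorch warns if the scheduler steps before the optimizer
    sch.step()
# Prints (up to floating-point noise) an LR that rises and falls linearly:
#   step 0 -> 1e-4, 10 -> 3e-4, 20 -> 5e-4, 30 -> 3e-4, 40 -> 1e-4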