def optimizer(self, model, task, lr):
    if task == 'mnist':
        # Previously used lr0=0.001 for the first parameter group and lr1=0.01 for the second
        params = model.all_params()
        optimizers = [
            optim.SGD(params[0], lr=lr, momentum=0.9, weight_decay=0.0005),
            optim.SGD(params[1], lr=lr, momentum=0.9)
        ]
        schedulers = [lr_scheduler.StepLR(optimizers[0], step_size=10, gamma=0.5)]
    elif task == 'speaker':
        # Per-block SincNet optimizers, plus SGD for the loss module's own parameters
        optimizers = sincnet_optims(model, lr)
        optimizers.append(optim.SGD(self.loss_module.parameters(), lr=lr))
        schedulers = []
    elif task == 'sts':
        optimizers = [optim.RMSprop(model.parameters(), lr=lr)]
        schedulers = []
    else:
        raise ValueError('Task must be one of mnist/speaker/sts')
    return base.Optimizer(optimizers, schedulers)
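The snippets assume PyTorch's torch.optim imported as optim and torch.optim.lr_scheduler as lr_scheduler. The method packs everything into a base.Optimizer, whose implementation is not part of this excerpt; a minimal sketch of what such a wrapper presumably does, assuming it simply fans calls out to every wrapped optimizer and scheduler (the class and method names below are assumptions, not the project's actual API):

# Hypothetical sketch of the base.Optimizer wrapper returned above; the real
# class in the source project may expose a different interface.
class Optimizer:

    def __init__(self, optimizers, schedulers):
        self.optimizers = optimizers    # one torch.optim optimizer per parameter group/module
        self.schedulers = schedulers    # zero or more torch.optim.lr_scheduler objects

    def zero_grad(self):
        # Clear gradients on every wrapped optimizer before the backward pass
        for o in self.optimizers:
            o.zero_grad()

    def step(self):
        # Apply one update with every wrapped optimizer
        for o in self.optimizers:
            o.step()

    def scheduler_step(self):
        # Advance every learning-rate scheduler, typically once per epoch
        for s in self.schedulers:
            s.step()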
def optimizer(self, model, task, lr):
    if task == 'mnist':
        # Was using lr=0.0001
        optimizers = [
            optim.SGD(model.parameters(),
                      lr=lr,
                      momentum=0.9,
                      weight_decay=0.0005)
        ]
        schedulers = [lr_scheduler.StepLR(optimizers[0], step_size=5, gamma=0.8)]
    elif task == 'speaker':
        optimizers = sincnet_optims(model, lr)
        schedulers = []
    elif task == 'sts':
        optimizers = [
            optim.RMSprop(model.parameters(), lr=lr, momentum=0.9)
        ]
        schedulers = []
    else:
        raise ValueError('Task must be one of mnist/speaker/sts')
    return base.Optimizer(optimizers, schedulers)
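Both examples above delegate the 'speaker' branch to sincnet_optims(model, lr), a helper that is not shown here. A hedged sketch, assuming it follows the original SincNet training recipe of one RMSprop optimizer per major block (the sub-module names below are purely illustrative):

from torch import optim

# Hypothetical sketch of sincnet_optims; attribute names (cnn, dnn1, dnn2) are
# illustrative assumptions, not the project's actual model layout.
def sincnet_optims(model, lr):
    return [
        optim.RMSprop(model.cnn.parameters(), lr=lr, alpha=0.95, eps=1e-8),
        optim.RMSprop(model.dnn1.parameters(), lr=lr, alpha=0.95, eps=1e-8),
        optim.RMSprop(model.dnn2.parameters(), lr=lr, alpha=0.95, eps=1e-8),
    ]

Something along these lines would explain why the first example only has to append one extra SGD optimizer for the loss module's own parameters.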
Example #3
def optimizer(self, model: MetricNet, lr: float):
    optimizers = self.get_optimizers(model, lr)
    return base.Optimizer(optimizers, reduce_lr(optimizers))
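reduce_lr(optimizers) is another project helper not included in this excerpt. A plausible sketch, assuming it simply pairs every optimizer with a ReduceLROnPlateau scheduler (the factor and patience defaults here are made up):

from torch.optim import lr_scheduler

# Hypothetical sketch of reduce_lr; the real helper and its defaults may differ.
def reduce_lr(optimizers, factor=0.5, patience=5):
    return [
        lr_scheduler.ReduceLROnPlateau(opt, factor=factor, patience=patience)
        for opt in optimizers
    ]

Unlike StepLR, ReduceLROnPlateau.step() expects a metric value such as the validation loss, so whatever steps these schedulers would need to pass one in.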
def optimizer(self, model, task, lr=0.0001):
    return base.Optimizer([optim.RMSprop(model.parameters(), lr=lr)], [])
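For reference, a self-contained toy loop showing how lists of optimizers and schedulers like the ones built above are usually driven; none of the names here come from the source project:

import torch
from torch import nn, optim

# Toy setup mirroring the mnist branch: one SGD optimizer plus a StepLR scheduler.
model = nn.Linear(10, 2)
optimizers = [optim.SGD(model.parameters(), lr=0.01, momentum=0.9, weight_decay=0.0005)]
schedulers = [optim.lr_scheduler.StepLR(optimizers[0], step_size=10, gamma=0.5)]

criterion = nn.CrossEntropyLoss()
x, y = torch.randn(8, 10), torch.randint(0, 2, (8,))

for epoch in range(3):
    for o in optimizers:
        o.zero_grad()
    loss = criterion(model(x), y)
    loss.backward()
    for o in optimizers:
        o.step()
    for s in schedulers:
        s.step()    # epoch-level LR decay, as in the StepLR branches above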