def load_extras(self):
    """Build the learning-rate scheduler and the training-metrics meter."""

    # Warmup-aware LR schedule driven by the "optim" section of the config.
    def _warmup_schedule(step):
        return warmup_lr_lambda(step, self.config["optim"])

    self.scheduler = optim.lr_scheduler.LambdaLR(
        self.optimizer, lr_lambda=_warmup_schedule
    )

    # Metric accumulator for the training split.
    self.meter = Meter(split="train")
def __init__(self, optimizer, config):
    """Wrap a ``lr_scheduler`` class selected by ``config["scheduler"]``.

    Args:
        optimizer: optimizer whose learning rate will be scheduled.
        config (dict): scheduler configuration. Copied so the caller's
            dict is never mutated. When no ``"scheduler"`` key is
            present, defaults to ``LambdaLR`` with a warmup schedule.
            A value of ``"Null"`` disables scheduling (``self.scheduler``
            is left unset).
    """
    self.optimizer = optimizer
    self.config = config.copy()
    if "scheduler" in self.config:
        self.scheduler_type = self.config["scheduler"]
    else:
        self.scheduler_type = "LambdaLR"

        # Default schedule: warmup lambda closing over the copied config.
        def scheduler_lambda_fn(x):
            return warmup_lr_lambda(x, self.config)

        self.config["lr_lambda"] = scheduler_lambda_fn
    if self.scheduler_type != "Null":
        # filter_kwargs may introspect self.scheduler's signature, so the
        # class must be assigned before the kwargs are filtered.
        self.scheduler = getattr(lr_scheduler, self.scheduler_type)
        # BUG FIX: filter the *copied* config (self.config), which carries
        # the injected "lr_lambda" entry — the caller's original dict does
        # not, so the default LambdaLR would be built without its lambda.
        scheduler_args = self.filter_kwargs(self.config)
        self.scheduler = self.scheduler(optimizer, **scheduler_args)