def fit(self, iters, content_img, style_img, style_ratio, content_layers=None):
    """
    Run the style-transfer recipe: optimize a parameterized image so that it
    matches the content of ``content_img`` and the style of ``style_img``.

    Args:
        iters (int): number of optimization iterations to run
        content_img (PIL.Image): content image
        style_img (PIL.Image): style image
        style_ratio (float): weight of the style loss
        content_layers (list of str): layers on which to reconstruct content
            (passed through to the loss; None uses the loss's default)

    Returns:
        the optimized image as a CPU tensor (``canvas.render().cpu()``)
    """
    # Configure the perceptual loss with the style / content targets.
    self.loss.to(self.device)
    self.loss.set_style(pil2t(style_img).to(self.device), style_ratio)
    self.loss.set_content(pil2t(content_img).to(self.device), content_layers)

    # The image being optimized, same spatial size as the content image.
    # init_sd=0.00 -> deterministic (non-random) initialization.
    canvas = ParameterizedImg(3,
                              content_img.height,
                              content_img.width,
                              init_sd=0.00)

    # NOTE(review): betas (0.7, 0.7) and eps 1e-5 are unusual RAdamW
    # settings — presumably tuned for style transfer; confirm before reuse.
    self.opt = tch.optim.RAdamW(canvas.parameters(),
                                1e-2, (0.7, 0.7),
                                eps=0.00001,
                                weight_decay=0)

    def forward(_):
        # One optimization step: render the canvas, evaluate the combined
        # loss, and backprop into the canvas parameters.
        self.opt.zero_grad()
        img = canvas()
        loss, losses = self.loss(img)
        loss.backward()
        return {
            'loss': loss,
            'content_loss': losses['content_loss'],
            'style_loss': losses['style_loss'],
            'img': img
        }

    # Drive the step function with a generic Recipe over `iters` iterations.
    loop = Recipe(forward, range(iters))
    loop.register('canvas', canvas)
    loop.register('model', self)
    loop.callbacks.add_callbacks([
        tcb.Counter(),
        tcb.WindowedMetricAvg('loss'),
        tcb.WindowedMetricAvg('content_loss'),
        tcb.WindowedMetricAvg('style_loss'),
        tcb.Log('img', 'img'),
        tcb.VisdomLogger(visdom_env=self.visdom_env, log_every=10),
        tcb.StdoutLogger(log_every=10),
        # The optimizer step runs as a callback, after `forward`'s backward().
        tcb.Optimizer(self.opt, log_lr=True),
        # Scheduler steps every batch (not every epoch).
        tcb.LRSched(torch.optim.lr_scheduler.ReduceLROnPlateau(
            self.opt, threshold=0.001, cooldown=500),
            step_each_batch=True)
    ])
    loop.to(self.device)
    # A single "epoch" over range(iters).
    loop.run(1)
    return canvas.render().cpu()
def CrossEntropyClassification(model,
                               train_loader,
                               test_loader,
                               classes,
                               lr=3e-3,
                               beta1=0.9,
                               wd=1e-2,
                               visdom_env='main',
                               test_every=1000,
                               log_every=100):
    """
    A Classification recipe with a default forward training / testing pass
    using cross entropy, and extended with RAdamW and ReduceLROnPlateau.

    Args:
        model (nn.Module): a model learnable with cross entropy
        train_loader (DataLoader): Training set dataloader
        test_loader (DataLoader): Testing set dataloader
        classes (list of str): classes name, in order
        lr (float): the learning rate
        beta1 (float): RAdamW's beta1
        wd (float): weight decay
        visdom_env (str): name of the visdom environment to use, or None
            for not using Visdom (default: 'main')
        test_every (int): testing frequency, in number of iterations
            (default: 1000)
        log_every (int): logging frequency, in number of iterations
            (default: 100)
    """
    def train_step(batch):
        # Backward here; the actual optimizer step is the Optimizer callback.
        x, y = batch
        pred = model(x)
        loss = torch.nn.functional.cross_entropy(pred, y)
        loss.backward()
        return {'loss': loss, 'pred': pred}

    def validation_step(batch):
        # No backward during evaluation.
        x, y = batch
        pred = model(x)
        loss = torch.nn.functional.cross_entropy(pred, y)
        return {'loss': loss, 'pred': pred}

    loop = Classification(model,
                          train_step,
                          validation_step,
                          train_loader,
                          test_loader,
                          classes,
                          visdom_env=visdom_env,
                          test_every=test_every,
                          log_every=log_every)

    opt = RAdamW(model.parameters(),
                 lr=lr,
                 betas=(beta1, 0.999),
                 weight_decay=wd)
    loop.callbacks.add_callbacks([
        tcb.Optimizer(opt, log_lr=True),
        tcb.LRSched(torch.optim.lr_scheduler.ReduceLROnPlateau(opt))
    ])
    return loop
def MixupClassification(model,
                        train_loader,
                        test_loader,
                        classes,
                        *,
                        lr=3e-3,
                        beta1=0.9,
                        wd=1e-2,
                        visdom_env='main',
                        test_every=1000,
                        log_every=100):
    """
    A Classification recipe with a default forward training / testing pass
    using cross entropy and mixup, and extended with RAdamW and
    ReduceLROnPlateau.

    Args:
        model (nn.Module): a model learnable with cross entropy
        train_loader (DataLoader): Training set dataloader. Must have soft
            targets. Should be a DataLoader loading a MixupDataset or
            compatible.
        test_loader (DataLoader): Testing set dataloader. Dataset must have
            categorical targets.
        classes (list of str): classes name, in order
        lr (float): the learning rate
        beta1 (float): RAdamW's beta1
        wd (float): weight decay
        visdom_env (str): name of the visdom environment to use, or None
            for not using Visdom (default: 'main')
        test_every (int): testing frequency, in number of iterations
            (default: 1000)
        log_every (int): logging frequency, in number of iterations
            (default: 100)
    """
    from torchelie.loss import continuous_cross_entropy

    def train_step(batch):
        # Mixup produces soft (continuous) targets, hence the dedicated loss.
        # No 'pred' returned: mixed images have no single true class to
        # compare against, so accuracy is not computed on the training side.
        x, y = batch
        pred = model(x)
        loss = continuous_cross_entropy(pred, y)
        loss.backward()
        return {'loss': loss}

    def validation_step(batch):
        # Test targets are categorical, so plain cross entropy applies.
        x, y = batch
        pred = model(x)
        loss = torch.nn.functional.cross_entropy(pred, y)
        return {'loss': loss, 'pred': pred}

    # Built on TrainAndTest (not Classification) because the training side
    # cannot use the standard accuracy callbacks with soft targets; the
    # classification callbacks are attached manually below.
    loop = TrainAndTest(model,
                        train_step,
                        validation_step,
                        train_loader,
                        test_loader,
                        visdom_env=visdom_env,
                        test_every=test_every,
                        log_every=log_every)

    loop.callbacks.add_callbacks([
        tcb.WindowedMetricAvg('loss'),
    ])
    loop.register('classes', classes)

    loop.test_loop.callbacks.add_callbacks([
        tcb.AccAvg(post_each_batch=False),
        tcb.WindowedMetricAvg('loss', False),
    ])

    if visdom_env is not None:
        loop.callbacks.add_epilogues(
            [tcb.ImageGradientVis(), tcb.MetricsTable()])

    # A confusion matrix is only legible for a small number of classes.
    if len(classes) <= 25:
        loop.test_loop.callbacks.add_callbacks([
            tcb.ConfusionMatrix(classes),
        ])

    loop.test_loop.callbacks.add_callbacks([
        tcb.ClassificationInspector(30, classes, False),
        tcb.MetricsTable(False)
    ])

    opt = RAdamW(model.parameters(),
                 lr=lr,
                 betas=(beta1, 0.999),
                 weight_decay=wd)
    loop.callbacks.add_callbacks([
        tcb.Optimizer(opt, log_lr=True),
        tcb.LRSched(torch.optim.lr_scheduler.ReduceLROnPlateau(opt))
    ])
    return loop
# NOTE(review): this re-defines CrossEntropyClassification; an earlier
# definition with the same name exists above and is shadowed by this one.
def CrossEntropyClassification(model,
                               train_loader,
                               test_loader,
                               classes,
                               lr=3e-3,
                               beta1=0.9,
                               wd=1e-2,
                               visdom_env='main',
                               test_every=1000,
                               log_every=100):
    """
    Extends Classification with default cross entropy forward passes.
    Also adds RAdamW and ReduceLROnPlateau.

    Inherited training callbacks:

    - AccAvg for displaying accuracy
    - WindowedMetricAvg for displaying loss
    - ConfusionMatrix if len(classes) <= 25
    - ClassificationInspector
    - MetricsTable
    - ImageGradientVis
    - Counter for counting iterations, connected to the testing loop as well
    - VisdomLogger
    - StdoutLogger

    Training callbacks:

    - Optimizer with RAdamW
    - LRSched with ReduceLROnPlateau

    Testing:
        Testing loop is in :code:`.test_loop`.

    Inherited testing callbacks:

    - AccAvg
    - WindowedMetricAvg
    - ConfusionMatrix if :code:`len(classes) <= 25`
    - ClassificationInspector
    - MetricsTable
    - VisdomLogger
    - StdoutLogger
    - Checkpoint saving the best testing loss

    Args:
        model (nn.Module): a model learnable with cross entropy
        train_loader (DataLoader): Training set dataloader
        test_loader (DataLoader): Testing set dataloader
        classes (list of str): classes name, in order
        lr (float): the learning rate
        beta1 (float): RAdamW's beta1
        wd (float): weight decay
        visdom_env (str): name of the visdom environment to use, or None
            for not using Visdom (default: None)
        test_every (int): testing frequency, in number of iterations
            (default: 1000)
        log_every (int): logging frequency, in number of iterations
            (default: 1000)
    """
    def _forward(batch):
        # Shared forward pass: returns (logits, cross-entropy loss).
        inputs, targets = batch
        logits = model(inputs)
        return logits, torch.nn.functional.cross_entropy(logits, targets)

    def train_step(batch):
        logits, loss = _forward(batch)
        # Backward here; the optimizer step happens in the Optimizer callback.
        loss.backward()
        return {'loss': loss, 'pred': logits}

    def validation_step(batch):
        logits, loss = _forward(batch)
        return {'loss': loss, 'pred': logits}

    recipe = Classification(model,
                            train_step,
                            validation_step,
                            train_loader,
                            test_loader,
                            classes,
                            visdom_env=visdom_env,
                            test_every=test_every,
                            log_every=log_every)

    optimizer = RAdamW(model.parameters(),
                       lr=lr,
                       betas=(beta1, 0.999),
                       weight_decay=wd)
    scheduler = torch.optim.lr_scheduler.ReduceLROnPlateau(optimizer)
    recipe.callbacks.add_callbacks([
        tcb.Optimizer(optimizer, log_lr=True),
        tcb.LRSched(scheduler)
    ])
    return recipe