Example #1
    def on_batch_start(self, runner: IRunner):
        """Batch start hook.

        Args:
            runner: current runner
        """
        if not self.is_needed:
            return

        if self.alpha > 0:
            # mixing coefficient sampled from a symmetric Beta(alpha, alpha) distribution
            self.lam = np.random.beta(self.alpha, self.alpha)
        else:
            self.lam = 1

        self.index = torch.randperm(runner.input[self.fields[0]].shape[0])
        # Tensor.to() is not in-place; keep the tensor moved to the runner's device
        self.index = self.index.to(runner.device)

        for f in self.fields:
            # interpolate each input field with its permuted counterpart
            runner.input[f] = (self.lam * runner.input[f] +
                               (1 - self.lam) * runner.input[f][self.index])
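
The hook above implements MixUp: it draws a mixing coefficient from Beta(alpha, alpha), permutes the batch, and linearly interpolates each input field with its shuffled counterpart. Below is a minimal standalone sketch of the same interpolation on a plain tensor batch, independent of Catalyst; the helper name mixup_batch and the alpha value are chosen here only for illustration.

import numpy as np
import torch

def mixup_batch(x: torch.Tensor, alpha: float = 0.2) -> torch.Tensor:
    """Interpolate a batch with a shuffled copy of itself (hypothetical helper)."""
    # mixing coefficient from a symmetric Beta distribution, as in the callback
    lam = np.random.beta(alpha, alpha) if alpha > 0 else 1.0
    # random permutation of batch indices on the same device as the input
    index = torch.randperm(x.shape[0], device=x.device)
    return lam * x + (1 - lam) * x[index]

# usage: mix a dummy batch of 8 RGB images
batch = torch.randn(8, 3, 32, 32)
mixed = mixup_batch(batch, alpha=0.2)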
Example #2
    def on_loader_end(self, state: IRunner):
        """Loader end hook.

        Args:
            state: current runner
        """
        lr = state.scheduler.get_last_lr()
        state.epoch_metrics["lr"] = lr[0]
        if state.is_train_loader:
            state.scheduler.step()
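
This hook records the learning rate used during the pass into the epoch metrics and advances the scheduler once per training-loader pass. A standalone sketch of the same get_last_lr()/step() pattern with a plain PyTorch scheduler follows; the model, optimizer, and StepLR hyperparameters are illustrative only.

import torch

model = torch.nn.Linear(10, 1)
optimizer = torch.optim.SGD(model.parameters(), lr=0.1)
scheduler = torch.optim.lr_scheduler.StepLR(optimizer, step_size=2, gamma=0.5)

for epoch in range(5):
    # ... training loop over the loader would go here ...
    last_lr = scheduler.get_last_lr()[0]  # lr that was in effect for this epoch
    print(f"epoch {epoch}: lr={last_lr}")
    scheduler.step()  # advance once per epoch, mirroring on_loader_end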