def test_step(self, batch, batch_idx):
    ''' Perform a single forward pass of the student network on a test batch '''
    data, target = batch
    logits = self.student(data)
    # loss = F.cross_entropy(logits, target)
    loss = utils.focal_loss(logits, target, gamma=2, ignore_index=255)
    self.test_loss += loss.item()
    self.test_metrics.update(
        logits.max(1)[1].detach().cpu().numpy().astype('uint8'),
        target.detach().cpu().numpy().astype('uint8'))
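# NOTE: utils.focal_loss is referenced above but defined elsewhere. A minimal
# sketch consistent with the call signature used here (a `gamma` focusing
# parameter and an `ignore_index` for void pixels) is given below as a
# commented reference. This is an assumption about the helper, not the
# repository's actual implementation; it presumes `import torch` and
# `import torch.nn.functional as F` at module level.
#
# def focal_loss(logits, target, gamma=2, ignore_index=255):
#     # Per-position cross-entropy; ignore_index positions contribute 0.
#     ce = F.cross_entropy(logits, target, reduction='none',
#                          ignore_index=ignore_index)
#     pt = torch.exp(-ce)  # model's probability for the true class
#     focal = (1.0 - pt) ** gamma * ce  # down-weight easy, confident positions
#     valid = target != ignore_index
#     return focal[valid].mean()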
def test_step(self, batch, batch_idx):
    ''' Perform a single forward pass on a test batch '''
    data, target = batch
    logits = self.model(data)
    loss = utils.focal_loss(logits, target, gamma=2, ignore_index=255)
    self.test_loss += loss.item()
    self.test_metrics.update(
        logits.max(1)[1].detach().cpu().numpy().astype('uint8'),
        target.detach().cpu().numpy().astype('uint8'))
def test_step(self, batch, batch_idx):
    ''' Test step with knowledge distillation: focal loss on the labels plus
    a weighted soft cross-entropy term against the teacher's logits '''
    data, target = batch
    s_logits = self.model(data)
    ce_loss = utils.focal_loss(s_logits, target, gamma=2, ignore_index=255)
    with torch.no_grad():
        t_logits = self.teacher(data)
    kd_loss = utils.soft_cross_entropy(s_logits, t_logits)
    # loss = kd_loss
    loss = ce_loss + self.hparams.kd_weight * kd_loss
    self.test_loss += loss.item()
    self.test_ce_loss += ce_loss.item()
    self.test_kd_loss += kd_loss.item()
    self.test_metrics.update(
        s_logits.max(1)[1].detach().cpu().numpy().astype('uint8'),
        target.detach().cpu().numpy().astype('uint8'))
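# NOTE: utils.soft_cross_entropy is also defined elsewhere. Since the call
# above passes only student and teacher logits (no explicit temperature), a
# minimal sketch of such a distillation loss is given below as a commented
# reference. This is an assumption, not the repository's actual helper, which
# may differ (e.g., temperature scaling or a different reduction).
#
# def soft_cross_entropy(s_logits, t_logits):
#     # Cross-entropy between the teacher's soft class distribution and the
#     # student's log-probabilities, averaged over batch and spatial positions.
#     t_prob = F.softmax(t_logits, dim=1)
#     s_log_prob = F.log_softmax(s_logits, dim=1)
#     return -(t_prob * s_log_prob).sum(dim=1).mean()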
def train_step(self, batch, batch_idx):
    ''' Perform a single train step (forward and backward) on a given batch
    and update the running loss and metrics '''
    data, target = batch
    self.optimizer.zero_grad()
    logits = self.model(data)
    loss = utils.focal_loss(logits, target, gamma=2, ignore_index=255)
    self.train_loss += loss.item()
    loss.backward()
    self.optimizer.step()
    self.train_metrics.update(
        logits.max(1)[1].detach().cpu().numpy().astype('uint8'),
        target.detach().cpu().numpy().astype('uint8'))