Example 1
def common_step(self, batch, batch_nb, train=True):
    mix, clean = batch
    # Bring both waveforms to (batch, channels, time).
    mix = unsqueeze_to_3d(mix)
    clean = unsqueeze_to_3d(clean)

    # Encode mixture and clean signal into the time-frequency domain.
    mix_tf = self.model.forward_encoder(mix)
    clean_tf = self.model.forward_encoder(clean)

    # Estimate masks from the mixture and apply them to it.
    est_masks = self.model.forward_masker(mix_tf)
    est_tf = self.model.apply_masks(mix_tf, est_masks)

    # Loss between the masked mixture and the clean TF representation.
    return self.loss_func(est_tf, clean_tf)
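The unsqueeze_to_3d helper is not shown in these excerpts. Below is a minimal sketch of what such a helper typically does, assuming waveforms arrive as (time,) or (batch, time) and the models expect (batch, channels, time); this is an assumption, not the project's actual implementation:

import torch

def unsqueeze_to_3d(x: torch.Tensor) -> torch.Tensor:
    # Hypothetical helper: promote 1D/2D waveforms to (batch, channels, time).
    if x.ndim == 1:       # (time,) -> (1, 1, time)
        return x[None, None, :]
    if x.ndim == 2:       # (batch, time) -> (batch, 1, time)
        return x.unsqueeze(1)
    return x              # already 3D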
Example 2
def common_step(self, batch, batch_nb, train=True):
    mix, clean = batch
    mix = unsqueeze_to_3d(mix)
    clean = unsqueeze_to_3d(clean)

    mix_tf = self.model.forward_encoder(mix)
    clean_tf = self.model.forward_encoder(clean)

    # Ideal ratio mask (IRM): ratio of clean to mixture magnitudes, clipped to 1.
    true_irm = torch.minimum(mag(clean_tf) / mag(mix_tf), torch.tensor(1).type_as(mix_tf))
    # The masker is trained to regress the IRM directly.
    est_irm = self.model.forward_masker(mix_tf)
    loss = self.loss_func(est_irm, true_irm)
    return loss
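The mag helper is also undefined here. A rough sketch under the assumption that the encoder output stacks real and imaginary parts along one axis (the exact layout depends on the filterbank, so treat the chunking dimension as a placeholder):

import torch

def mag(tf_rep: torch.Tensor, dim: int = -2, eps: float = 1e-8) -> torch.Tensor:
    # Hypothetical magnitude helper: assumes [real_bins; imag_bins] stacked along `dim`.
    real, imag = torch.chunk(tf_rep, 2, dim=dim)
    return torch.sqrt(real ** 2 + imag ** 2 + eps)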
Example 3
def common_step(self, batch, batch_nb, train=True):
    mix, clean = batch
    mix = unsqueeze_to_3d(mix)
    clean = unsqueeze_to_3d(clean)

    mix_tf = self.model.forward_encoder(mix)
    clean_tf = self.model.forward_encoder(clean)

    # Power spectrograms (squared magnitudes) of clean and mixture.
    clean_pow = torch.pow(mag(clean_tf), 2)
    mix_pow = torch.pow(mag(mix_tf), 2)

    # The VAE returns the reconstructed power spectrum plus the
    # posterior mean and log-variance.
    est_pow, mu, logvar = self.model.forward_vae_mu_logvar(mix_pow)

    loss, rec_loss, kl_loss = self.loss_func(est_pow, clean_pow, mu, logvar)
    self.log("rec_loss", rec_loss, logger=True)
    self.log("kl_loss", kl_loss, logger=True)
    return loss
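forward_vae_mu_logvar belongs to the model and is not reproduced in these examples. The class below is only an illustrative sketch of the pattern such a method usually follows (encode to mu/logvar, reparameterize, decode); the layer sizes, names, and assumed (batch, frames, bins) input layout are placeholders, not the project's architecture:

import torch
from torch import nn

class TinyVAE(nn.Module):
    def __init__(self, n_bins: int = 257, latent: int = 32):
        super().__init__()
        self.enc = nn.Linear(n_bins, 2 * latent)   # outputs [mu | logvar]
        self.dec = nn.Linear(latent, n_bins)

    def forward_vae_mu_logvar(self, pow_spec):
        # pow_spec assumed shaped (batch, frames, n_bins) for this sketch.
        mu, logvar = torch.chunk(self.enc(pow_spec), 2, dim=-1)
        std = torch.exp(0.5 * logvar)
        z = mu + std * torch.randn_like(std)       # reparameterization trick
        est_pow = self.dec(z)                      # reconstructed power spectrum
        return est_pow, mu, logvar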
Example 4
def common_step(self, batch, batch_nb, train=True):
    mix, clean = batch
    mix = unsqueeze_to_3d(mix)
    clean = unsqueeze_to_3d(clean)

    mix_tf = self.model.forward_encoder(mix)
    clean_tf = self.model.forward_encoder(clean)

    clean_pow = torch.pow(mag(clean_tf), 2)

    # HACK: train the VAE on clean speech only (reconstruct clean from clean)
    # without having to change the dataset pipeline.
    est_pow, mu, logvar = self.model.forward_vae_mu_logvar(clean_pow)

    loss, rec_loss, kl_loss = self.loss_func(est_pow, clean_pow, mu, logvar)
    self.log("rec_loss", rec_loss, logger=True)
    self.log("kl_loss", kl_loss, logger=True)
    return loss
Example 5
def common_step(self, batch, batch_nb, train=True):
    mix, clean = batch
    mix = unsqueeze_to_3d(mix)
    clean = unsqueeze_to_3d(clean)

    mix_tf = self.model.forward_encoder(mix)
    clean_tf = self.model.forward_encoder(clean)

    model_output = self.model.forward_masker(mix_tf)

    # Pick the training target according to the model configuration.
    if self.model.target == "cIRM":
        # Oracle complex ideal ratio mask computed from mixture and clean.
        target_mask = perfect_cirm(mix_tf, clean_tf)
        loss = self.loss_func(model_output, target_mask)
    elif self.model.target == "TMS":
        # Target magnitude spectrum of the clean signal.
        loss = self.loss_func(model_output, mag(clean_tf))
    else:
        # Default: regress the clean TF representation directly.
        loss = self.loss_func(model_output, clean_tf)

    return loss
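perfect_cirm is not defined in these excerpts. For reference, the (uncompressed) complex ideal ratio mask can be computed from the real and imaginary parts of the mixture and clean TF representations; the sketch below assumes the same [real; imag] stacking as the mag sketch above and is an illustration, not the project's implementation:

import torch

def perfect_cirm(mix_tf: torch.Tensor, clean_tf: torch.Tensor,
                 dim: int = -2, eps: float = 1e-8) -> torch.Tensor:
    # Hypothetical oracle cIRM: M = S / Y in the complex sense,
    # with real/imag bins stacked along `dim`.
    yr, yi = torch.chunk(mix_tf, 2, dim=dim)
    sr, si = torch.chunk(clean_tf, 2, dim=dim)
    denom = yr ** 2 + yi ** 2 + eps
    m_real = (yr * sr + yi * si) / denom
    m_imag = (yr * si - yi * sr) / denom
    return torch.cat([m_real, m_imag], dim=dim)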
Example 6
def l1_loss_wrapper(est_target, target):
    # L1 loss after promoting both tensors to (batch, channels, time).
    return F.l1_loss(unsqueeze_to_3d(est_target), unsqueeze_to_3d(target))
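A quick usage check with dummy waveforms (assuming the unsqueeze_to_3d sketch above and torch.nn.functional imported as F, as the wrapper expects):

import torch

est = torch.randn(4, 16000)       # (batch, time)
ref = torch.randn(4, 16000)
loss = l1_loss_wrapper(est, ref)  # scalar L1 loss tensor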
Example 7
def vae_simple_loss_wrapper(est_target, target, mu, logvar):
    # Reconstruction term: MSE between estimated and target spectra.
    recon = F.mse_loss(unsqueeze_to_3d(est_target), unsqueeze_to_3d(target))
    # Closed-form KL to a unit Gaussian prior (the constant +1 term is dropped
    # here; it only shifts the reported value, not the gradients).
    KLD = -0.5 * torch.sum(logvar - mu.pow(2) - logvar.exp())
    # Very small KL weight, so this behaves almost like a plain autoencoder.
    return recon + 1e-10 * KLD, recon, KLD
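For reference, the standard closed-form KL term this approximates, for a diagonal Gaussian posterior against a unit Gaussian prior, is

$$
D_{\mathrm{KL}}\!\left(\mathcal{N}(\mu,\sigma^2)\,\|\,\mathcal{N}(0,I)\right)
  = -\tfrac{1}{2}\sum_j \left(1 + \log\sigma_j^2 - \mu_j^2 - \sigma_j^2\right),
$$

where logvar corresponds to log sigma^2; the code above omits the constant 1 inside the sum.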