def forward(self, embeddings):
     """Compute the GE2E loss for a batch of speaker embeddings.

     Args:
         embeddings: speaker embeddings tensor; assumed shape
             (speakers, utterances, dim) — TODO confirm against
             get_centroids/get_cossim.

     Returns:
         Scalar loss from calc_loss over the scaled similarity matrix.
     """
     # Keep the learned scale positive. The original code called
     # `torch.clamp(self.w, 1e-6)` and discarded the result (clamp is
     # not in-place), making the constraint a no-op; clamp in place
     # under no_grad so the parameter itself is floored at 1e-6.
     with torch.no_grad():
         self.w.clamp_(min=1e-6)
     centroids = get_centroids(embeddings)
     cossim = get_cossim(embeddings, centroids)
     # Affine map of cosine similarities: w * cossim + b.
     sim_matrix = self.w * cossim.to(self.device) + self.b
     loss, _ = calc_loss(sim_matrix)
     return loss
Example #2
0
 def forward(self, embeddings, y=None):
     """Compute the GE2E loss for a batch of speaker embeddings.

     Args:
         embeddings: speaker embeddings tensor; assumed shape
             (speakers, utterances, dim) — TODO confirm against
             get_centroids/get_cossim.
         y: unused; kept for interface compatibility with callers.

     Returns:
         Scalar loss from calc_loss over the scaled similarity matrix.
     """
     # Keep the learned scale positive. The original code called
     # `torch.clamp(self.w, 1e-6)` and discarded the result (clamp is
     # not in-place), making the constraint a no-op; clamp in place
     # under no_grad so the parameter itself is floored at 1e-6.
     with torch.no_grad():
         self.w.clamp_(min=1e-6)
     centroids = get_centroids(embeddings)
     cossim = get_cossim(embeddings, centroids)
     # Affine map of cosine similarities: w * cossim + b.
     sim_matrix = self.w * cossim + self.b
     loss, _ = calc_loss(sim_matrix)
     return loss
Example #3
0
    def forward(self, embeddings):
        """Compute the GE2E loss for a batch of speaker embeddings.

        Args:
            embeddings: speaker embeddings tensor; assumed shape
                (speakers, utterances, dim) — TODO confirm against
                utils.get_centroids/utils.get_cossim.

        Returns:
            Scalar loss from utils.cal_loss over the scaled similarity
            matrix.
        """
        # Keep the learned scale at or above hp.re_num. The original
        # code called `torch.clamp(self.w, hp.re_num)` and discarded
        # the result (clamp is not in-place), making the constraint a
        # no-op; clamp in place under no_grad so the parameter itself
        # is floored.
        with torch.no_grad():
            self.w.clamp_(min=hp.re_num)

        centroids = utils.get_centroids(embeddings)
        cossim = utils.get_cossim(embeddings, centroids)

        # Affine map of cosine similarities: w * cossim + b.
        sim_matrix = self.w * cossim + self.b
        loss, _ = utils.cal_loss(sim_matrix)

        return loss
Example #4
0
 def forward(self, embeddings, embedder_net, lamb):
     """Compute the GE2E loss plus an L2 penalty on the LSTM weights.

     Args:
         embeddings: speaker embeddings tensor; assumed shape
             (speakers, utterances, dim) — TODO confirm against
             get_centroids/get_cossim.
         embedder_net: network whose `LSTM_stack.all_weights` are
             penalized.
         lamb: scalar weight of the L2 penalty.

     Returns:
         Tuple (loss, per_loss, norm_loss) where
         loss = per_loss + norm_loss.
     """
     # Keep the learned scale positive. The original code called
     # `torch.clamp(self.w, 1e-6)` and discarded the result (clamp is
     # not in-place), making the constraint a no-op; clamp in place
     # under no_grad so the parameter itself is floored at 1e-6.
     with torch.no_grad():
         self.w.clamp_(min=1e-6)
     centroids = get_centroids(embeddings)
     cossim = get_cossim(embeddings, centroids)
     # Affine map of cosine similarities: w * cossim + b.
     sim_matrix = self.w * cossim.to(self.device) + self.b
     per_loss, _ = calc_loss(sim_matrix)
     # L2 penalty over the four weight/bias tensors of each LSTM layer.
     # The original wrapped `.data` norms in torch.Tensor([...]), which
     # detaches everything from autograd — norm_loss was a constant
     # with zero gradient, so the regularizer had no training effect.
     # Summing the norms directly keeps the graph intact.
     weights = embedder_net.LSTM_stack.all_weights
     norm_loss = lamb * sum(
         torch.norm(weights[i][j].to(self.device), 2)
         for i in range(hp.model.num_layer) for j in range(4)
     )
     loss = per_loss + norm_loss
     return loss, per_loss, norm_loss