Example #1
import torch
import torch.nn.functional as F


def cvae_loss_function(dec_en_batch_data: torch.Tensor,
                       batch_data: torch.Tensor, mu: torch.Tensor,
                       log_var: torch.Tensor) -> torch.Tensor:
    # KL divergence between the approximate posterior N(mu, sigma^2) and
    # the standard normal prior, summed over the batch.
    kld = -0.5 * torch.sum(1 + log_var - mu.pow(2) - log_var.exp())
    # Reconstruction term: binary cross-entropy between the decoded batch
    # and the original inputs, summed over all elements.
    bce = F.binary_cross_entropy(dec_en_batch_data,
                                 batch_data,
                                 reduction='sum')

    # Normalize the total loss by the number of elements (batch * features).
    return (bce + kld) / (batch_data.shape[0] * batch_data.shape[1])
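
A minimal usage sketch for the function above. The batch size (8), input dimension (784), and latent dimension (20) are illustrative assumptions, and the random tensors merely stand in for real decoder outputs in [0, 1] and encoder statistics:

import torch

# Hypothetical shapes: a batch of 8 flattened 28x28 images and a
# 20-dimensional latent space; the values are placeholders, not real data.
batch_data = torch.rand(8, 784)         # inputs in [0, 1], as BCE requires
dec_en_batch_data = torch.rand(8, 784)  # stands in for sigmoid decoder output
mu = torch.randn(8, 20)                 # encoder mean
log_var = torch.randn(8, 20)            # encoder log-variance

loss = cvae_loss_function(dec_en_batch_data, batch_data, mu, log_var)
print(loss.item())                      # scalar per-element loss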
Example #2
import torch


def compute_loss(model: torch.nn.Module,
                 model_output: torch.Tensor,
                 target_labels: torch.Tensor,
                 is_normalize: bool = True) -> torch.Tensor:
    '''
    Computes the loss between the model output and the target labels.

    Note: we have initialized the loss_criterion in the model with the sum
    reduction.

    Args:
    -   model: model (which inherits from nn.Module), and contains loss_criterion
    -   model_output: the raw scores output by the net [Dim: (N, 15)]
    -   target_labels: the ground truth class labels [Dim: (N, )]
    -   is_normalize: bool flag indicating that loss should be divided by the
                      batch size
    Returns:
    -   the loss value
    '''
    loss = None

    ############################################################################
    # Student code begin
    ############################################################################

    # Use the model's own criterion, which (per the note above) uses the sum
    # reduction, so that dividing by the batch size below yields the mean
    # loss. Instantiating a fresh CrossEntropyLoss() here would default to
    # the mean reduction and make the division below normalize twice.
    loss = model.loss_criterion(model_output, target_labels)

    if is_normalize:
        loss /= model_output.shape[0]
    ############################################################################
    # Student code end
    ############################################################################

    return loss
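
As a usage sketch for compute_loss, the hypothetical ToyNet below carries the sum-reduced loss_criterion the docstring assumes; the layer sizes, batch size, and class count (15, matching the docstring's Dim note) are illustrative only:

import torch

class ToyNet(torch.nn.Module):
    '''Hypothetical stand-in for a model that carries loss_criterion.'''
    def __init__(self):
        super().__init__()
        self.fc = torch.nn.Linear(32, 15)
        # Sum reduction, matching the note in compute_loss's docstring.
        self.loss_criterion = torch.nn.CrossEntropyLoss(reduction='sum')

    def forward(self, x):
        return self.fc(x)

model = ToyNet()
scores = model(torch.randn(4, 32))          # raw scores, shape (4, 15)
labels = torch.randint(0, 15, (4,))         # ground-truth classes, shape (4,)
loss = compute_loss(model, scores, labels)  # mean loss over the batch
print(loss.item())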