def discrete_loss(y_true, y_pred):
    """Cross-entropy loss over discretized targets.

    The prediction channels are split into 10 logits per position, the
    continuous targets are scaled by the module-level `scale_const`
    (assumed to map targets into roughly [0, 9] — confirm at call site),
    rounded and clipped to integer bins in [0, 9], and a sparse softmax
    cross entropy is taken per position. Per-example losses are summed
    over axes 1-3 and averaged over the batch.

    Args:
        y_true: continuous target tensor.
        y_pred: prediction tensor whose channel axis packs 10 logits.

    Returns:
        Scalar mean loss tensor.
    """
    # One set of 10 class logits per spatial position.
    bin_logits = nb.reshape_channel_to_parameters(y_pred, 10)
    # Discretize targets into integer bin indices 0..9.
    scaled_targets = y_true * scale_const
    target_bins = tf.cast(
        tf.clip_by_value(tf.round(scaled_targets), 0, 9), tf.int64)
    per_position = tf.nn.sparse_softmax_cross_entropy_with_logits(
        labels=target_bins, logits=bin_logits)
    # Sum per example, then average over the batch.
    return tf.math.reduce_mean(tf.math.reduce_sum(per_position, axis=[1, 2, 3]))
Example #2
0
    def loss(self, samples, logging_context=None, epoch=None):
        """VAE-style training loss: reconstruction term plus KL term.

        Runs the inference net on `samples`, samples a latent code,
        decodes it with the generative net, and averages the per-example
        (reconstruction + KL) losses over the batch.

        Args:
            samples: batch of input samples (shape/dtype set by the
                inference net — not visible here).
            logging_context: optional string prefix; when given, the two
                loss components are logged via `tf.summary.scalar`.
            epoch: summary step used when logging.

        Returns:
            Scalar mean total loss tensor.
        """
        inf = self.xinference_net(samples)
        inf_params = nb.reshape_channel_to_parameters(inf, 2)
        sample_z = self.latent_distribution.sample(inf)

        gen_params = self.xgenerative_net(sample_z)
        reconstruction_loss = self.distribution.loss(gen_params, samples)
        kl_loss = self.kl_loss(sample_z, inf_params)
        loss = tf.reduce_mean(reconstruction_loss + kl_loss)
        if logging_context is not None:
            # Fix: kl_loss / reconstruction_loss are per-example tensors
            # here (they are only reduced when forming `loss` above), but
            # tf.summary.scalar requires a rank-0 value — reduce them to
            # their batch means before logging.
            tf.summary.scalar(logging_context + "_kl_loss",
                              tf.reduce_mean(kl_loss),
                              step=epoch)
            tf.summary.scalar(logging_context + "_reconstruction_loss",
                              tf.reduce_mean(reconstruction_loss),
                              step=epoch)
        return loss