Example #1
def dmol_neg_log_perplexity(predictions,
                            labels,
                            weights_fn=None):
  """Average log-perplexity excluding padding 0s. No smoothing."""
  del weights_fn  # Unused
  num, den = common_layers.dml_loss(
      predictions, labels, reduce_sum=False)
  return (-num, den)  # Negate: the metric reports log-likelihood (higher is better), not loss.
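
For reference, the (num, den) pair can be aggregated as a weighted mean, which is how T2T typically consumes metric tuples. A minimal evaluation sketch, assuming the function above is in scope, a TF1-style session, and purely illustrative shapes (5 mixtures, 10 parameters each):

import tensorflow as tf
from tensor2tensor.layers import common_layers

predictions = tf.random_normal([2, 4, 4, 5 * 10])
labels = tf.random_uniform([2, 4, 4, 3], minval=0, maxval=256, dtype=tf.int32)

num, den = dmol_neg_log_perplexity(predictions, labels)
with tf.Session() as sess:
  scores, weights = sess.run([num, den])
  # Weighted mean over positions = average log-likelihood.
  print(scores.sum() / weights.sum())
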
  def testDmlLoss(self, batch, height, width, num_mixtures, reduce_sum):
    channels = 3
    # Each logistic mixture component contributes 10 parameters per position.
    pred = tf.random_normal([batch, height, width, num_mixtures * 10])
    labels = tf.random_uniform([batch, height, width, channels],
                               minval=0, maxval=256, dtype=tf.int32)
    actual_loss_num, actual_loss_den = common_layers.dml_loss(
        pred=pred, labels=labels, reduce_sum=reduce_sum)
    actual_loss = actual_loss_num / actual_loss_den

    # dml_loss should equal the raw discretized mix-logistic loss on
    # symmetric-real labels, normalized by the number of channels.
    real_labels = common_layers.convert_rgb_to_symmetric_real(labels)
    expected_loss = common_layers.discretized_mix_logistic_loss(
        pred=pred, labels=real_labels) / channels
    if reduce_sum:
      expected_loss = tf.reduce_mean(expected_loss)

    actual_loss_val, expected_loss_val = self.evaluate(
        [actual_loss, expected_loss])
    self.assertAllClose(actual_loss_val, expected_loss_val)
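
testDmlLoss receives its shape arguments from a parameterized decorator that this excerpt omits. A minimal sketch of the harness it presumably sits in, with a hypothetical class name and illustrative parameter tuples:

from absl.testing import parameterized
import tensorflow as tf


class DmlLossTest(parameterized.TestCase, tf.test.TestCase):  # hypothetical name

  @parameterized.parameters(
      (8, 8, 8, 10, True),
      (8, 8, 8, 10, False),
  )
  def testDmlLoss(self, batch, height, width, num_mixtures, reduce_sum):
    pass  # body as in the excerpt above


if __name__ == "__main__":
  tf.test.main()
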
  def loss(self, logits, features):
    # cia is tensor2tensor.layers.common_image_attention; use the discretized
    # mixture-of-logistics loss when the model is configured for a DMOL
    # likelihood, otherwise fall back to the default loss.
    if self._hparams.likelihood == cia.DistributionType.DMOL:
      return common_layers.dml_loss(logits, features["targets"])

    return super(Imagetransformer, self).loss(logits, features)

  def loss(self, logits, features):
    return common_layers.dml_loss(logits, features["targets"])

Example #7
  def loss(self, pred, features):
    return common_layers.dml_loss(pred, features["targets"])
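
The loss overrides above all follow one pattern: a tensor2tensor model subclass routes its training objective through common_layers.dml_loss. A minimal sketch of that pattern in class context (the class name is hypothetical):

from tensor2tensor.layers import common_layers
from tensor2tensor.utils import t2t_model


class MyDmolModel(t2t_model.T2TModel):  # hypothetical subclass
  """Model trained with the discretized mixture-of-logistics loss."""

  def loss(self, logits, features):
    # Replace the default cross-entropy loss with the DMOL loss.
    return common_layers.dml_loss(logits, features["targets"])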