def _batch_sum_bce(x, target, name='binary_cross_entropy',
                   per_output_weights=None):
  """Computes the batch-summed binary cross-entropy loss from logits.

  Args:
    x: logits tensor.
    target: target-labels tensor, same shape as `x`.
    name: name for the underlying loss op.
    per_output_weights: optional tensor of per-output weights, multiplied
      elementwise into the per-output losses before the batch sum. Was a
      free (undefined) variable in the original snippet — now an explicit
      parameter; the default of None skips weighting entirely.

  Returns:
    The result of `functions.reduce_batch_sum` over the (optionally
    weighted) per-output losses.
  """
  losses = functions.binary_cross_entropy_loss_with_logits(x,
                                                           target,
                                                           name=name)
  if per_output_weights is not None:
    # Scale each output's loss before summing over the batch.
    losses *= per_output_weights
  return functions.reduce_batch_sum(losses)
Example #2
 def testBinaryCorssEntropyLossWithLogits(self):
     """Checks BCE-with-logits output against the closed-form numpy value.

     For logits x and labels t, the expected per-element loss is
     x * (1 - t) + log(1 + exp(-x)).
     """
     logits = numpy.array([2., 3., 4., 5., -6., -7.], dtype=numpy.float32)
     labels = numpy.array([1., 1., 0., 0., 0., 1.], dtype=numpy.float32)
     loss_op = functions.binary_cross_entropy_loss_with_logits(
         tf.constant(logits), tf.constant(labels))
     out = self.Run(loss_op)
     expected = logits * (1 - labels) + numpy.log(1 + numpy.exp(-logits))
     testing.assert_allclose(out[0], expected, rtol=TOLERANCE)
Example #3
 def testBinaryCorssEntropyLossWithLogits(self):
   """Verifies binary_cross_entropy_loss_with_logits against numpy math.

   The analytic per-element loss for logits x and labels t is
   x * (1 - t) + log(1 + exp(-x)).
   """
   x = numpy.array([2., 3., 4., 5., -6., -7.], dtype=numpy.float32)
   t = numpy.array([1., 1., 0., 0., 0., 1.], dtype=numpy.float32)
   x_tensor = tf.constant(x)
   t_tensor = tf.constant(t)
   result = self.Run(
       functions.binary_cross_entropy_loss_with_logits(x_tensor, t_tensor))
   want = x * (1 - t) + numpy.log(1 + numpy.exp(-x))
   testing.assert_allclose(result[0], want, rtol=TOLERANCE)
 def _batch_sum_bce(x, target, name='binary_cross_entropy'):
   """Sums the per-output binary cross-entropy losses over the batch.

   Args:
     x: logits tensor.
     target: target-labels tensor, same shape as `x`.
     name: name for the underlying loss op.

   Returns:
     The batch sum of the per-output BCE-with-logits losses.
   """
   per_output_losses = functions.binary_cross_entropy_loss_with_logits(
       x, target, name=name)
   return functions.reduce_batch_sum(per_output_losses)
 def _batch_sum_bce(x, target, name="binary_cross_entropy",
                    per_output_weights=None):
     """Computes the batch-summed binary cross-entropy loss from logits.

     Args:
       x: logits tensor.
       target: target-labels tensor, same shape as `x`.
       name: name for the underlying loss op.
       per_output_weights: optional tensor of per-output weights applied
         elementwise to the losses before the batch sum. Was a free
         (undefined) variable in the original snippet — now an explicit
         parameter; None (the default) skips weighting.

     Returns:
       The result of `functions.reduce_batch_sum` over the (optionally
       weighted) per-output losses.
     """
     losses = functions.binary_cross_entropy_loss_with_logits(
         x, target, name=name)
     if per_output_weights is not None:
         # Scale each output's loss before summing over the batch.
         losses *= per_output_weights
     return functions.reduce_batch_sum(losses)