def testCrossEntropyLossAllCorrect(self):
    """Loss is ~0 when every example puts a large logit on its true class."""
    with self.test_session():
        # One confidently-correct prediction per row: logit 10 on the
        # labeled class, 0 elsewhere, so the cross entropy is near zero.
        predictions = tf.constant([[10.0, 0.0, 0.0],
                                   [0.0, 10.0, 0.0],
                                   [0.0, 0.0, 10.0]])
        targets = tf.constant([[1, 0, 0],
                               [0, 1, 0],
                               [0, 0, 1]])
        loss = losses.cross_entropy_loss(predictions, targets)
        self.assertEqual(loss.op.name, 'CrossEntropyLoss/value')
        self.assertAlmostEqual(loss.eval(), 0.0, 3)
def testCrossEntropyLossAllWrong(self):
    """Loss is ~10.0 per example when every prediction is wrong.

    NOTE(review): this method was a byte-near duplicate of
    `testCrossEntropyLossAllWrongWithWeight`, whose later definition
    shadowed it so it never ran. Restored as the unweighted all-wrong
    case: each row has logit 10 on a non-labeled class, giving a
    per-example cross entropy of log(e^10 + 2) ~= 10.0, which is also
    the mean over the batch.
    """
    with self.test_session():
        logits = tf.constant([[10.0, 0.0, 0.0],
                              [0.0, 10.0, 0.0],
                              [0.0, 0.0, 10.0]])
        # Labels are rotated one class away from the confident logits,
        # so every example is maximally wrong.
        labels = tf.constant([[0, 0, 1],
                              [1, 0, 0],
                              [0, 1, 0]])
        loss = losses.cross_entropy_loss(logits, labels)
        self.assertEqual(loss.op.name, 'CrossEntropyLoss/value')
        self.assertAlmostEqual(loss.eval(), 10.0, 3)
def testCrossEntropyLossAllWrongWithWeight(self):
    """A scalar weight of 0.5 halves the all-wrong loss: 0.5 * 10.0 == 5.0."""
    with self.test_session():
        logits = tf.constant([[10.0, 0.0, 0.0],
                              [0.0, 10.0, 0.0],
                              [0.0, 0.0, 10.0]])
        labels = tf.constant([[0, 0, 1],
                              [1, 0, 0],
                              [0, 1, 0]])
        loss = losses.cross_entropy_loss(logits, labels, weight=0.5)
        # Fixed deprecated `assertEquals` alias (removed in Python 3.12)
        # to `assertEqual`, matching the sibling tests.
        self.assertEqual(loss.op.name, 'CrossEntropyLoss/value')
        self.assertAlmostEqual(loss.eval(), 5.0, 3)