def test_vs_tf_onehot(self):
    """labels_to_one_hot must agree with tf.one_hot on a 1-D label vector."""
    with self.test_session():
        label_tensor = tf.constant(
            [1, 2, 3, 0], dtype=tf.int64, name='labels')
        # Reference produced by TensorFlow's built-in one-hot encoder.
        reference = tf.one_hot(label_tensor, depth=4)
        # Candidate: project helper returns a SparseTensor; densify to compare.
        candidate = tf.sparse_tensor_to_dense(
            labels_to_one_hot(label_tensor, 4))
        self.assertAllEqual(reference.eval(), candidate.eval())
def test_one_hot(self):
    """One-hot encoding of a 2x2 label map against a hand-written reference."""
    expected = np.asarray(
        [[[0., 1., 0., 0., 0.],
          [0., 0., 1., 0., 0.]],
         [[0., 0., 0., 1., 0.],
          [0., 0., 0., 0., 1.]]],
        dtype=np.float32)
    with self.test_session():
        label_tensor = tf.constant([[1, 2], [3, 4]])
        dense_encoding = tf.sparse_tensor_to_dense(
            labels_to_one_hot(label_tensor, 5)).eval()
        self.assertAllEqual(dense_encoding, expected)
def test_one_hot(self):
    """Dense one-hot of a 2x2 integer label map matches the expected array."""
    # Each label value k maps to a length-5 vector with a 1 at index k.
    reference = np.asarray(
        [[[0., 1., 0., 0., 0.],
          [0., 0., 1., 0., 0.]],
         [[0., 0., 0., 1., 0.],
          [0., 0., 0., 0., 1.]]],
        dtype=np.float32)
    with self.test_session():
        labels = tf.constant([[1, 2], [3, 4]])
        sparse_encoding = labels_to_one_hot(labels, 5)
        one_hot = tf.sparse_tensor_to_dense(sparse_encoding).eval()
        self.assertAllEqual(one_hot, reference)
def test_cross_entropy_value(self):
    """Cross-entropy loss on a hand-worked two-class example.

    Expected value is -0.5 * [log(e / (1+e)) + log(e^2 / (e^2+1))],
    checked for both the sparse-label ('CrossEntropy') and the
    one-hot dense ('CrossEntropy_Dense') loss types.
    """
    with self.cached_session():
        predicted = tf.constant(
            [[0, 1], [2, 0]], dtype=tf.float32, name='predicted')
        labels = tf.constant([1, 0], dtype=tf.int64, name='labels')
        # Add a leading batch dimension expected by LossFunction.
        predicted, labels = [tf.expand_dims(x, axis=0)
                             for x in (predicted, labels)]
        # Analytic expectation shared by both assertions below.
        expected = -.5 * (np.log(np.e / (1 + np.e)) +
                          np.log(np.e ** 2 / (1 + np.e ** 2)))

        test_loss_func = LossFunction(2, loss_type='CrossEntropy')
        computed_cross_entropy = test_loss_func(predicted, labels)
        self.assertAlmostEqual(computed_cross_entropy.eval(), expected)

        # BUG FIX: test_dense_loss was constructed but never invoked — the
        # original re-called test_loss_func, so the dense loss type was
        # never actually tested.
        test_dense_loss = LossFunction(2, loss_type='CrossEntropy_Dense')
        labels = tf.sparse_tensor_to_dense(labels_to_one_hot(labels, 2))
        computed_cross_entropy = test_dense_loss(predicted,
                                                 tf.to_int32(labels))
        self.assertAlmostEqual(computed_cross_entropy.eval(), expected)
def test_cross_entropy_value(self):
    """Cross-entropy loss on a hand-worked two-class example.

    Expected value is -0.5 * [log(e / (1+e)) + log(e^2 / (e^2+1))],
    checked for both the sparse-label ('CrossEntropy') and the
    one-hot dense ('CrossEntropy_Dense') loss types.
    """
    with self.test_session():
        predicted = tf.constant(
            [[0, 1], [2, 0]], dtype=tf.float32, name='predicted')
        labels = tf.constant([1, 0], dtype=tf.int64, name='labels')
        # Add a leading batch dimension expected by LossFunction.
        predicted, labels = [tf.expand_dims(x, axis=0)
                             for x in (predicted, labels)]
        # Analytic expectation shared by both assertions below.
        expected = -.5 * (np.log(np.e / (1 + np.e)) +
                          np.log(np.e ** 2 / (1 + np.e ** 2)))

        test_loss_func = LossFunction(2, loss_type='CrossEntropy')
        computed_cross_entropy = test_loss_func(predicted, labels)
        self.assertAlmostEqual(computed_cross_entropy.eval(), expected)

        # BUG FIX: test_dense_loss was constructed but never invoked — the
        # original re-called test_loss_func, so the dense loss type was
        # never actually tested.
        test_dense_loss = LossFunction(2, loss_type='CrossEntropy_Dense')
        labels = tf.sparse_tensor_to_dense(labels_to_one_hot(labels, 2))
        computed_cross_entropy = test_dense_loss(predicted,
                                                 tf.to_int32(labels))
        self.assertAlmostEqual(computed_cross_entropy.eval(), expected)
def test_vs_tf_onehot(self):
    """Cross-check labels_to_one_hot against TensorFlow's tf.one_hot."""
    with self.test_session():
        labels = tf.constant([1, 2, 3, 0], dtype=tf.int64, name='labels')
        # Densified project encoding must equal the built-in encoding.
        expected_encoding = tf.one_hot(labels, depth=4)
        actual_encoding = tf.sparse_tensor_to_dense(
            labels_to_one_hot(labels, 4))
        self.assertAllEqual(expected_encoding.eval(),
                            actual_encoding.eval())