Example #1
0
 def test_sparse_loss(self):
   """Sparse (int) labels must give the same loss as their one-hot form."""
   # Two label layouts are exercised: rank-1 int labels with rank-2
   # activations, and rank-2 int labels with rank-3 activations.
   test_cases = [
       (tf.constant([0, 2, 1, 0]),
        [[-0.5, 0.1, 2.0], [0.1, 1.5, -5.0], [4.0, -3.0, -6.0],
         [-1.5, 0.7, 5.2]]),
       (tf.constant([[0, 2], [1, 0]]),
        [[[-0.5, 0.1, 2.0], [0.1, 1.5, -5.0]],
         [[4.0, -3.0, -6.0], [-1.5, 0.7, 5.2]]]),
   ]
   for int_labels, logits in test_cases:
     dense_loss = loss.bi_tempered_logistic_loss(
         logits, tf.one_hot(int_labels, 3), 0.5, 1.5)
     sparse_loss = loss.sparse_bi_tempered_logistic_loss(
         logits, int_labels, 0.5, 1.5)
     with self.cached_session() as sess:
       dense_out, sparse_out = sess.run([dense_loss, sparse_loss])
       self.assertAllClose(dense_out, sparse_out)
Example #2
0
 def test_loss_value(self):
   """Check the loss against precomputed reference values."""
   labels = tf.constant([[0.2, 0.3, 0.5], [0.6, 0.3, 0.1], [0.2, 0.8, 0.0]])
   activations = [[-0.5, 0.1, 2.0], [0.1, 1.5, -5.0], [4.0, -3.0, -6.0]]
   # Each case: (t2, extra kwargs, expected per-example loss values).
   cases = [
       (1.5, {}, [0.02301914, 0.18972909, 0.93874922]),
       (0.8, {'num_iters': 20}, [0.21646356, 0.41836615, 1.33997854]),
   ]
   with self.cached_session():
     for t2, extra_kwargs, expected in cases:
       computed = loss.bi_tempered_logistic_loss(
           activations, labels, 0.5, t2, **extra_kwargs)
       self.assertAllClose(computed.eval(), expected)
Example #3
0
 def test_constant_shift(self):
   """Test if adding a constant to all activations is vacuous."""
   labels = tf.constant([[0.2, 0.3, 0.5], [0.4, 0.4, 0.2], [0.7, 0.2, 0.1]])
   activations = tf.random.normal(shape=[3, 3])
   # Per-example bias; shape [3, 1] broadcasts across the class dimension,
   # so every class logit of an example is shifted by the same constant.
   bias = tf.random.normal(shape=[3, 1])
   with self.cached_session() as sess:
     actual_loss = loss.bi_tempered_logistic_loss(activations, labels, 0.5,
                                                  1.2)
     shifted_loss = loss.bi_tempered_logistic_loss(activations + bias, labels,
                                                   0.5, 1.2)
     self.assertEqual(actual_loss.shape, [3])
     # Single sess.run so both losses see the same random draw of
     # `activations` and `bias` (tf.random.normal re-samples on every run).
     actual_loss_out, shifted_loss_out = sess.run([actual_loss, shifted_loss])
     self.assertAllClose(actual_loss_out, shifted_loss_out)
Example #4
0
 def test_constant_shift(self):
     """Shifting all activations by a per-example constant must not change
     the loss (eager-mode variant)."""
     labels = tf.constant([[0.2, 0.3, 0.5], [0.4, 0.4, 0.2],
                           [0.7, 0.2, 0.1]])
     # Eager mode: the random draws happen once, here, and are reused
     # for every temperature below.
     activations = tf.random.normal(shape=[3, 3])
     bias = tf.random.normal(shape=[3, 1])
     for t2 in [0.8, 1.2]:
         base = loss.bi_tempered_logistic_loss(
             activations, labels, 0.5, t2)
         shifted = loss.bi_tempered_logistic_loss(
             activations + bias, labels, 0.5, t2)
         self.assertAllClose(base.numpy(), shifted.numpy())
Example #5
0
 def test_label_smoothing(self):
   """Loss with label smoothing must match precomputed reference values."""
   one_hot_labels = tf.constant(
       [[1.0, 0.0, 0.0], [0.0, 1.0, 0.0], [0.0, 0.0, 1.0]])
   logits = [[-0.5, 0.1, 2.0], [0.1, 1.5, -5.0], [4.0, -3.0, -6.0]]
   smoothed_loss = loss.bi_tempered_logistic_loss(
       logits, one_hot_labels, 0.5, 1.5, label_smoothing=0.1)
   with self.cached_session() as sess:
     self.assertAllClose(
         sess.run(smoothed_loss),
         [0.76652711, 0.08627685, 1.35443510],
         atol=1e-5)
Example #6
0
 def test_limit_case_logistic_loss(self):
   """Test for checking if t1 = t2 = 1.0 yields the logistic loss."""
   labels = tf.constant([[1.0, 0.0, 0.0], [0.0, 1.0, 0.0], [0.0, 0.0, 1.0]])
   activations = tf.random.normal(shape=[3, 3])
   with self.cached_session() as sess:
     actual_loss = loss.bi_tempered_logistic_loss(activations, labels, 1.0,
                                                  1.0)
     # Reference: standard softmax cross-entropy on the same logits.
     logistic_loss = tf.nn.softmax_cross_entropy_with_logits(
         logits=activations, labels=labels)
     # Single sess.run so both losses are evaluated on the same random
     # draw of `activations` (tf.random.normal re-samples on every run).
     actual_loss_out, logistic_loss_out = sess.run(
         [actual_loss, logistic_loss])
     self.assertAllClose(actual_loss_out, logistic_loss_out)
Example #7
0
 def test_dynamic_temperatures(self):
   """Test changing temperatures dynamically."""
   # NOTE(review): TF1 graph-mode test — `tf.placeholder` does not exist in
   # TF2 eager mode (would need tf.compat.v1.placeholder there); confirm the
   # test suite runs in graph mode.
   t1 = tf.placeholder(tf.float32)
   t2 = tf.placeholder(tf.float32)
   labels = tf.constant([[0.2, 0.5, 0.3]])
   activations = [[-0.5, 0.1, 2.0]]
   # One graph, built once; the temperatures are fed per run below.
   actual_loss = loss.bi_tempered_logistic_loss(
       activations, labels, t1, t2, num_iters=5)
   # Temperature pairs are zipped positionally; loss_values holds the
   # precomputed expected loss for each (t1, t2) pair.
   t1_values = [1.0, 0.9, 0.8, 0.7]
   t2_values = [1.0, 1.1, 1.2, 1.3]
   loss_values = [[0.62870466], [0.45677936], [0.34298314], [0.26295574]]
   loss_out = []
   with self.cached_session() as sess:
     for t1_value, t2_value in zip(t1_values, t2_values):
       loss_out.append(sess.run(
           actual_loss, feed_dict={t1: t1_value, t2: t2_value}))
     self.assertAllClose(loss_values, loss_out, atol=1e-5)
Example #8
0
 def test_gradient_error(self):
   """Compare custom gradient with tf.gradient."""
   labels = tf.constant([[0.4, 0.3, 0.3], [0.8, 0.1, 0.1], [0.0, 0.0, 1.0],
                         [0.0, 1.0, 0.0]])
   activations = tf.random.normal(shape=[4, 3])
   # Reference path: the internal loss, differentiated by TF's autodiff.
   internal_loss = loss._internal_bi_tempered_logistic_loss(
       activations, labels, 0.5, 1.5)
   numerical_gradient = tf.gradients(internal_loss, activations)
   # Path under test: the public loss with its hand-written custom gradient.
   actual_loss = loss.bi_tempered_logistic_loss(activations, labels, 0.5, 1.5)
   actual_gradient = tf.gradients(actual_loss, activations)
   with self.cached_session() as sess:
     # NOTE(review): each sess.run re-samples `activations`, so the loss
     # pair and the gradient pair come from different random draws. Each
     # pair is internally consistent, which is all the asserts compare.
     internal_loss_out, actual_loss_out = sess.run(
         [internal_loss, actual_loss])
     numerical_gradient_out, actual_gradient_out = sess.run(
         [numerical_gradient[0], actual_gradient[0]])
     self.assertEqual(actual_gradient_out.shape, (4, 3))
     self.assertAllClose(actual_loss_out, internal_loss_out)
     self.assertAllClose(
         actual_gradient_out, numerical_gradient_out, atol=1e-5)
Example #9
0
 def test_gradient_error(self):
     """Compare custom gradient with tf.GradientTape."""
     labels = tf.constant([[0.4, 0.3, 0.3], [0.8, 0.1, 0.1],
                           [0.0, 0.0, 1.0], [0.0, 1.0, 0.0]])
     activations = tf.random.normal(shape=[4, 3])
     for t1, t2 in [[0.5, 1.0], [1.0, 1.5], [0.5, 1.5]]:
         # Persistent tape: two gradients are taken from one recording.
         with tf.GradientTape(persistent=True) as tape:
             # `activations` is a plain tensor, not a Variable, so it must
             # be watched explicitly.
             tape.watch(activations)
             reference_loss = loss._internal_bi_tempered_logistic_loss(
                 activations, labels, t1, t2)
             custom_loss = loss.bi_tempered_logistic_loss(
                 activations, labels, t1, t2)
         reference_grad = tape.gradient(reference_loss, activations)
         custom_grad = tape.gradient(custom_loss, activations)
         self.assertEqual(custom_grad.numpy().shape, (4, 3))
         self.assertAllClose(custom_loss.numpy(),
                             reference_loss.numpy(),
                             atol=1e-5)
         self.assertAllClose(custom_grad.numpy(),
                             reference_grad.numpy(),
                             atol=1e-4)