def testDtypeFlexibility(self):
  """Tests the loss on inputs of varying data types."""
  shape = [20, 3]
  logits = np.random.randn(*shape)
  targets = tf.truncated_normal(shape)
  positive_weights = tf.constant(3, dtype=tf.int64)
  negative_weights = 1
  loss = util.weighted_sigmoid_cross_entropy_with_logits(
      targets, logits, positive_weights, negative_weights)

  with self.test_session():
    self.assertEqual(loss.eval().dtype, np.float)

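# The tests below compare util.weighted_sigmoid_cross_entropy_with_logits against a
# NumPy reference named weighted_sigmoid_cross_entropy that is defined elsewhere in
# this module. The helper here is NOT that reference; it is a minimal, hypothetical
# sketch of the quantity such a reference would plausibly compute, assuming the
# standard positively-weighted formulation
#   loss = weight * targets * -log(sigmoid(logits))
#          + (1 - targets) * -log(1 - sigmoid(logits)),
# written in a numerically stable form via softplus.
def _weighted_sigmoid_cross_entropy_sketch(targets, logits, weight=1.0):
  # softplus(x) = log(1 + exp(x)), computed stably for large |x|.
  softplus = lambda x: np.log1p(np.exp(-np.abs(x))) + np.maximum(x, 0)
  # -log(sigmoid(logits)) = softplus(-logits); -log(1 - sigmoid(logits)) = softplus(logits).
  return weight * targets * softplus(-logits) + (1.0 - targets) * softplus(logits)
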
def testWeightedSigmoidCrossEntropy(self, batch_size, num_labels, weight):
  """Tests that the tf and numpy functions agree on many instances."""
  x_shape = [batch_size, num_labels]
  targets = np.random.random_sample(x_shape).astype(np.float32)
  logits = np.random.randn(*x_shape).astype(np.float32)

  with self.test_session():
    loss = util.weighted_sigmoid_cross_entropy_with_logits(
        targets, logits, weight, 1.0, name='weighted-loss')
    expected = weighted_sigmoid_cross_entropy(targets, logits, weight)
    self.assertAllClose(expected, loss.eval(), atol=0.000001)

def testTrivialCompatibilityWithSigmoidCrossEntropy(self):
  """Tests compatibility with unweighted function with weight 1.0."""
  x_shape = [300, 10]
  targets = np.random.random_sample(x_shape).astype(np.float32)
  logits = np.random.randn(*x_shape).astype(np.float32)
  weighted_loss = util.weighted_sigmoid_cross_entropy_with_logits(
      targets, logits)
  expected_loss = (
      tf.contrib.nn.deprecated_flipped_sigmoid_cross_entropy_with_logits(
          logits, targets))

  with self.test_session():
    self.assertAllClose(expected_loss.eval(),
                        weighted_loss.eval(),
                        atol=0.000001)

def testNonTrivialCompatibilityWithSigmoidCrossEntropy(self):
  """Tests use of an arbitrary weight (4.12)."""
  x_shape = [300, 10]
  targets = np.random.random_sample(x_shape).astype(np.float32)
  logits = np.random.randn(*x_shape).astype(np.float32)
  weight = 4.12
  weighted_loss = util.weighted_sigmoid_cross_entropy_with_logits(
      targets, logits, weight, weight)
  expected_loss = (
      weight *
      tf.contrib.nn.deprecated_flipped_sigmoid_cross_entropy_with_logits(
          logits, targets))

  with self.test_session():
    self.assertAllClose(expected_loss.eval(),
                        weighted_loss.eval(),
                        atol=0.000001)

def testDifferentSizeWeightedSigmoidCrossEntropy(self):
  """Tests correctness on 3D tensors.

  Tests that the function works as expected when logits is a 3D tensor
  and targets is a 2D tensor.
  """
  targets_shape = [30, 4]
  logits_shape = [targets_shape[0], targets_shape[1], 3]
  targets = np.random.random_sample(targets_shape).astype(np.float32)
  logits = np.random.randn(*logits_shape).astype(np.float32)

  weight_vector = [2.0, 3.0, 13.0]
  loss = util.weighted_sigmoid_cross_entropy_with_logits(targets,
                                                         logits,
                                                         weight_vector)

  with self.test_session():
    loss = loss.eval()
    for i in range(0, len(weight_vector)):
      expected = weighted_sigmoid_cross_entropy(targets,
                                                logits[:, :, i],
                                                weight_vector[i])
      self.assertAllClose(loss[:, :, i], expected, atol=0.000001)

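# A hypothetical NumPy illustration (not one of the tests) of the broadcasting the 3D
# test above relies on: the [30, 4] targets are expanded to [30, 4, 1] and the
# length-3 weight vector is applied along the last axis of the [30, 4, 3] logits, so
# the vectorized loss matches the per-slice computation checked in the loop above.
# Uses the _weighted_sigmoid_cross_entropy_sketch helper defined earlier in this file.
def _broadcasting_illustration():
  targets = np.random.random_sample([30, 4]).astype(np.float32)
  logits = np.random.randn(30, 4, 3).astype(np.float32)
  weights = np.array([2.0, 3.0, 13.0], dtype=np.float32)
  vectorized = _weighted_sigmoid_cross_entropy_sketch(
      targets[:, :, np.newaxis], logits, weights)
  per_slice = np.stack(
      [_weighted_sigmoid_cross_entropy_sketch(targets, logits[:, :, i], weights[i])
       for i in range(3)],
      axis=-1)
  np.testing.assert_allclose(vectorized, per_slice, atol=1e-6)
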
def testGradients(self):
  """Tests that weighted loss gradients behave as expected."""
  dummy_tensor = tf.constant(1.0)

  positives_shape = [10, 1]
  positives_logits = dummy_tensor * tf.Variable(
      tf.random_normal(positives_shape) + 1.0)
  positives_targets = tf.ones(positives_shape)
  positives_weight = 4.6
  positives_loss = (
      tf.contrib.nn.deprecated_flipped_sigmoid_cross_entropy_with_logits(
          positives_logits, positives_targets) * positives_weight)

  negatives_shape = [190, 1]
  negatives_logits = dummy_tensor * tf.Variable(
      tf.random_normal(negatives_shape))
  negatives_targets = tf.zeros(negatives_shape)
  negatives_weight = 0.9
  negatives_loss = (
      tf.contrib.nn.deprecated_flipped_sigmoid_cross_entropy_with_logits(
          negatives_logits, negatives_targets) * negatives_weight)

  all_logits = tf.concat([positives_logits, negatives_logits], 0)
  all_targets = tf.concat([positives_targets, negatives_targets], 0)

  weighted_loss = tf.reduce_sum(
      util.weighted_sigmoid_cross_entropy_with_logits(
          all_targets, all_logits, positives_weight, negatives_weight))
  weighted_gradients = tf.gradients(weighted_loss, dummy_tensor)

  expected_loss = tf.add(
      tf.reduce_sum(positives_loss),
      tf.reduce_sum(negatives_loss))
  expected_gradients = tf.gradients(expected_loss, dummy_tensor)

  with tf.Session() as session:
    tf.global_variables_initializer().run()
    grad, expected_grad = session.run(
        [weighted_gradients, expected_gradients])
    self.assertAllClose(grad, expected_grad)

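# Hypothetical usage sketch (not part of the test suite): in a TF1-style training
# graph the weighted loss is typically reduced to a scalar and handed to an optimizer.
# The dense layer, optimizer, and the 4.6/0.9 class weights below are illustrative
# choices only, not values prescribed by util.
def _training_usage_sketch():
  features = tf.placeholder(tf.float32, shape=[None, 8])
  targets = tf.placeholder(tf.float32, shape=[None, 1])
  logits = tf.layers.dense(features, 1)
  loss = tf.reduce_mean(
      util.weighted_sigmoid_cross_entropy_with_logits(targets, logits, 4.6, 0.9))
  train_op = tf.train.GradientDescentOptimizer(0.1).minimize(loss)
  return loss, train_op
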