def test_histogram_loss_incompatible_shapes(self, descs_n, labels_n):
    """histogram_loss must raise ValueError when the descriptor and label
    batch dimensions disagree."""
    # discard generated cases where the dims happen to match
    assume(descs_n != labels_n)
    descriptors = tf.placeholder(tf.float32, [descs_n, None])
    targets = tf.placeholder(tf.int32, [labels_n])
    with pytest.raises(ValueError):
        histogram_loss(descriptors, targets, 128)
def test_histogram_loss_wrong_descriptors_dims(self, descs_shape):
    """Descriptor tensors that are not rank-2 must be rejected with ValueError."""
    # only non-2D shapes are interesting here
    assume(len(descs_shape) != 2)
    bad_descriptors = tf.placeholder(tf.float32, descs_shape)
    targets = tf.placeholder(tf.int32, [None])
    with pytest.raises(ValueError):
        histogram_loss(bad_descriptors, targets, 128)
def test_eager_histogram_loss_adding_constants_to_labels_invariance(
        self, descs, labels, n_bins, offset):
    """Shifting every label by the same constant must leave the loss unchanged."""
    # uniform labels are a degenerate draw; discard them
    assume(len(np.unique(labels)) != 1)
    # project descriptors onto the unit sphere (l2 normalization);
    # the epsilon guards against division by zero
    descs /= np.expand_dims(np.sqrt(np.sum(descs**2, axis=1) + 1e-12), -1)
    with eager_mode():
        baseline = histogram_loss(descs, labels, n_bins)
        shifted = histogram_loss(descs, labels + offset, n_bins)
        np.testing.assert_almost_equal(shifted, baseline)
def test_histogram_loss_eager_graph_consistency(self, graph, descs, labels):
    """Eager and graph-mode evaluations of histogram_loss must agree numerically."""
    # uniform labels are a degenerate draw; discard them
    assume(len(np.unique(labels)) != 1)
    # l2-normalize the descriptors (epsilon avoids division by zero)
    descs /= np.expand_dims(np.sqrt(np.sum(descs**2, axis=1) + 1e-12), -1)
    # evaluate once eagerly
    with eager_mode():
        eager_value = histogram_loss(descs, labels, 128)
    # evaluate the same inputs through the pre-built graph
    descs_pl, labels_pl, loss_op = graph
    feed = {descs_pl: descs, labels_pl: labels}
    with tf.Session() as session:
        graph_value = session.run(loss_op, feed_dict=feed)
    np.testing.assert_almost_equal(np.array(eager_value), graph_value)
def graph():
    """Build the histogram_loss graph once and yield its endpoints
    (descriptor placeholder, label placeholder, loss op)."""
    descriptors = tf.placeholder(tf.float32, [None, None])
    targets = tf.placeholder(tf.int32, [None])
    loss_op = histogram_loss(descriptors, targets, 128)
    yield descriptors, targets, loss_op
def test_histogram_loss_wrong_labels_dims(self, labels_shape):
    """Label tensors whose rank is not 1 must be rejected with ValueError."""
    descriptors = tf.placeholder(tf.float32, [None, None])
    bad_labels = tf.placeholder(tf.int32, labels_shape)
    with pytest.raises(ValueError):
        histogram_loss(descriptors, bad_labels, 128)