def test_cosine_arithmetic_loss_zeros_mask(self):
  # With an all-zeros mask, no examples contribute and the loss should be 0.
  all_zeros_mask = np.zeros((BATCH_SIZE))
  loss = losses.CosineArithmeticLoss(
      fake_data['pregrasp_embedding'],
      fake_data['goal_embedding'],
      fake_data['postgrasp_embedding'],
      all_zeros_mask)
  with tf.Session() as sess:
    output = sess.run(loss)
    self.assertEqual(output, 0)
def test_cosine_arithmetic_loss_mixed_mask(self):
  # With a mixed mask, only the masked-in example (index 0) contributes.
  mixed_mask = np.zeros((BATCH_SIZE))
  mixed_mask[0] = 1
  loss = losses.CosineArithmeticLoss(
      fake_data['pregrasp_embedding'],
      fake_data['goal_embedding'],
      fake_data['postgrasp_embedding'],
      mixed_mask)
  with tf.Session() as sess:
    output = sess.run(loss)
    true_answer = cosine_distance(
        fake_data['pregrasp_embedding'][:1] -
        fake_data['postgrasp_embedding'][:1],
        fake_data['goal_embedding'][:1])
    self.assertAlmostEqual(output, true_answer[0], places=3)
def test_cosine_arithmetic_loss_ones_mask(self):
  # With an all-ones mask, the loss is the mean cosine distance over the batch.
  all_ones_mask = np.ones((BATCH_SIZE))
  loss = losses.CosineArithmeticLoss(
      fake_data['pregrasp_embedding'],
      fake_data['goal_embedding'],
      fake_data['postgrasp_embedding'],
      all_ones_mask)
  with tf.Session() as sess:
    output = sess.run(loss)
    true_answer = cosine_distance(
        fake_data['pregrasp_embedding'] - fake_data['postgrasp_embedding'],
        fake_data['goal_embedding'])
    self.assertAlmostEqual(output, np.mean(true_answer), places=3)