def test_loss_gradient(self):
    (_, _), (x_test, y_test) = self.mnist
    classifier = ClassifierWrapper(self.model_mnist)

    # Test gradient
    grads = classifier.loss_gradient(x_test, y_test)
    self.assertTrue(np.array(grads.shape == (NB_TEST, 28, 28, 1)).all())
    self.assertNotEqual(np.sum(grads), 0)
def test_save(self):
    import os

    path = 'tmp'
    filename = 'model.h5'
    classifier = ClassifierWrapper(self.model_mnist)
    classifier.save(filename, path=path)
    self.assertTrue(os.path.isfile(os.path.join(path, filename)))

    # Remove saved file
    os.remove(os.path.join(path, filename))
def test_layers(self):
    (_, _), (x_test, _), _, _ = load_mnist()
    x_test = x_test[:NB_TEST]
    classifier = ClassifierWrapper(self.model_mnist)
    self.assertEqual(len(classifier.layer_names), 3)

    layer_names = classifier.layer_names
    for i, name in enumerate(layer_names):
        # Activations requested by layer index and by layer name should match
        act_i = classifier.get_activations(x_test, i, batch_size=128)
        act_name = classifier.get_activations(x_test, name, batch_size=128)
        self.assertAlmostEqual(np.sum(act_name - act_i), 0)
def test_shapes(self):
    x_test, y_test = self.mnist[1]
    classifier = ClassifierWrapper(self.model_mnist)

    preds = classifier.predict(self.mnist[1][0])
    self.assertTrue(preds.shape == y_test.shape)
    self.assertTrue(classifier.nb_classes == 10)

    class_grads = classifier.class_gradient(x_test[:11])
    self.assertTrue(class_grads.shape == tuple([11, 10] + list(x_test[1].shape)))

    loss_grads = classifier.loss_gradient(x_test[:11], y_test[:11])
    self.assertTrue(loss_grads.shape == x_test[:11].shape)
def test_class_gradient(self):
    (_, _), (x_test, _) = self.mnist
    classifier = ClassifierWrapper(self.model_mnist)

    # Test all gradients label
    grads = classifier.class_gradient(x_test)
    self.assertTrue(np.array(grads.shape == (NB_TEST, 10, 28, 28, 1)).all())
    self.assertTrue(np.sum(grads) != 0)

    # Test 1 gradient label = 5
    grads = classifier.class_gradient(x_test, label=5)
    self.assertTrue(np.array(grads.shape == (NB_TEST, 1, 28, 28, 1)).all())
    self.assertTrue(np.sum(grads) != 0)

    # Test a set of gradients label = array
    label = np.random.randint(5, size=NB_TEST)
    grads = classifier.class_gradient(x_test, label=label)
    self.assertTrue(np.array(grads.shape == (NB_TEST, 1, 28, 28, 1)).all())
    self.assertTrue(np.sum(grads) != 0)
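# Illustrative sketch (not part of the test suite): the loss_gradient API
# exercised above is already enough to build a simple FGSM-style perturbation.
# This helper is hypothetical; it only assumes, as the tests show, that
# classifier.loss_gradient(x, y) returns an array with the same shape as x.
# The eps/clip_min/clip_max parameters are illustrative defaults, not part of
# the wrapper's API.
import numpy as np


def fgsm_perturb(classifier, x, y, eps=0.1, clip_min=0.0, clip_max=1.0):
    """Return x shifted one epsilon step along the sign of the loss gradient."""
    grads = classifier.loss_gradient(x, y)
    x_adv = x + eps * np.sign(grads)
    return np.clip(x_adv, clip_min, clip_max)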