def grad_cam(self, value, index):
    """Compute and save a Grad-CAM heatmap for every label in ``self.labels``.

    Args:
        value: Input image batch to explain (passed to the explainer together
            with the matching ground-truth entry ``self.setY[index]``).
        index: Position of the sample in ``self.setY``; also used in the
            output filename.

    Side effects:
        Writes one PNG per class to ``sg.PATH_GRAD`` named
        ``n_<index>_<class>_grad_cam.png``.
    """
    # Single explainer reused across classes; the original re-instantiated
    # GradCAM() on every iteration and also shadowed this method's own name.
    explainer = GradCAM()
    for class_index in self.labels:
        grid = explainer.explain(
            (value, self.setY[index]),
            self.model,
            class_index=class_index,
            layer_name=self.target_layers,
        )
        name = "n_{}_".format(index) + str(class_index) + "_grad_cam.png"
        explainer.save(grid, ".", sg.PATH_GRAD + name)
def save_all_explainer(validation_data, model, name_conv, n_conv=1, dir_save_im='./', save_name='outputs'):
    """Run a battery of tf-explain explainers on *model* and save the images.

    Args:
        validation_data: ``(inputs, labels)`` tuple as expected by tf-explain.
        model: Keras model to explain.
        name_conv: Base name of the conv layers; layer ``i`` is ``"{name_conv}_{i}"``.
        n_conv: Number of conv layers to visualize (1-based).
        dir_save_im: Directory the images are written to.
        save_name: Filename prefix for every saved image.

    Side effects:
        Writes activation and Grad-CAM images per conv layer, plus SmoothGrad
        and occlusion-sensitivity images for class indices 0 and 1.
    """
    explainer_grad_cam = GradCAM()
    explainer_activ = ExtractActivations()
    explainer_occl = OcclusionSensitivity()
    explainer_smooth = SmoothGrad()

    # Per-layer explainers: activations and Grad-CAM for class 0.
    for i in range(1, n_conv + 1):
        layer_name = '{}_{}'.format(name_conv, i)
        output = explainer_activ.explain(validation_data, model, layer_name)
        explainer_activ.save(output, dir_save_im, '{}-activ-conv{}.jpg'.format(save_name, i))
        output = explainer_grad_cam.explain(validation_data, model, layer_name, 0)
        explainer_grad_cam.save(output, dir_save_im, '{}-gradCam0-conv{}.jpg'.format(save_name, i))

    # Layer-independent explainers, hoisted out of the loop: the original ran
    # them once per conv layer, redundantly recomputing and overwriting the
    # very same files n_conv times.
    for class_index in (0, 1):
        output = explainer_smooth.explain(validation_data, model, class_index)
        explainer_smooth.save(output, dir_save_im, '{}-smooth{}.jpg'.format(save_name, class_index))
        # 5 is the occlusion patch size in pixels.
        output = explainer_occl.explain(validation_data, model, class_index, 5)
        explainer_occl.save(output, dir_save_im, '{}-occlSens{}.jpg'.format(save_name, class_index))
# Finish the training-history plot.
plt.xlabel('Epoch')
plt.ylabel('Accuracy')
plt.ylim([0.5, 1])
plt.legend(loc='lower right')

# Evaluate on the held-out set and report accuracy.
test_loss, test_acc = model.evaluate(images_vali, labels_vali, verbose=2)
print(test_acc)

"""Our simple CNN has achieved a test accuracy of over 70%. Not bad for a few lines of code! For another CNN style, see an example using the Keras subclassing API and a `tf.GradientTape` [here](https://www.tensorflow.org/tutorials/quickstart/advanced)."""

#%%
# Instantiation of the explainer
explainer = GradCAM()

# For each of the 7 classes, explain up to 25 validation images of that class
# at the "conv3" layer and save the resulting Grad-CAM grid.
output_dir = '.'
for class_index in range(7):
    ind_in_images_vali = np.where(labels_vali == class_index)[0]
    img_explain = images_vali[ind_in_images_vali[0:25], :, :, :]
    data = (img_explain, None)
    output_name = 'grad_cam_class_%d.png' % class_index
    output = explainer.explain(data, model, "conv3", class_index)
    explainer.save(output, output_dir, output_name)
import tensorflow as tf
from tf_explain.core.grad_cam import GradCAM

IMAGE_PATH = './cat.jpg'

if __name__ == '__main__':
    # VGG16 pre-trained on ImageNet, with the classification head attached.
    model = tf.keras.applications.vgg16.VGG16(weights='imagenet', include_top=True)

    # Load the image at VGG16's expected input resolution and convert to array.
    img = tf.keras.preprocessing.image.load_img(IMAGE_PATH, target_size=(224, 224))
    img = tf.keras.preprocessing.image.img_to_array(img)

    model.summary()

    # tf-explain expects a (inputs, labels) pair; labels are unused here.
    data = ([img], None)

    # ImageNet class 281 = "tabby cat".
    tabby_cat_class_index = 281

    # Compute GradCAM on VGG16 at its last conv layer and save the heatmap.
    explainer = GradCAM()
    grid = explainer.explain(data, model, 'block5_conv3', tabby_cat_class_index)
    explainer.save(grid, '.', 'grad_cam.png')
import tensorflow as tf
from tf_explain.core.grad_cam import GradCAM

IMAGE_PATH = "./cat.jpg"

if __name__ == "__main__":
    # VGG16 pre-trained on ImageNet, with the classification head attached.
    model = tf.keras.applications.vgg16.VGG16(weights="imagenet", include_top=True)

    # Load the image at VGG16's expected input resolution and convert to array.
    img = tf.keras.preprocessing.image.load_img(IMAGE_PATH, target_size=(224, 224))
    img = tf.keras.preprocessing.image.img_to_array(img)
    print(img.shape)

    model.summary()

    # tf-explain expects a (inputs, labels) pair; labels are unused here.
    data = ([img], None)

    # ImageNet class 281 = "tabby cat".
    tabby_cat_class_index = 281

    # Compute GradCAM on VGG16 at its last conv layer and save the heatmap.
    explainer = GradCAM()
    grid = explainer.explain(data, model, "block5_conv3", tabby_cat_class_index)
    explainer.save(grid, ".", "grad_cam.png")