Example #1
    def interpret_signal(self, signal):
        """

        Args:
            signal:

        Returns:

        """
        
        output = signal.unsqueeze(0).to(self.device)

        # Run the signal through each feature-extraction module in eval mode
        for layer in self.modules.values():
            layer.eval().to(self.device)
            output = layer(output)
            
        output = torch.flatten(output, start_dim=1)
        
        reduced_signal = self.pca.transform(output.detach())
        example_index = self.__closest_point(reduced_signal)
        example_signal = self.X[example_index]
        
        signal_attribution = self.algo.attribute(signal.unsqueeze(0), target=0).detach().cpu().numpy()[0]
        example_signal_attribution = self.algo.attribute(example_signal.unsqueeze(0), target=0).detach().cpu().numpy()[0]
        
        signal_attribution = convert_to_grayscale(signal_attribution * signal.detach().numpy())
        example_signal_attribution = convert_to_grayscale(example_signal_attribution * example_signal.detach().numpy())
        
        # save_gradient_images(grayscale_guided_grads, result_dir + '_Guided_BP_gray')
        
        return self.__plot_signals(
            [signal, example_signal],
            [signal_attribution, example_signal_attribution],
            int(torch.round(torch.sigmoid(self.model(signal.unsqueeze(0).to(self.device)))).item()),
            self.y[example_index].item(),
            self.channel_labels)
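
# Note: self.__closest_point is not shown in this snippet. The sketch below is an
# assumed, simplified nearest-neighbour lookup in PCA space (the name, arguments,
# and shapes are guesses, not the original helper).
import numpy as np

def closest_point(reduced_signal, reduced_training_set):
    """Index of the training sample nearest to reduced_signal (Euclidean distance).

    Assumes reduced_signal has shape (1, n_components) and
    reduced_training_set has shape (n_samples, n_components).
    """
    distances = np.linalg.norm(reduced_training_set - reduced_signal, axis=1)
    return int(np.argmin(distances))
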
def vanilla_backprop_example(train, dataset, modeltype, dp):
    f, axarr = plt.subplots(3, 2)
    f.set_figheight(10)
    f.set_figwidth(10)

    classes = train_models.get_classes(dataset)
    pretrained_model = train_models.get_trained_model(train, dataset, modeltype, dp)
    VBP = VanillaBackprop(pretrained_model)

    print('model loaded, forwarding examples')

    # Show the first three dataset examples: original image beside its
    # vanilla-backprop gradient visualization
    for example_index in range(3):
        target_class = train_models.get_example_class_target(dataset, example_index)
        original_image = train_models.get_example_input_image(dataset, example_index)
        prep_img = train_models.get_example_preprocessed_image(dataset, example_index)
        vanilla_grads = VBP.generate_gradients(prep_img, target_class)
        grayscale_vanilla_grads = convert_to_grayscale(vanilla_grads)
        axarr[example_index, 0].imshow(original_image)
        axarr[example_index, 0].title.set_text(f'Original Image - {classes[target_class]}')
        axarr[example_index, 1].imshow(grayscale_vanilla_grads.squeeze(0))
        axarr[example_index, 1].title.set_text(
            f'Gradient Visualization Vanilla Backprop - {classes[target_class]}')

    plt.show()    


    print('Vanilla backprop completed')
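
VanillaBackprop above comes from helper code that is not shown in this listing. Conceptually it is just the gradient of the target-class score with respect to the input; a minimal self-contained sketch of that idea (not the exact helper used above) is:

import torch

def vanilla_gradients(model, prep_img, target_class):
    """Gradient of the target-class score w.r.t. the input (vanilla backprop)."""
    model.eval()
    prep_img = prep_img.clone().detach().requires_grad_(True)
    output = model(prep_img)            # expected shape: (1, num_classes)
    model.zero_grad()
    output[0, target_class].backward()  # backpropagate only the target class score
    return prep_img.grad.detach().cpu().numpy()[0]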
Example #3
def vis_grad(model, class_index, layer, image_path, size=[224, 224]):
    original_image = cv2.imread(image_path, 1)
    #plt.imshow(original_image)
    #plt.show()
    prep_img = preprocess_image(original_image, size)
    file_name_to_export = f'model_classindex_{class_index}-layer_{layer}'

    # Grad cam
    gcv2 = GradCam(model, target_layer=layer)
    # Generate cam mask
    cam = gcv2.generate_cam(prep_img, class_index, size)
    print('Grad cam completed')

    # Guided backprop
    GBP = GuidedBackprop(model)
    # Get gradients
    guided_grads = GBP.generate_gradients(prep_img, class_index)
    print('Guided backpropagation completed')

    # Guided Grad cam
    cam_gb = guided_grad_cam(cam, guided_grads)
    #save_gradient_images(cam_gb, file_name_to_export + '_GGrad_Cam')
    grayscale_cam_gb = convert_to_grayscale(cam_gb)
    #save_gradient_images(grayscale_cam_gb, file_name_to_export + '_GGrad_Cam_gray')
    print('Guided grad cam completed')
    cam_gb = trans(cam_gb)
    grayscale_cam_gb = trans(grayscale_cam_gb)

    return cam_gb, grayscale_cam_gb
Example #4
    def guided_grad_cam(self,
                        numpy_img,
                        target_pspi,
                        file_name_to_export='test',
                        save=False):
        # prep image for the network
        prep_img = torch.from_numpy(
            cv2.resize(numpy_img,
                       (200, 200))[None] / 255).float().unsqueeze_(0)
        prep_img = prep_img.requires_grad_().cuda()

        # Grad cam
        # Generate cam mask
        cam = generate_cam(self.pretrained_model, prep_img, target_pspi)

        # Guided backprop
        GBP = GuidedBackprop(self.pretrained_model)
        # Get gradients
        guided_grads = GBP.generate_gradients(prep_img, target_pspi)

        # Guided Grad cam
        cam_gb = guided_grad_cam(cam, guided_grads)
        grayscale_cam_gb = convert_to_grayscale(cam_gb)
        if save:
            save_gradient_images(cam_gb, file_name_to_export + '_GGrad_Cam')
            save_gradient_images(grayscale_cam_gb,
                                 file_name_to_export + '_GGrad_Cam_gray')
        return cam_gb, grayscale_cam_gb
Example #5
def generate_one_image_smooth_gradient(model_name,
                                       image_name,
                                       target_class,
                                       full_image_path,
                                       pretrain=True):
    if model_name == 'denseNet':
        pretrained_model = models.densenet121(pretrained=pretrain)
    elif model_name == "resNet":
        pretrained_model = models.resnet18(pretrained=pretrain)
    else:
        raise ValueError(f'Unsupported model_name: {model_name}')

    original_image = Image.open(full_image_path).convert('RGB')
    prep_img = customPreProcessing(original_image)
    save_image_name = image_name + model_name + '_SM_pretrained'
    # save_image_name = image_name + model_name + '_SM_randomWeights'

    # Vanilla backprop
    VBP = VanillaBackprop(pretrained_model, _type=model_name)
    param_n = 50
    param_sigma_multiplier = 4
    smooth_grad = generate_smooth_grad(
        VBP,  # a GuidedBackprop instance could be passed here instead
        prep_img,
        target_class,
        param_n,
        param_sigma_multiplier)
    # Save colored gradients
    save_gradient_images(smooth_grad, save_image_name + '_colored')
    # Convert to grayscale
    grayscale_smooth_grad = convert_to_grayscale(smooth_grad)
    # Save grayscale gradients
    save_gradient_images(grayscale_smooth_grad, save_image_name)
    print('Smooth grad completed')
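
generate_smooth_grad is likewise imported from helper code. The idea is to average gradients over several noise-perturbed copies of the input; a rough sketch under that assumption (the interface of backprop and the noise scale are guesses) is:

import torch

def smooth_grad_sketch(backprop, prep_img, target_class, param_n=50, param_sigma_multiplier=4):
    """Average gradients over param_n noisy copies of the input (SmoothGrad).

    Assumes backprop exposes generate_gradients(img, target) returning a NumPy array.
    """
    sigma = param_sigma_multiplier / (torch.max(prep_img) - torch.min(prep_img)).item()
    smooth_grad = None
    for _ in range(param_n):
        noisy_img = prep_img + torch.randn_like(prep_img) * sigma
        grads = backprop.generate_gradients(noisy_img, target_class)
        smooth_grad = grads if smooth_grad is None else smooth_grad + grads
    return smooth_grad / param_n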
Example #6
    def interpret_image(self, image, target_class, result_dir):
        # Get gradients
        guided_grads = self.algo.attribute(
            image, target_class).detach().cpu().numpy()[0]
        print(guided_grads.shape)
        # Save colored gradients
        save_gradient_images(guided_grads, result_dir + '_Guided_BP_color')
        # Convert to grayscale
        grayscale_guided_grads = convert_to_grayscale(
            guided_grads * image.detach().numpy()[0])
        # Save grayscale gradients
        save_gradient_images(grayscale_guided_grads,
                             result_dir + '_Guided_BP_gray')
        # Positive and negative saliency maps
        pos_sal, neg_sal = get_positive_negative_saliency(guided_grads)
        save_gradient_images(pos_sal, result_dir + '_pos_sal')
        save_gradient_images(neg_sal, result_dir + '_neg_sal')
        print('Guided backprop completed')
Example #7
        one_hot_output[0][target_class] = 1
        # Backward pass
        model_output.backward(gradient=one_hot_output)
        # Convert Pytorch variable to numpy array
        # [0] to get rid of the first channel (1,3,224,224)
        gradients_as_arr = self.gradients.data.numpy()[0]
        return gradients_as_arr


if __name__ == '__main__':
    target_example = 0  # Snake
    (original_image, prep_img, target_class, file_name_to_export, pretrained_model) =\
        get_example_params(target_example)

    # Guided backprop
    GBP = GuidedBackprop(pretrained_model)
    # Get gradients
    guided_grads = GBP.generate_gradients(prep_img, target_class)
    # Save colored gradients
    save_gradient_images(guided_grads, file_name_to_export + '_Guided_BP_color')
    print("exported to: " + str(os.getcwd()))
    # Convert to grayscale
    grayscale_guided_grads = convert_to_grayscale(guided_grads)
    # Save grayscale gradients
    save_gradient_images(grayscale_guided_grads, file_name_to_export + '_Guided_BP_gray')
    # Positive and negative saliency maps
    pos_sal, neg_sal = get_positive_negative_saliency(guided_grads)
    save_gradient_images(pos_sal, file_name_to_export + '_pos_sal')
    save_gradient_images(neg_sal, file_name_to_export + '_neg_sal')
    print('Guided backprop completed')
Example #8
        model_output = self.model(self.input_image)
        # Zero grads
        self.model.zero_grad()
        # Target for backprop
        one_hot_output = torch.FloatTensor(1, model_output.size()[-1]).zero_()
        one_hot_output[0][self.target_class] = 1
        # Backward pass
        model_output.backward(gradient=one_hot_output)
        # Convert Pytorch variable to numpy array
        # [0] to get rid of the first channel (1,3,224,224)
        gradients_as_arr = self.gradients.data.numpy()[0]
        return gradients_as_arr


if __name__ == '__main__':
    # Get params
    target_example = 0  # Snake
    (original_image, prep_img, target_class, file_name_to_export, pretrained_model) =\
        get_params(target_example)
    # Vanilla backprop
    VBP = VanillaBackprop(pretrained_model, prep_img, target_class)
    # Generate gradients
    vanilla_grads = VBP.generate_gradients()
    # Save colored gradients
    save_gradient_images(vanilla_grads, file_name_to_export + '_Vanilla_BP_color')
    # Convert to grayscale
    grayscale_vanilla_grads = convert_to_grayscale(vanilla_grads)
    # Save grayscale gradients
    save_gradient_images(grayscale_vanilla_grads, file_name_to_export + '_Vanilla_BP_gray')
    print('Vanilla backprop completed')
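
The one-hot gradient= trick in generate_gradients above is equivalent to backpropagating from the single target logit. A small self-contained check of that equivalence (toy linear model, purely illustrative):

import torch
import torch.nn as nn

model = nn.Linear(8, 3)                    # toy classifier
x = torch.randn(1, 8, requires_grad=True)
target_class = 1

# Variant A: backward pass driven by a one-hot vector, as in the snippet above
out = model(x)
one_hot = torch.zeros_like(out)
one_hot[0][target_class] = 1
model.zero_grad()
out.backward(gradient=one_hot)
grad_a = x.grad.clone()

# Variant B: backward pass from the scalar target logit
x.grad = None
out = model(x)
model.zero_grad()
out[0, target_class].backward()
grad_b = x.grad.clone()

assert torch.allclose(grad_a, grad_b)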
Example #9
    """
    cam_gb = np.multiply(grad_cam_mask, guided_backprop_mask)
    return cam_gb


if __name__ == '__main__':
    # Get params
    target_example = 0  # Snake
    (original_image, prep_img, target_class, file_name_to_export, pretrained_model) =\
        get_example_params(target_example)

    # Grad cam
    # gcv2 = GradCam(pretrained_model, target_layer=11)
    gcv2 = GradCam(pretrained_model, target_layer=7)
    # Generate cam mask
    cam = gcv2.generate_cam(prep_img, target_class)
    print('Grad cam completed')

    # Guided backprop
    GBP = GuidedBackprop(pretrained_model)
    # Get gradients
    guided_grads = GBP.generate_gradients(prep_img, target_class)
    print('Guided backpropagation completed')

    # Guided Grad cam
    cam_gb = guided_grad_cam(cam, guided_grads)
    save_gradient_images(cam_gb, file_name_to_export + '_GGrad_Cam')
    grayscale_cam_gb = convert_to_grayscale(cam_gb)
    save_gradient_images(grayscale_cam_gb,
                         file_name_to_export + '_GGrad_Cam_gray')
    print('Guided grad cam completed')
Example #10
import torch
from misc_functions import (convert_to_grayscale,
                            save_gradient_images,
                            get_positive_negative_saliency)
from captum.attr import GuidedBackprop
from parkinsonsNet import Network

model = torch.load("/home/anasa2/pre_trained/parkinsonsNet-rest_mpower-rest.pth", map_location="cpu")  

algo = GuidedBackprop(model)

# %%
import numpy as np

input = torch.randn(1, 3, 4000, requires_grad=True)
attribution = algo.attribute(input, target=0).detach().cpu().numpy()[0]
attribution = np.round(convert_to_grayscale(attribution * input.detach().numpy()[0]))
save_gradient_images(attribution, 'signal_color')

# %%
import pandas as pd
import numpy as np
import os
from torch.utils.data import Dataset, DataLoader
import math
import json
import time
import matplotlib.pyplot as plt
%matplotlib inline 
import itertools

class testMotionData(Dataset):
        # Generate xbar images
        xbar_list = self.generate_images_on_linear_path(input_image, steps)
        # Initialize an image composed of zeros
        integrated_grads = np.zeros(input_image.size())
        for xbar_image in xbar_list:
            # Generate gradients from xbar images
            single_integrated_grad = self.generate_gradients(
                xbar_image, target_class)
            # Add rescaled grads from xbar images
            integrated_grads = integrated_grads + single_integrated_grad / steps
        # [0] to get rid of the first channel (1,3,224,224)
        return integrated_grads[0]


if __name__ == '__main__':
    # Get params
    target_example = 0  # Snake
    (original_image, prep_img, target_class, file_name_to_export, pretrained_model) =\
        get_example_params(target_example)
    # Vanilla backprop
    IG = IntegratedGradients(pretrained_model)
    # Generate gradients
    integrated_grads = IG.generate_integrated_gradients(
        prep_img, target_class, 100)
    # Convert to grayscale
    grayscale_integrated_grads = convert_to_grayscale(integrated_grads)
    # Save grayscale gradients
    save_gradient_images(grayscale_integrated_grads,
                         file_name_to_export + '_Integrated_G_gray')
    print('Integrated gradients completed.')
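
generate_images_on_linear_path is called above but not included in the snippet. Assuming the usual Integrated Gradients interpolation from an all-zeros baseline, a minimal stand-in might be:

import numpy as np

def generate_images_on_linear_path(input_image, steps):
    """Scale the input by alpha in [0, 1] to interpolate from a zero baseline."""
    step_list = np.arange(steps + 1) / steps
    return [input_image * alpha for alpha in step_list]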
Example #12

if __name__ == '__main__':
    # Get params
    target_example = 0  # Snake
    (original_image, prep_img, target_class, file_name_to_export, pretrained_model) =\
        get_example_params(target_example)

    VBP = VanillaBackprop(pretrained_model)
    # GBP = GuidedBackprop(pretrained_model)  # if you want to use GBP, don't forget to
    # change the parameter in generate_smooth_grad

    param_n = 50
    param_sigma_multiplier = 4
    smooth_grad = generate_smooth_grad(
        VBP,  # ^This parameter
        prep_img,
        target_class,
        param_n,
        param_sigma_multiplier)

    # Save colored gradients
    save_gradient_images(smooth_grad,
                         file_name_to_export + '_SmoothGrad_color')
    # Convert to grayscale
    grayscale_smooth_grad = convert_to_grayscale(smooth_grad)
    # Save grayscale gradients
    save_gradient_images(grayscale_smooth_grad,
                         file_name_to_export + '_SmoothGrad_gray')
    print('Smooth grad completed')
        one_hot_output = torch.FloatTensor(1, model_output.size()[-1]).zero_()
        one_hot_output[0][target_class] = 1
        # Backward pass
        model_output.backward(gradient=one_hot_output)
        # Convert Pytorch variable to numpy array
        # [0] to get rid of the first channel (1,3,224,224)
        gradients_as_arr = self.gradients.data.numpy()[0]
        return gradients_as_arr


if __name__ == '__main__':
    target_example = 0  # Snake
    (original_image, prep_img, target_class, file_name_to_export, pretrained_model) =\
        get_params(target_example)

    # Guided backprop
    GBP = GuidedBackprop(pretrained_model)
    # Get gradients
    guided_grads = GBP.generate_gradients(prep_img, target_class)
    # Save colored gradients
    save_gradient_images(guided_grads, file_name_to_export + '_Guided_BP_color')
    # Convert to grayscale
    grayscale_guided_grads = convert_to_grayscale(guided_grads)
    # Save grayscale gradients
    save_gradient_images(grayscale_guided_grads, file_name_to_export + '_Guided_BP_gray')
    # Positive and negative saliency maps
    pos_sal, neg_sal = get_positive_negative_saliency(guided_grads)
    save_gradient_images(pos_sal, file_name_to_export + '_pos_sal')
    save_gradient_images(neg_sal, file_name_to_export + '_neg_sal')
    print('Guided backprop completed')
Example #14
"""
Created on Wed Jun 19 17:12:04 2019

@author: Utku Ozbulak - github.com/utkuozbulak
"""
from misc_functions import (get_example_params, convert_to_grayscale,
                            save_gradient_images)
from vanilla_backprop import VanillaBackprop
# from guided_backprop import GuidedBackprop  # To use with guided backprop
# from integrated_gradients import IntegratedGradients  # To use with integrated grads

if __name__ == '__main__':
    # Get params
    target_example = 0  # Snake
    (original_image, prep_img, target_class, file_name_to_export, pretrained_model) =\
        get_example_params(target_example)
    # Vanilla backprop
    VBP = VanillaBackprop(pretrained_model)
    # Generate gradients
    vanilla_grads = VBP.generate_gradients(prep_img, target_class)

    # Make sure dimensions add up!
    grad_times_image = vanilla_grads * prep_img.detach().numpy()[0]
    # Convert to grayscale
    grayscale_vanilla_grads = convert_to_grayscale(grad_times_image)
    # Save grayscale gradients
    save_gradient_images(
        grayscale_vanilla_grads,
        file_name_to_export + '_Vanilla_grad_times_image_gray')
    print('Grad times image completed.')
    # Average it out
    smooth_grad = smooth_grad / param_n
    return smooth_grad


if __name__ == '__main__':
    # Get params
    target_example = 0  # Snake
    (original_image, prep_img, target_class, file_name_to_export, pretrained_model) =\
        get_params(target_example)

    VBP = VanillaBackprop(pretrained_model)
    # GBP = GuidedBackprop(pretrained_model)  # if you want to use GBP, don't forget to
    # change the parameter in generate_smooth_grad

    param_n = 50
    param_sigma_multiplier = 4
    smooth_grad = generate_smooth_grad(VBP,  # ^This parameter
                                       prep_img,
                                       target_class,
                                       param_n,
                                       param_sigma_multiplier)

    # Save colored gradients
    save_gradient_images(smooth_grad, file_name_to_export + '_SmoothGrad_color')
    # Convert to grayscale
    grayscale_smooth_grad = convert_to_grayscale(smooth_grad)
    # Save grayscale gradients
    save_gradient_images(grayscale_smooth_grad, file_name_to_export + '_SmoothGrad_gray')
    print('Smooth grad completed')
        guided_backprop_mask (np_arr):Guided backprop mask
    """
    cam_gb = np.multiply(grad_cam_mask, guided_backprop_mask)
    return cam_gb


if __name__ == '__main__':
    # Get params
    target_example = 0  # Snake
    (original_image, prep_img, target_class, file_name_to_export, pretrained_model) =\
        get_params(target_example)

    # Grad cam
    gcv2 = GradCam(pretrained_model, target_layer=11)
    # Generate cam mask
    cam = gcv2.generate_cam(prep_img, target_class)
    print('Grad cam completed')

    # Guided backprop
    GBP = GuidedBackprop(pretrained_model)
    # Get gradients
    guided_grads = GBP.generate_gradients(prep_img, target_class)
    print('Guided backpropagation completed')

    # Guided Grad cam
    cam_gb = guided_grad_cam(cam, guided_grads)
    save_gradient_images(cam_gb, file_name_to_export + '_GGrad_Cam')
    grayscale_cam_gb = convert_to_grayscale(cam_gb)
    save_gradient_images(grayscale_cam_gb, file_name_to_export + '_GGrad_Cam_gray')
    print('Guided grad cam completed')
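
convert_to_grayscale is imported from misc_functions throughout these examples. Conceptually it collapses the channel dimension and rescales the result; a simplified stand-in (not the exact helper used above) could be:

import numpy as np

def convert_to_grayscale_sketch(grads):
    """Collapse channels by summing absolute values, then rescale to [0, 1].

    Assumes grads has shape (channels, H, W); returns shape (1, H, W).
    """
    grayscale = np.sum(np.abs(grads), axis=0)
    grayscale = (grayscale - grayscale.min()) / (grayscale.max() - grayscale.min() + 1e-12)
    return np.expand_dims(grayscale, axis=0)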