Example #1
# Imports this snippet appears to rely on (keras-vis, Keras, matplotlib, numpy)
import numpy as np
import matplotlib.pyplot as plt
from keras import activations
from keras.applications import VGG16
from vis.utils import utils
from vis.visualization import saliency, overlay


def test():
    # Build the VGG16 network with ImageNet weights
    model = VGG16(weights='imagenet', include_top=True)

    # Utility to search for layer index by name.
    # Alternatively we can specify this as -1 since it corresponds to the last layer.
    layer_idx = utils.find_layer_idx(model, 'predictions')

    # Swap softmax with linear
    model.layers[layer_idx].activation = activations.linear
    model = utils.apply_modifications(model)

    plt.rcParams['figure.figsize'] = (18, 6)

    img1 = utils.load_img('images/ouzel1.jpg', target_size=(224, 224))
    img2 = utils.load_img('images/ouzel2.jpg', target_size=(224, 224))

    # f, ax = plt.subplots(1, 2)
    # ax[0].imshow(img1)
    # ax[1].imshow(img2)

    f, ax = plt.subplots(1, 2)

    for i, img in enumerate([img1, img2]):
        # 20 is the imagenet index corresponding to `ouzel`
        # heatmap = saliency.visualize_cam(model, layer_idx, filter_indices=20, seed_input=img, backprop_modifier='guided')
        heatmap = saliency.visualize_saliency(model, layer_idx, filter_indices=20, seed_input=img, backprop_modifier=None)
        print(np.shape(heatmap))
        # Let's overlay the heatmap onto the original image.
        ax[i].imshow(overlay(heatmap, img))

    plt.show()
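keras-vis recommends swapping the final softmax for a linear activation before computing gradients, since backpropagating through softmax lets the other classes' scores bleed into the map; `apply_modifications` rebuilds the model so the change takes effect. The commented-out call above points at the grad-CAM variant of the same visualization. A minimal sketch of that path, assuming the `model`, `layer_idx`, `img1` and `img2` objects from the example and following the colormap recipe from the keras-vis docs:

import numpy as np
import matplotlib.cm as cm
import matplotlib.pyplot as plt
from vis.visualization import visualize_cam, overlay

f, ax = plt.subplots(1, 2)
for i, img in enumerate([img1, img2]):
    # grad-CAM heatmap for the `ouzel` class (ImageNet index 20)
    heatmap = visualize_cam(model, layer_idx, filter_indices=20,
                            seed_input=img, backprop_modifier='guided')
    # Map the [0, 1] heatmap through the jet colormap, drop the alpha channel,
    # and blend it with the original photo.
    jet_heatmap = np.uint8(cm.jet(heatmap)[..., :3] * 255)
    ax[i].imshow(overlay(jet_heatmap, img))
plt.show()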
Example #2
import numpy as np
from keras.layers import Input, Dense
from keras.models import Model
from vis.visualization import visualize_saliency


def test_for_issues_135():
    inputs = Input((35, ))
    x = Dense(64, activation='relu')(inputs)
    x = Dense(100, activation='relu')(x)
    x = Dense(50)(x)
    model = Model(inputs, x)
    data = np.random.rand(1, 35)
    grads = visualize_saliency(model, -1, filter_indices=0, seed_input=data, keepdims=True)
    assert grads.shape == (35, )
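This test feeds a plain dense network a flat 35-feature input instead of an image and checks that the returned saliency map has one gradient value per input feature, i.e. the same `(35,)` shape as the input.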
Example #3
def test_visualize_saliency_with_unkeepdims(model, data):
    # `model` and `data` are pytest fixtures supplied by the test suite;
    # a hypothetical pair is sketched after Example #4 below.
    grads = visualize_saliency(model, -1, filter_indices=0, seed_input=data, keepdims=True)
    assert grads.shape == (28, 28, 3)
Example #4
def test_visualize_saliency(model, data):
    # FIXME: Can't pass None as filter_indices with the Theano backend.
    # To keep the test green, it is set to zero instead.
    # grads = visualize_saliency(model, -1, filter_indices=None, seed_input=data)
    grads = visualize_saliency(model, -1, filter_indices=0, seed_input=data)
    assert grads.shape == (28, 28)
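Examples #3 and #4 are pytest-style tests: they receive `model` and `data` as fixtures and also rely on `from vis.visualization import visualize_saliency` being in scope. The fixtures themselves are not part of the snippets above; a hypothetical pair consistent with the asserted `(28, 28, 3)` input shape might look like this:

import numpy as np
import pytest
from keras.layers import Conv2D, Dense, Flatten, Input
from keras.models import Model


@pytest.fixture
def model():
    # Tiny classifier over 28x28 RGB inputs; its last Dense layer is the one
    # addressed by layer index -1 in the tests above.
    inputs = Input((28, 28, 3))
    x = Conv2D(8, (3, 3), activation='relu')(inputs)
    x = Flatten()(x)
    x = Dense(10, activation='softmax')(x)
    return Model(inputs, x)


@pytest.fixture
def data():
    # A single random image matching the model's input shape.
    return np.random.rand(28, 28, 3)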
Example #5
        self = super().load(path)
        self.model.layers[-1].activation = keras.activations.linear
        self.model = vu.utils.apply_modifications(self.model)
        return self

    def get_validation_data(
            self, dataset: som_dataset.SOMDataset) -> som_dataset.SOMDataset:
        return dataset.filter(labels=self.data_ids["validation"])

    def transform(self, case, group, maximization=False):
        """Get saliency gradients for the given group for the selected case."""
        xdata, _ = self.array_from_cases([case])
        input_indices = [*range(len(xdata))]
        gradients = visualize_saliency(self.model,
                                       self.layer_idx,
                                       self.config.groups.index(group),
                                       seed_input=xdata,
                                       input_indices=input_indices,
                                       maximization=maximization)
        return gradients

    def calculate_saliency(self,
                           som_sequence,
                           case,
                           group,
                           maximization=False):
        """Calculates the saliency values / gradients for the case, model and
        each of the classes.
        Args:
            dataset: SOMMapDataset object.
            case: Case object for which the saliency values will be computed.
            group: Select group.