Example #1
0
def plot_lrp(fig, im):
    """Run the network on image ``im``, compute its LRP heatmap, and draw it.

    Reads the module-level ``method`` as the LRP rule name, renders the
    relevance scores onto ``axNnPred`` and attaches a colorbar on
    ``axColormapLRP``.

    Parameters
    ----------
    fig : matplotlib figure the axes belong to.
    im : flat image vector fed to the network (wrapped into a batch of 1).

    Returns
    -------
    fig : the same figure, after the LRP axes have been updated.
    """
    global method

    typeMethod = method

    # Forward pass on a single-image batch.
    nnPred = nn.forward(np.array([im]))
    lrpScores = nn.lrp(nnPred, typeMethod, 2)
    # NaNs can appear for some rules; warn instead of plotting garbage silently.
    if np.isnan(np.sum(lrpScores[0])):
        print('Warning: NaN values. Most likely because score after first layer'
              ' gives two negative numbers and you get division by 0.')
    caxNnPred = axNnPred.imshow(render.vec2im(lrpScores[0]), cmap='jet')
    fig.colorbar(caxNnPred, cax=axColormapLRP)
    infoNNPred = 'NN probabilities: square = {}, triangle = {}'.format(round(nnPred[0][0], 2), round(nnPred[0][1], 2))
    axNnPred.set_title(infoNNPred)

    print('Done plotting the LRP map')
    return fig
Example #2
0
def plot_shape(val):
    """Slider callback: recolor the currently selected shape and redraw it.

    Reads the module-level ``rotateIdx`` and ``shape`` selection plus the two
    color sliders, paints shape/background pixels, and updates ``axOrigImage``.

    Parameters
    ----------
    val : slider value passed in by matplotlib; unused — current state is
          read from the slider widgets directly.

    Returns
    -------
    (fig, im) : the updated figure and the recolored flat image vector.
    """
    global rotateIdx, shape
    shapeColor = sShapeColor.val
    backgroundColor = sBackgroundColor.val

    im = np.array(uniqShapeRot[shape][rotateIdx])
    # Compute both masks BEFORE assigning. Writing shapeColor first can
    # create new 0-valued pixels, which the second assignment would then
    # wrongly repaint as background (e.g. whenever shapeColor == 0).
    shapeMask = im == 1
    backgroundMask = im == 0
    im[shapeMask] = shapeColor
    im[backgroundMask] = backgroundColor

    info = 'shape color = {}, background color = {}, rotateIdx = {}, shape = {}.'.format(shapeColor, backgroundColor, rotateIdx, shape)
    fig.suptitle(info)

    axOrigImage.imshow(render.vec2im(im), cmap='gray_r', vmin=0, vmax=1)

    print('Done plotting the shape')

    return (fig, im)
Example #3
0
# Label of the currently selected training sample; the double-bracket
# (list) index keeps a 2-D (1, n_classes) shape instead of a 1-D row.
# NOTE(review): assumes `idx` and `x` were defined earlier in the script —
# not visible in this chunk.
y = Y['train'][[idx]]
nnPred = nn.forward(x)
# print nnPred
# lrpScores = nn.lrp(nnPred, 'alphabeta', 2)
# print np.sum(lrpScores)
#plt.matshow(render.vec2im(x[0] + innerCircleSq))

# inspect first linear layer
# --------------------------

# Weight matrix and bias of the first (input) linear layer.
W1 = nn.modules[0].W
B = nn.modules[0].B
# Number of output units in the first layer (columns of W1).
dim = int(W1.shape[1])
# One weight column per output unit; render each column as an image.
W11 = W1[:, 0]
W12 = W1[:, 1]
W11Im = render.vec2im(W11)
W12Im = render.vec2im(W12)

# init figure
fig = plt.figure()
# One row of subplots, one per first-layer unit, each with its own colorbar.
grid = axes_grid1.AxesGrid(
    fig,
    111,
    nrows_ncols=(1, dim),
    axes_pad=0.55,
    share_all=True,
    cbar_location="right",
    cbar_mode="each",
    cbar_size="5%",
    cbar_pad="2%",
)
Example #4
0
# load trained neural network (nn)
nnName = 'nn_Linear_1024_2_Rect_Linear_2_2_SoftMax_(batchsize_10_number_iterations_10000).txt'
nn = model_io.read(settings.modelPath + nnName)

# I do not want to load the data every time, therefore the if statement
# NOTE(review): at module level locals() is globals(), so this works as a
# "load once" guard when the script is re-run in the same interpreter.
if 'X' not in locals():
    # load data
    X, Y = data_loader.load_data()

# load first layer weights
W1 = nn.modules[0].W
B1 = nn.modules[0].B
# Number of output units of the first layer (columns of W1).
dim = int(W1.shape[1])
# One weight column per first-layer unit, rendered as an image.
W11 = W1[:, 0]
W12 = W1[:, 1]
W11Image = render.vec2im(W11)
W12Image = render.vec2im(W12)
# Flat index of the center pixel of the (sqrt(n) x sqrt(n)) image:
# middle row plus half a row to land in the middle column.
n = len(W11)
idxMiddleImage = int(n / 2 + math.sqrt(n) / 2)

# calculate innercircles to find (hidden) relationships
innerCircleSq, innerCircleTr, ring = data_analysis.inner_circles()
unionSq, unionTr = data_analysis.union_shapes()
# Square-shape area that lies outside the inner circle, and its
# projection onto each of the two first-layer weight vectors.
shapeAreaOutsideRing = unionSq - innerCircleSq
weightShapeAreaOutsideRing1 = W11.dot(shapeAreaOutsideRing)
weightShapeAreaOutsideRing2 = W12.dot(shapeAreaOutsideRing)

# unique shape rotation of squares and triangles
uniqShapeRot = data_analysis.unique_shapes()
uniqShapeRotSq = uniqShapeRot['square']
uniqShapeRotTr = uniqShapeRot['triangle']
Example #5
0
# lrp evaluation
nnPred = nn.forward(x)
# Three output-layer relevance seeds to compare: the actual network
# prediction, and the two pure one-hot class vectors.
relevanceValues = {
    'nn prediction': nnPred,
    'square': np.array([[1., 0.]]),
    'triangle': np.array([[0., 1.]])
}
lrpRelevance = {}
for idx, (key, relVal) in enumerate(relevanceValues.items()):

    # find and save lrp relevance
    lrpRelevance[key] = nn.lrp(relVal, 'alphabeta', 2)

    # generate compound heatmap
    # Overlays the relevance heatmap on the input image x.
    # NOTE(review): `axes` and `fig` are assumed to be created earlier in
    # the script — not visible in this chunk.
    hmComp, R = render.hm_to_rgb(render.vec2im(lrpRelevance[key]),
                                 X=render.vec2im(x),
                                 scaling=3,
                                 shape=(),
                                 sigma=2,
                                 cmap='jet',
                                 normalize=True)

    # plot result
    axes[idx].set_title(key + ': ' + str(np.round(relVal[0], 2)))
    img = axes[idx].imshow(hmComp)
    axes[idx].axis('off')
    fig.colorbar(img, ax=axes[idx])

# just a simple plot of weights without normalization
titlePlot = 'LRP Heatmaps for Varying Choices (Without Scaling at SoftMax Layer)'