Example #1
import numpy as np

# Assumed import: Softmax comes from the surrounding project (e.g. a
# softmaxModels module); adjust the path to your codebase.
from softmaxModels import Softmax


def stretch1DModel():
    steep = 5
    weight = (np.array([-3, -2, -1, 0]) * steep).tolist()
    bias = (np.array([6, 5, 3, 0]) * steep).tolist()

    low = 0
    high = 5
    res = 100

    # Define the 1-D likelihood model and plot it
    a = Softmax(weight, bias)
    a.plot1D(low=low, high=high)

    #weight = (np.array([[-1,0],[0,0]])*steep).tolist()
    #bias = (np.array([3,0])*steep).tolist()

    # Stretch the 1-D model into 2-D by padding each weight with a zero
    # y-component, leaving the class boundaries parallel to the y-axis
    for i in range(len(weight)):
        weight[i] = [weight[i], 0]

    b = Softmax(weight, bias)
    b.plot2D(low=[0, 0], high=[5, 5])
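What plot1D draws for each class is the standard softmax form, P(class i | x) = exp(w_i*x + b_i) / sum_j exp(w_j*x + b_j). Here is a minimal NumPy sketch of those curves, assuming nothing about the Softmax class itself (softmax_probs and its signature are illustrative, not part of the library):

import numpy as np

def softmax_probs(x, weights, biases):
    # P(class i | x) = exp(w_i*x + b_i) / sum_j exp(w_j*x + b_j)
    acts = np.outer(weights, x) + np.array(biases)[:, None]
    acts -= acts.max(axis=0)  # subtract the max for numerical stability
    e = np.exp(acts)
    return e / e.sum(axis=0)  # shape: (num_classes, len(x))

x = np.linspace(0, 5, 100)
probs = softmax_probs(x, [-15, -10, -5, 0], [30, 25, 15, 0])  # steep = 5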
Example #2
import time

import matplotlib.pyplot as plt

# Assumed imports: Softmax, GM, and Gaussian come from the surrounding
# project (e.g. softmaxModels / gaussianMixtures modules); adjust the paths
# to your codebase. The VL (variational Laplace fusion) routine is assumed
# to be defined in this file or imported from the same project.
from softmaxModels import Softmax
from gaussianMixtures import GM, Gaussian


def testVL():

    #plotting parameters
    low = 0
    high = 5
    res = 100

    # Define the softmax likelihood model; softClass selects which class
    # measurement gets fused with the prior
    weight = [-30, -20, -10, 0]
    bias = [60, 50, 30, 0]
    softClass = 1
    likelihood = Softmax(weight, bias)

    # Define the prior: a single Gaussian component
    prior = GM()
    prior.addG(Gaussian(3, 0.25, 1))

    # Time each fusion method separately (time.clock() was removed in
    # Python 3.8; perf_counter() is the modern replacement). The original
    # code subtracted the duration timeVL from a raw timestamp, so timeVB
    # was wrong; each method now gets its own start time.
    startTime = time.perf_counter()
    postVL = VL(prior, likelihood, softClass, low, high, res)
    timeVL = time.perf_counter() - startTime

    startTime = time.perf_counter()
    postVB = likelihood.runVB(prior, softClassNum=softClass)
    timeVB = time.perf_counter() - startTime

    #Normalize postVB
    #postVB.normalizeWeights()

    #share weights
    #postVL[0].weight = postVB[0].weight
    postVB[0].weight = 1  # force unit weight so the posteriors are comparable

    [x0, classes] = likelihood.plot1D(res=res, vis=False)
    [x1, numApprox] = likelihood.numericalProduct(prior,
                                                  softClass,
                                                  low=low,
                                                  high=high,
                                                  res=res,
                                                  vis=False)

    # Classes ordered left to right along x (boundaries fall at x = 1, 2, 3)
    softClassLabels = ['Far Left', 'Left', 'Right', 'Far Right']
    labels = [
        'likelihood', 'prior', 'Normed VB Posterior', 'Normed VL Posterior',
        'Numerical Posterior', 'Normed True Posterior'
    ]
    [x2, pri] = prior.plot(low=low, high=high, num=res, vis=False)
    [x3, pos] = postVB.plot(low=low, high=high, num=res, vis=False)
    [x4, pos2] = postVL.plot(low=low, high=high, num=res, vis=False)
    plt.plot(x0, classes[softClass])
    plt.plot(x2, pri)
    plt.plot(x3, pos)
    plt.plot(x4, pos2)
    plt.plot(x1, numApprox)
    plt.ylim([0, 3.1])
    plt.xlim([low, high])
    plt.title("Fusion of prior with: " + softClassLabels[softClass])

    # Sum of squared errors of each posterior against the numerical baseline
    SSE_VL = 0
    SSE_VB = 0
    for i in range(len(numApprox)):
        SSE_VL += (numApprox[i] - pos2[i])**2
        SSE_VB += (numApprox[i] - pos[i])**2

    var_VL = postVL.getVars()[0]
    var_VB = postVB.getVars()[0]

    # Recover moments of the numerical posterior: take its mode as the mean
    # (the baseline is near-Gaussian) and a grid-weighted second moment as
    # the variance (assumes numApprox sums to 1 over the grid)
    mean_True = x1[numApprox.index(max(numApprox))]
    var_True = 0
    for i in range(len(numApprox)):
        var_True += numApprox[i] * (x1[i] - mean_True)**2

    # Moment-matched single Gaussian standing in for the true posterior
    TruePostNorm = GM()
    TruePostNorm.addG(Gaussian(mean_True, var_True, 1))
    [x5, postTrue] = TruePostNorm.plot(low=low, high=high, num=res, vis=False)
    plt.plot(x5, postTrue)

    print("Variational Laplace:")
    print("Time: " + str(timeVL))
    print("Mean Error: " + str(postVL.getMeans()[0] - mean_True))
    print("Variance Error: " + str(postVL.getVars()[0] - var_True))
    print("ISD from Normed True: " + str(TruePostNorm.ISD(postVL)))
    print("")

    print("Variational Bayes:")
    print("Time: " + str(timeVB))
    print("Mean Error: " + str(postVB.getMeans()[0] - mean_True))
    print("Variance Error: " + str(postVB.getVars()[0] - var_True))
    print("ISD from Normed True: " + str(TruePostNorm.ISD(postVB)))
    print("")

    print("Time Ratio (L/B): " + str(timeVL / timeVB))

    plt.legend(labels)
    plt.show()
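For reference, the numerical baseline that both variational posteriors are scored against is just the pointwise product of the Gaussian prior and the selected softmax class likelihood, renormalized on the grid. Here is a minimal self-contained sketch of that fusion under those assumptions (grid_posterior and its signature are illustrative; the library's numericalProduct may differ in detail):

import numpy as np

def grid_posterior(x, prior_mean, prior_var, weights, biases, soft_class):
    # Gaussian prior density evaluated on the grid
    prior = np.exp(-(x - prior_mean)**2 / (2 * prior_var))
    prior /= np.sqrt(2 * np.pi * prior_var)
    # Softmax likelihood of the observed class at each grid point
    acts = np.outer(weights, x) + np.array(biases)[:, None]
    acts -= acts.max(axis=0)  # numerical stability
    e = np.exp(acts)
    like = (e / e.sum(axis=0))[soft_class]
    # Bayes fusion: normalize the product so it integrates to 1 on the grid
    post = prior * like
    return post / np.trapz(post, x)

x = np.linspace(0, 5, 100)
post = grid_posterior(x, 3, 0.25, [-30, -20, -10, 0], [60, 50, 30, 0], 1)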