Example #1
0
def test1DSoftmax():
    """Fuse a 1D Gaussian-mixture prior with one softmax likelihood class.

    Runs variational Bayes (VB) fusion, then overlays on a single figure:
    the chosen likelihood class, the prior, the VB posterior, and a
    numerical approximation of the true posterior.
    """
    # Softmax likelihood parameters (two classes over a 1D state).
    # Alternative 4-class model kept for reference:
    # weight = [-30, -20, -10, 0]
    # bias = [60, 50, 30, 0]
    weight = [-5, 0]
    bias = [5, 0]
    softClass = 0  # index of the softmax class to fuse with
    low = 0        # plot window lower bound
    high = 5       # plot window upper bound
    res = 100      # number of sample points

    # Define likelihood model
    a = Softmax(weight, bias)

    # Build a prior Gaussian mixture
    prior = GM([2, 4], [1, 0.5], [1, 0.5])

    # Get the posterior via variational Bayes
    post = a.runVB(prior, softClassNum=softClass)

    # Use the shared low/high variables rather than repeating the literals
    # (original hard-coded 0 and 5 here).
    a.plot1D(res=res, low=low, high=high)

    # Plot everything (sample curves without displaying intermediate figures)
    [x0, classes] = a.plot1D(res=res, vis=False)
    [x1, numApprox] = a.numericalProduct(prior,
                                         softClass,
                                         low=low,
                                         high=high,
                                         res=res,
                                         vis=False)

    softClassLabels = ['Far left', 'Left', 'Far Right', 'Right']
    labels = ['likelihood', 'prior', 'VB Posterior', 'Numerical Posterior']
    [x2, pri] = prior.plot(low=low, high=high, num=res, vis=False)
    [x3, pos] = post.plot(low=low, high=high, num=res, vis=False)
    plt.plot(x0, classes[softClass])
    plt.plot(x2, pri)
    plt.plot(x3, pos)
    plt.plot(x1, numApprox)
    plt.ylim([0, 1.1])
    plt.xlim([low, high])
    plt.title("Fusion of prior with: " + softClassLabels[softClass])
    plt.legend(labels)
    plt.show()
    # BUG FIX: removed two stray tab-indented lines that referenced an
    # undefined name `i` (a paste artifact); they raised NameError and
    # mixed tabs with the file's 4-space indentation.
# NOTE(review): disabled scratch code. The triple-quoted string below keeps
# an old sweep (build one single-Gaussian GM per grid point in [-20, 20] and
# query getAction against Gamma) from executing; it is retained as reference
# only. The string contents are left byte-for-byte unchanged.
'''

xs = [i for i in range(-20, 21)]

gs = [0] * len(xs)
for i in range(0, len(xs)):
    gs[i] = GM()
    gs[i].addG(Gaussian(i - 20, 5, 1))
acts = [-1] * len(xs)
for i in range(0, len(xs)):
    acts[i] = getAction(gs[i], Gamma)

print(acts)
'''
# Scratch alpha-vector backup step: seed the alpha set from reward
# component r[2], undo a 0.1 scaling, and run one point-based backup.
# BUG FIX: removed the dead store `al1 = [GM()]` — it was overwritten
# immediately by the deepcopy on the next line.
al1 = copy.deepcopy(r[2])
al1.scalerMultiply(1 / (.1))
al1 = [al1]

al2 = backup(al1, modes, delA, delAVar, pz, r, maxMix, b)

al2.plot(low=-20, high=20)
# NOTE(review): the triple-quote delimiters below appear mangled by the
# scrape — depending on how they pair with earlier quotes, the
# beliefUpdate lines may or may not actually execute. Confirm against the
# original example before relying on this section. Left byte-identical.
'''
'''
b.plot(low = -20,high=20); 
b = beliefUpdate(modes,delA,delAVar,pz,b,act,obs,5); 
b.plot(low = -20,high=20); 
'''
Example #3
0
def testVL():
    """Compare Variational Laplace (VL) and Variational Bayes (VB) fusion
    of a Gaussian prior with one softmax likelihood class.

    Prints per-method timing, mean/variance error versus a numerical
    ground truth, and ISD from a moment-matched "true" posterior, then
    plots every curve on a single figure.
    """
    # plotting parameters
    low = 0
    high = 5
    res = 100

    # Define likelihood model (4-class softmax over a 1D state)
    weight = [-30, -20, -10, 0]
    bias = [60, 50, 30, 0]
    softClass = 1
    likelihood = Softmax(weight, bias)

    # Define prior: a single Gaussian component
    prior = GM()
    prior.addG(Gaussian(3, 0.25, 1))

    # time.clock() was removed in Python 3.8; time.perf_counter() is the
    # recommended replacement for interval timing.
    startTime = time.perf_counter()
    postVL = VL(prior, likelihood, softClass, low, high, res)
    timeVL = time.perf_counter() - startTime

    # BUG FIX: the original computed `timeVB = clock() - timeVL`, which
    # subtracts a *duration* from a *timestamp* and made both timeVB and
    # the final time ratio meaningless. Measure VB from its own start.
    startTime = time.perf_counter()
    postVB = likelihood.runVB(prior, softClassNum=softClass)
    timeVB = time.perf_counter() - startTime

    #Normalize postVB
    #postVB.normalizeWeights();

    # Share weights so the two single-component posteriors are comparable.
    #postVL[0].weight = postVB[0].weight;
    postVB[0].weight = 1

    [x0, classes] = likelihood.plot1D(res=res, vis=False)
    [x1, numApprox] = likelihood.numericalProduct(prior,
                                                  softClass,
                                                  low=low,
                                                  high=high,
                                                  res=res,
                                                  vis=False)

    softClassLabels = ['Far left', 'Left', 'Far Right', 'Right']
    labels = [
        'likelihood', 'prior', 'Normed VB Posterior', 'Normed VL Posterior',
        'Numerical Posterior', 'Normed True Posterior'
    ]
    [x2, pri] = prior.plot(low=low, high=high, num=res, vis=False)
    [x3, pos] = postVB.plot(low=low, high=high, num=res, vis=False)
    [x4, pos2] = postVL.plot(low=low, high=high, num=res, vis=False)
    plt.plot(x0, classes[softClass])
    plt.plot(x2, pri)
    plt.plot(x3, pos)
    plt.plot(x4, pos2)
    plt.plot(x1, numApprox)
    plt.ylim([0, 3.1])
    plt.xlim([low, high])
    plt.title("Fusion of prior with: " + softClassLabels[softClass])

    # Sum of squared errors of each approximation against the numerical curve.
    SSE_VL = 0
    SSE_VB = 0
    for i in range(0, len(numApprox)):
        SSE_VL += (numApprox[i] - pos2[i])**2
        SSE_VB += (numApprox[i] - pos[i])**2

    var_VL = postVL.getVars()[0]
    var_VB = postVB.getVars()[0]
    var_True = 0
    mean_True = 0

    # Mode of the numerical posterior serves as the "true" mean; the
    # "true" variance is the density-weighted spread around it.
    # NOTE(review): assumes numApprox is normalized over the grid —
    # otherwise var_True is unscaled; confirm against numericalProduct.
    mean_True = x1[numApprox.index(max(numApprox))]
    for i in range(0, len(numApprox)):
        var_True += numApprox[i] * (x1[i] - mean_True)**2

    TruePostNorm = GM()
    TruePostNorm.addG(Gaussian(mean_True, var_True, 1))
    [x5, postTrue] = TruePostNorm.plot(low=low, high=high, num=res, vis=False)
    plt.plot(x5, postTrue)

    print("Variational Laplace:")
    print("Time: " + str(timeVL))
    print("Mean Error: " + str(postVL.getMeans()[0] - mean_True))
    print("Variance Error: " + str(postVL.getVars()[0] - var_True))
    print("ISD from Normed True: " + str(TruePostNorm.ISD(postVL)))
    print("")

    print("Variational Bayes:")
    print("Time: " + str(timeVB))
    print("Mean Error: " + str(postVB.getMeans()[0] - mean_True))
    print("Variance Error: " + str(postVB.getVars()[0] - var_True))
    print("ISD from Normed True: " + str(TruePostNorm.ISD(postVB)))
    print("")

    print("Time Ratio (L/B): " + str(timeVL / timeVB))

    plt.legend(labels)
    plt.show()
Example #4
0
# Build three example alpha functions, each a two-component Gaussian mixture
# (mean, variance, weight per component).
alpha1 = GM()
alpha1.addG(Gaussian(1, 1, 2))
alpha1.addG(Gaussian(2, 0.5, -1))

alpha2 = GM()
alpha2.addG(Gaussian(3, 0.25, 0.5))
alpha2.addG(Gaussian(4, 0.5, -1))

alpha3 = GM()
alpha3.addG(Gaussian(5, 2, 2))
alpha3.addG(Gaussian(3, 0.5, -1))

# Sample each mixture over [0, 5] without displaying intermediate figures.
xs1, vals1 = alpha1.plot(low=0, high=5, vis=False)
xs2, vals2 = alpha2.plot(low=0, high=5, vis=False)
xs3, vals3 = alpha3.plot(low=0, high=5, vis=False)

# Overlay the three alpha functions in distinct colors.
plt.plot(xs1, vals1, linewidth=5, color='r')
plt.plot(xs2, vals2, linewidth=5, color='g')
plt.plot(xs3, vals3, linewidth=5, color='y')

# Zero baseline plus axis labels and legend.
plt.axhline(y=0, xmin=0, xmax=5, linewidth=2, color='k')
plt.title('Alpha Functions', fontsize=30)
plt.xlabel('Position', fontsize=20)
plt.ylabel('Value', fontsize=20)
plt.legend(['Move Right', 'Stay', 'Move Left'])