Example #1
File: sums.py Project: dkasak/pacal
S = [0]*n
t0 = time.time()

#show()
#A = UniformDistr(0,1, sym = "A")
#B = UniformDistr(0,1, sym = "B")
for i in range(n):
    print "X{}".format(i)
    X[i] = BetaDistr(2, 2, sym = "X{}".format(i))
    if i==0:
        S[i] = X[0]        
    else:
        S[i] = S[i-1] + X[i]
        S[i].setSym("S{}".format(i))

M = Model(X, S[1:])
print M
M.toGraphwiz()
#M = M.inference([S[-1], S[-4]], [S[-3]], [1])
#M = M.inference([X[0], X[1]], [S[-1]], [3.5])
print "===================="
M1 = M.inference(wanted_rvs=[X[0], X[1]], cond_rvs=[S[-1]], cond_X=[1])
print "====================", M1
M2 = M.inference(wanted_rvs=[S[1], S[4]])
print "====================", M2
M3 = M.inference(wanted_rvs=[S[1], S[4]], cond_rvs=[S[3]], cond_X=[2])
print "====================", M3
MC_X0 = M.inference(wanted_rvs=[X[0]], cond_rvs=[S[-1]], cond_X=[1])
print "===================="

print M1
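# The fragment above omits its setup: it needs the timing and pacal imports, the
# number of summands n and a pre-allocated list X.  A minimal sketch, assuming
# the module paths below and an arbitrary n (neither is taken from sums.py):
import time
from pacal import BetaDistr                # Beta(2, 2) summands
from pacal.depvars.models import Model     # model over dependent random variables

n = 5              # number of summands (assumed)
X = [None] * n     # filled with BetaDistr(2, 2, sym="Xi") in the loop above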
Example #2
    # U will be conditioned on, so in effect constant
    if i==0:
        Y.append(Y0 * K + h*U[i])
    else:
        Y.append(Y[i-1] * K + h*U[i])
    Y[-1].setSym("Y" + str(i+1))
    ei = NormalDistr(0.0, 0.1) | Between(-0.4, 0.4)
    ei.setSym("E{0}".format(i))
    E.append(ei)
    O.append(Y[-1] + E[-1])
    O[-1].setSym("O{0}".format(i))
#! 
#! Model
#! -----
P = NDProductDistr([A, Y0] + E + U)
M = Model(P, O)
print M
M.eliminate_other(E + Y + O + [A, Y0] + U)
print M
M.toGraphwiz(f=open('bn.dot', mode="w+"))

#!
#! Joint distribution of initial condition and parameter of equation
#! -----------------------------------------------------------------

i = 0
ay0 = []
ui = [0.0]*n
figure()
for yend in [0.25, 1.25, 2.25]:
    M2 = M.inference(wanted_rvs=[A, Y0], cond_rvs=[O[-1]] + U, cond_X=[yend] + ui)
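# This fragment begins inside the state-update loop, so it relies on objects
# defined earlier in the file.  A minimal sketch of that setup; the module paths,
# the priors and the value of n are assumptions, not taken from the fragment, and
# the input priors mirror the UniformDistr inputs of Example #4.  Between, used to
# truncate the observation noise, is assumed to ship with the pacal imports.
from pacal import BetaDistr, NormalDistr, UniformDistr
from pacal.depvars.models import Model
from pacal.depvars.nddistr import NDProductDistr
from pylab import figure, show

n = 5                              # number of Euler steps (assumed)
h = 1.0 / n                        # step size
A = BetaDistr(2, 4, sym="A")       # equation parameter (assumed prior)
Y0 = BetaDistr(2, 2, sym="Y0")     # initial condition (assumed prior)
K = 1 + h * A                      # one-step Euler multiplier
K.setSym("K")
U = [UniformDistr(-0.2, 0.2, sym="U{0}".format(i)) for i in range(n)]  # inputs (assumed)
Y, E, O = [], [], []               # states, observation errors, observations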
Example #3
Y0 = BetaDistr(2, 2, sym="Y0")
n = 5
h = 1.0/n


K = (1 + h*A)
K.setSym("K") 
Y = [Y0]*(n+1)
for i in xrange(n+1):
    if i==0:
        pass
    else:
        Y[i] = Y[i-1] * K
        Y[i].setSym("Y" + str(i))  
P = NDProductDistr([Factor1DDistr(A), Factor1DDistr(Y[0])])
M = Model(P, Y[1:])
M.eliminate_other([K] + Y)

#M2 = M.inference2([Y[0], A], [Y[n]], [1])
#M2.plot(); print M2; show()
#M2 = M.inference2([Y[0]], [Y[n]], [0.5])
#figure()
#M2.plot(); print M2; 
figure()
Y[-1].plot(color='r', linewidth=5)
M3 = M.inference([Y[-1]], [], [])

M3.plot()
print M3

X0 = BetaDistr(2, 2)
y = X0 * exp(A) 
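# This fragment assumes the parameter A plus the model and plotting imports; the
# exp on the last line must be pacal's exp, which operates on random variables.
# A minimal setup sketch, assuming the module paths and taking the prior for A
# from the near-identical Example #12:
from pacal import BetaDistr
from pacal.depvars.models import Model
from pacal.depvars.nddistr import NDProductDistr, Factor1DDistr
from pylab import figure, show

A = BetaDistr(2, 4, sym="A")   # equation parameter, as in Example #12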
Example #4
Y = []                          # list of states
O, E, U = [], [], []            # lists of observations, errors and inputs
for i in xrange(n):
    U.append(UniformDistr(-0.2, 0.2, sym="U{0}".format(i)))
    if i == 0:
        Y.append(Y0 * K + U[i])
    else:
        Y.append(Y[i - 1] * K + U[i])
    Y[i].setSym("Y" + str(i + 1))  
    ei = NormalDistr(0.05, 0.1) | Between(-0.4, 0.4)
    ei.setSym("E{0}".format(i))
    E.append(ei)
    O.append(Y[-1] + E[-1])
    O[-1].setSym("O{0}".format(i))
    #print O[-1].range(), O[-1].range_()
M = Model(U + [Y0] + E, Y + O)
print M
M.toGraphwiz(f=open('bn.dot', mode="w+"))
#!
#! Simulation with signal filtering
#! --------------------------------
nT = 100
u = zeros(nT)
t = zeros(nT)
Yorg = zeros(nT)
Ynoised = zeros(nT)
Ydenoised = zeros(nT)
Udenoised = zeros(nT)
yi = 0.0
ydenoise = 0.0
ynoise = 0.0
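# The fragment assumes the step count n, the initial state Y0, the one-step gain
# K and the numpy/model imports.  A minimal sketch of that setup; the module
# paths, n and the priors are assumptions, with K mirroring the 1 + h*A
# construction of Example #3.  Between is assumed to come with the pacal imports.
from numpy import zeros
from pacal import BetaDistr, NormalDistr, UniformDistr
from pacal.depvars.models import Model

n = 5                              # number of time steps (assumed)
h = 1.0 / n
A = BetaDistr(2, 4, sym="A")       # dynamics parameter (assumed prior)
Y0 = BetaDistr(2, 2, sym="Y0")     # initial state (assumed prior)
K = 1 + h * A
K.setSym("K")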
Example #5
#A = UniformDistr(0,1, sym = "A")
#B = UniformDistr(0,1, sym = "B")
W = [BetaDistr(3, 3, sym="W" + str(i)) for i in xrange(m + 1)]
for i in range(n):
    #X.append(UniformDistr(0, 1, sym = "X{}".format(i)))
    xind = len(X)
    Yi = W[0]
    for j in xrange(m):
        X.append(BetaDistr(3, 3, sym="X{}{}".format(i, j)))
        Yi += W[j + 1] * X[-1]
    E.append(MollifierDistr(0.4, sym="E{}".format(i)))
    Y.append(Yi + E[-1])
    Y[-1].setSym("Y{}".format(i))

M = Model(X + E + W, Y)
#M.eliminate_other(X + E + [A, B] + Y)
#M.toGraphwiz()

Xobs = []
Yobs = []
trueW = [0.3, 0.9, 0.5, 0.6]
k = 0
for i in xrange(n):
    yi = trueW[0]
    for j in xrange(m):
        Xobs.append(X[k].rand())
        k += 1
        yi += trueW[j + 1] * Xobs[-1]
    yi += E[i].rand()
    Yobs.append(yi)
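# The fragment assumes the problem sizes and the empty containers it fills.  A
# minimal sketch of that setup; n is an assumption, while m = 3 follows from
# trueW above (one intercept plus three slopes):
from pacal import BetaDistr, MollifierDistr
from pacal.depvars.models import Model

n = 10                  # number of noisy observations (assumed)
m = 3                   # number of regressors, matching len(trueW) - 1
X, E, Y = [], [], []    # regressors, noise terms and responses, filled above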
Example #6
X2 = BetaDistr(4, 4, sym="X2")
Y1 = BetaDistr(4, 4, sym="Y1")
Y2 = BetaDistr(4, 4, sym="Y2")

C1 = FrankCopula2d(theta=5, marginals=[X1, X2])
C2 = FrankCopula2d(theta=2, marginals=[Y1, Y2])
C1.contour()
C2.contour()
figure()

# C1 = FrankCopula2d(theta=2, marginals=[X1, X2])
# C2 = FrankCopula2d(theta=2.5, marginals=[Y1, Y2])

#C1 = GumbelCopula2d(theta=2, marginals=[X1, X2])
#C1 = PiCopula(marginals=[X1, X2])

Z1 = X1 + Y1
Z1.setSym("Z1")
Z2 = X2 + Y2
Z2.setSym("Z2")

M = Model([C1, C2], [Z1, Z2])
#M = Model([C1,Y1,Y2], [Z1, Z2])
#M = Model([X1,X2,Y1,Y2], [Z1, Z2])
print(M)

M2 = M.inference([Z1, Z2])
print(M2)
M2.plot()
show()
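# The fragment assumes X1 and the copula, model and plotting imports.  A minimal
# sketch of that setup; the module paths are assumptions, and X1 is given the
# same Beta(4, 4) marginal as X2 above:
from pacal import BetaDistr
from pacal.depvars.copulas import FrankCopula2d
from pacal.depvars.models import Model
from pylab import figure, show

X1 = BetaDistr(4, 4, sym="X1")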
Example #7
    # U will be conditioned on, so in effect constant
    if i == 0:
        Y.append(Y0 * K + h * U[i])
    else:
        Y.append(Y[i - 1] * K + h * U[i])
    Y[-1].setSym("Y" + str(i + 1))
    ei = NormalDistr(0.0, 0.1) | Between(-0.4, 0.4)
    ei.setSym("E{0}".format(i))
    E.append(ei)
    O.append(Y[-1] + E[-1])
    O[-1].setSym("O{0}".format(i))
#!
#! Model
#! -----
P = NDProductDistr([A, Y0] + E + U)
M = Model(P, O)
print M
M.eliminate_other(E + Y + O + [A, Y0] + U)
print M
M.toGraphwiz(f=open('bn.dot', mode="w+"))

#!
#! Joint distribution of initial condition and parameter of equation
#! -----------------------------------------------------------------

i = 0
ay0 = []
ui = [0.0] * n
figure()
for yend in [0.25, 1.25, 2.25]:
    M2 = M.inference(wanted_rvs=[A, Y0], cond_rvs=[O[-1]] + U, cond_X=[yend] + ui)
Example #8
Y = []  # list of states
O, E, U = [], [], []  # lists of observations, errors and inputs
for i in xrange(n):
    U.append(UniformDistr(-0.2, 0.2, sym="U{0}".format(i)))
    if i == 0:
        Y.append(Y0 * K + U[i])
    else:
        Y.append(Y[i - 1] * K + U[i])
    Y[i].setSym("Y" + str(i + 1))
    ei = NormalDistr(0.05, 0.1) | Between(-0.4, 0.4)
    ei.setSym("E{0}".format(i))
    E.append(ei)
    O.append(Y[-1] + E[-1])
    O[-1].setSym("O{0}".format(i))
    #print O[-1].range(), O[-1].range_()
M = Model(U + [Y0] + E, Y + O)
print M
M.toGraphwiz(f=open('bn.dot', mode="w+"))
#!
#! Simulation with signal filtering
#! --------------------------------
nT = 100
u = zeros(nT)
t = zeros(nT)
Yorg = zeros(nT)
Ynoised = zeros(nT)
Ydenoised = zeros(nT)
Udenoised = zeros(nT)
yi = 0.0
ydenoise = 0.0
ynoise = 0.0
Example #9
X2 = BetaDistr(4,4, sym="X2")
Y1 = BetaDistr(4,4, sym="Y1")
Y2 = BetaDistr(4,4, sym="Y2")


C1 = FrankCopula2d(theta=5, marginals=[X1, X2])
C2 = FrankCopula2d(theta=2, marginals=[Y1, Y2])
C1.contour()
C2.contour()
figure()

# C1 = FrankCopula2d(theta=2, marginals=[X1, X2])
# C2 = FrankCopula2d(theta=2.5, marginals=[Y1, Y2])

#C1 = GumbelCopula2d(theta=2, marginals=[X1, X2])
#C1 = PiCopula(marginals=[X1, X2])


Z1 = X1 + Y1; Z1.setSym("Z1")
Z2 = X2 + Y2; Z2.setSym("Z2")

M = Model([C1,C2], [Z1, Z2])
#M = Model([C1,Y1,Y2], [Z1, Z2])
#M = Model([X1,X2,Y1,Y2], [Z1, Z2])
print(M)

M2 = M.inference([Z1, Z2])
print(M2)
M2.plot()
show()
Example #10
#A = UniformDistr(0,1, sym = "A")
#B = UniformDistr(0,1, sym = "B")
W = [BetaDistr(3,3, sym = "W" + str(i)) for i in xrange(m + 1)]
for i in range(n):
    #X.append(UniformDistr(0, 1, sym = "X{}".format(i)))
    xind = len(X)
    Yi = W[0]
    for j in xrange(m):
        X.append(BetaDistr(3, 3, sym = "X{}{}".format(i,j)))
        Yi += W[j+1] * X[-1]
    E.append(MollifierDistr(0.4, sym = "E{}".format(i)))
    Y.append(Yi + E[-1])
    Y[-1].setSym("Y{}".format(i))

M = Model(X + E + W, Y)
#M.eliminate_other(X + E + [A, B] + Y)
#M.toGraphwiz()


Xobs = []
Yobs = []
trueW = [0.3, 0.9, 0.5, 0.6]
k = 0
for i in xrange(n):
    yi = trueW[0]
    for j in xrange(m):
        Xobs.append(X[k].rand())
        k += 1
        yi += trueW[j+1] * Xobs[-1]
    yi += E[i].rand()
Example #11
S = [0]*n
t0 = time.time()

#show()
#A = UniformDistr(0,1, sym = "A")
#B = UniformDistr(0,1, sym = "B")
for i in range(n):
    print("X{}".format(i))
    X[i] = BetaDistr(2, 2, sym = "X{}".format(i))
    if i==0:
        S[i] = X[0]        
    else:
        S[i] = S[i-1] + X[i]
        S[i].setSym("S{}".format(i))

M = Model(X, S[1:])
print(M)
M.toGraphwiz()
#M = M.inference([S[-1], S[-4]], [S[-3]], [1])
#M = M.inference([X[0], X[1]], [S[-1]], [3.5])
print("====================")
M1 = M.inference(wanted_rvs=[X[0], X[1]], cond_rvs=[S[-1]], cond_X=[1])
print("====================", M1)
M2 = M.inference(wanted_rvs=[S[1], S[4]])
print("====================", M2)
M3 = M.inference(wanted_rvs=[S[1], S[4]], cond_rvs=[S[3]], cond_X=[2])
print("====================", M3)
MC_X0 = M.inference(wanted_rvs=[X[0]], cond_rvs=[S[-1]], cond_X=[1])
print("====================")

print(M1)
Example #12
B = BetaDistr(2, 4, sym="A")
Y0 = BetaDistr(2, 2, sym="Y0")
n = 5
h = 1.0 / n

K = (1 + h * A)
K.setSym("K")
Y = [Y0] * (n + 1)
for i in xrange(n + 1):
    if i == 0:
        pass
    else:
        Y[i] = Y[i - 1] * K
        Y[i].setSym("Y" + str(i))
P = NDProductDistr([Factor1DDistr(A), Factor1DDistr(Y[0])])
M = Model(P, Y[1:])
M.eliminate_other([K] + Y)

#M2 = M.inference2([Y[0], A], [Y[n]], [1])
#M2.plot(); print M2; show()
#M2 = M.inference2([Y[0]], [Y[n]], [0.5])
#figure()
#M2.plot(); print M2;
figure()
Y[-1].plot(color='r', linewidth=5)
M3 = M.inference([Y[-1]], [], [])

M3.plot()
print M3

X0 = BetaDistr(2, 2)
Example #13
h=0.1
for i in range(n):
    print(i)
    if i==0:
        X[i] = BetaDistr(3, 3, sym = "X0")
        Y[i] = BetaDistr(3, 3, sym = "Y0")
    else:
        X[i] = X[i-1] + h*(A*X[i-1] - B*Y[i-1])
        Y[i] = Y[i-1] + h*(-C*X[i-1] + D*Y[i-1])
        
        X[i].setSym("X{}".format(i))
        Y[i].setSym("Y{}".format(i))



M = Model([X[0], Y[0], A, B, C, D], X[1:] + Y[1:] )
print(M)
M.eliminate_other([X[0], Y[0], A, B, C, D] + X[1:] + Y[1:])
print(M)
print(Y[1].range())

#M1 = M.inference([X[2], Y[2]], [X[0], Y[0]], [0.5, 0.2])
figure()
    
for i in range(n):
    M1 = M.inference([X[i], Y[i]], [A, B, C, D], [0.9, 0.2, 0.3, 0.6])
    print(M1)
    M1.plot(cont_levels=1)
    #figure()
    #M1.plot(have_3d=True)
show()
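# The fragment assumes the step count, the state containers and the four equation
# parameters A, B, C, D.  A minimal sketch of that setup; the module paths, n and
# the Beta priors are assumptions, not taken from the original file:
from pacal import BetaDistr
from pacal.depvars.models import Model
from pylab import figure, show

n = 4                          # number of Euler steps (assumed)
X = [None] * n                 # first state component, filled in the loop above
Y = [None] * n                 # second state component
A = BetaDistr(2, 2, sym="A")   # assumed priors for the system parameters
B = BetaDistr(2, 2, sym="B")
C = BetaDistr(2, 2, sym="C")
D = BetaDistr(2, 2, sym="D")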
Example #14
h=0.1
for i in range(n):
    print i
    if i==0:
        X[i] = BetaDistr(3, 3, sym = "X0")
        Y[i] = BetaDistr(3, 3, sym = "Y0")
    else:
        X[i] = X[i-1] + h*(A*X[i-1] - B*Y[i-1])
        Y[i] = Y[i-1] + h*(-C*X[i-1] + D*Y[i-1])
        
        X[i].setSym("X{}".format(i))
        Y[i].setSym("Y{}".format(i))



M = Model([X[0], Y[0], A, B, C, D], X[1:] + Y[1:] )
print M
M.eliminate_other([X[0], Y[0], A, B, C, D] + X[1:] + Y[1:])
print M
print Y[1].range()

#M1 = M.inference([X[2], Y[2]], [X[0], Y[0]], [0.5, 0.2])
figure()
    
for i in range(n):
    M1 = M.inference([X[i], Y[i]], [A, B, C, D], [0.9, 0.2, 0.3, 0.6])
    print M1
    M1.plot(cont_levels=1)
    #figure()
    #M1.plot(have_3d=True)
show()