def historico(estados_iniciais, grafo, doenca, k):
    """Retorna uma lista com o historico do numero de nos em cada um dos estados
	de acordo com o modelo sir
	Parametros
	----------
	estados iniciais : matriz 
	Informa o estado inicial de cada um dos nos
	grafo : matriz 
	Matriz de adjacencia da rede complexa
	doenca : vetor 
	Vetor com o os valores dos diferentes estagios da doenca
	k: float
	Coeficiente relativo de infectuosidade 
	"""
    L = []
    i = 0
    estados = deepcopy(estados_iniciais)
    # count of nodes in each state for the initial configuration
    count = sir.sir(estados)
    while count.item(1) > 0:
        i = i + 1
        L.append(count.transpose().tolist())
        # advance the epidemic one step and recount the states
        estados = new_states.passo_epidemico(estados, grafo, doenca, k)
        count = sir.sir(estados)

    L.append(count.transpose().tolist())
    print("returned " + str(i))
    return i, L
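A minimal sketch of how `historico` might be called, assuming `sir.sir` returns the count of nodes per SIR state and `new_states.passo_epidemico` advances every node one step; the graph, initial states, disease-stage vector, and `k` below are hypothetical placeholders, not values from the original project.

from copy import deepcopy  # used by historico above
import numpy as np
import sir          # module providing sir.sir, as used above
import new_states   # module providing passo_epidemico, as used above

# hypothetical 3-node path graph (adjacency matrix)
grafo = np.matrix([[0, 1, 0],
                   [1, 0, 1],
                   [0, 1, 0]])
# hypothetical initial states: node 0 infected, the others susceptible
estados_iniciais = np.matrix([[1], [0], [0]])
# hypothetical disease-stage values and relative infectiousness coefficient
doenca = np.array([1, 2, 3])
k = 0.5

passos, historia = historico(estados_iniciais, grafo, doenca, k)
print(passos, historia[-1])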
Example #2
size = 25

model = "SIR"

series = []
grave = []

rule = np.arange(0, turns + 1, step=crit)

while p <= 1:
    ift = []
    rmd = []
    fig, ax = plt.subplots(2, 1)
    # run `size` independent realisations of the epidemic for the current p
    for ind in range(size):
        covid = sir(p, i, r, graph)
        print("Epoch: {}/{}".format(ind + 1, size))

        infect, removed = covid.start(turns, initInfect, crit)
        ift.append(infect)
        rmd.append(removed)

        ax[0].plot(rule, infect, linestyle='--', linewidth=0.5)
        ax[1].plot(rule, removed, linestyle='--', linewidth=0.5)

    ift = np.array(ift)
    rmd = np.array(rmd)
    # overlay the ensemble averages of the infected and removed curves
    ax[0].plot(rule, np.average(ift, axis=0), linewidth=0.5)
    ax[1].plot(rule, np.average(rmd, axis=0), linewidth=0.5)
    plt.show()
    series.append(np.average(ift, axis=0))
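Example #3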
import numpy as np
import matplotlib
matplotlib.use("Agg")
import matplotlib.pyplot as plt
import sir

# Example script running the simulation

N = 100000.0  # total population size
rho = 1.0
beta = 1.75
gamma = 0.5
omega = 0.01
delta = 2 * gamma
psi = beta

ts = np.linspace(0, 2000, 2000)
# integrate the model, starting from a single infected individual
S, I, R, PS, PI = sir.sir(N, rho, beta, gamma, omega, delta, psi, ts,
                          [N - 1, 1.0, 0.0, 0.0, 0.0])

# plot the sum of the infected compartments and save the figure
plt.plot(I + PI)
plt.savefig("test.svg")
Example #4
def main():
    # number of particles
    Nsu = 100

    SIR = sir.sir(2,Nsu,propagateFunction,processNoise,measurementPdf)

    print("SIR.is_init: %d" % (SIR.initFlag))
    SIR.init(initialParticle)
    print("SIR.is_init: %d" % (SIR.initFlag))

    tf = 20.0
    dt = 0.1
    nSteps = int(tf/dt)
    xsim = initialParticle()
    tsim = 0.0

    px1 = np.zeros((nSteps,SIR.Ns))
    px2 = np.zeros((nSteps,SIR.Ns))
    weights = np.zeros((nSteps,SIR.Ns))
    xksim = np.zeros((nSteps,2))

    for k in range(nSteps):
        # ode integration
        simout = sp.odeint(eqom,xsim,np.array([tsim,tsim+dt]))
        # store new sim state
        xsim = simout[-1,:].transpose()
        # update time
        tsim = tsim + dt
        # generate a measurement
        yt = measurement(xsim)
        # call SIR
        SIR.update(dt,yt)
        # print the current time, measurement, and true state
        print("%f,%f,%f,%f" % (tsim,yt[0],xsim[0],xsim[1]))
        # store
        px1[k,:] = SIR.XK[0,:].copy()
        px2[k,:] = SIR.XK[1,:].copy()
        weights[k,:] = SIR.WI.copy()
        xksim[k,:] = (xsim.transpose()).copy()
        # resample
        SIR.sample()
    tplot = np.arange(0.0,tf,dt)


    # len(tplot) x Ns matrix of times
    tMesh = np.kron(np.ones((SIR.Ns,1)),tplot).transpose()
    x1Mesh = px1.copy()
    x2Mesh = px2.copy()
    # sort out the most likely particle at each time
    xml = np.zeros((nSteps,2))
    for k in range(nSteps):
        idxk = np.argmax(weights[k,:])
        xml[k,0] = px1[k,idxk]
        xml[k,1] = px2[k,idxk]

    fig = plt.figure()

    ax = []
    for k in range(4):
        if k < 2:
            nam = 'x' + str(k+1)
        else:
            nam = 'pdf' + str(k-1)
        ax.append( fig.add_subplot(2,2,k+1,ylabel=nam) )
        if k < 2:
            ax[k].plot(tplot,xksim[:,k],'b-')
            if k == 0:
                #ax[k].plot(tplot,px1,'.')
                ax[k].plot(tplot,xml[:,k],'r.')
            elif k == 1:
                #ax[k].plot(tplot,px2,'.')
                ax[k].plot(tplot,xml[:,k],'r.')
        elif k < 4:
            if k == 2:
                # plot the discrete PDF as a function of time
                mex = tMesh.reshape((len(tplot)*SIR.Ns,))
                mey = x1Mesh.reshape((len(tplot)*SIR.Ns,))
                mez = weights.reshape((len(tplot)*SIR.Ns,))
            elif k == 3:
                # plot the discrete PDF as a function of time
                mex = tMesh.reshape((len(tplot)*SIR.Ns,))
                mey = x2Mesh.reshape((len(tplot)*SIR.Ns,))
                mez = weights.reshape((len(tplot)*SIR.Ns,))
            idx = mez.argsort()
            mexx,meyy,mezz = mex[idx],mey[idx],mez[idx]

            cc = ax[k].scatter(mexx,meyy,c=mezz,s=20,edgecolor='none')
            fig.colorbar(cc,ax=ax[k])
            # plot the truth
            ax[k].plot(tplot,xksim[:,k-2],'b-')

        ax[k].grid()
    fig.show()

    raw_input('Return to continue')

    print("Exiting sir_test.py")
    return
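The `main()` above relies on helpers that are not shown in this excerpt (`eqom`, `propagateFunction`, `processNoise`, `measurementPdf`, `measurement`, `initialParticle`) and on `numpy as np`, `scipy.integrate as sp`, and `matplotlib.pyplot as plt`. The stubs below are a minimal sketch of what such helpers could look like for a two-state system; the dynamics, noise levels, and likelihood are hypothetical, and the exact interface expected by `sir.sir` is whatever that module defines.

import numpy as np

def eqom(x, t):
    # hypothetical continuous-time dynamics: lightly damped oscillator
    return np.array([x[1], -x[0] - 0.1 * x[1]])

def processNoise(x):
    # hypothetical additive Gaussian process noise sample
    return np.random.normal(0.0, 0.05, 2)

def propagateFunction(x, t, dt):
    # hypothetical discrete propagation: one Euler step plus process noise
    return x + dt * eqom(x, t) + processNoise(x)

def measurement(x):
    # hypothetical noisy measurement of the first state
    return np.array([x[0] + np.random.normal(0.0, 0.5)])

def measurementPdf(y, x):
    # hypothetical Gaussian likelihood p(y | x) for that measurement
    return np.exp(-0.5 * ((y[0] - x[0]) / 0.5) ** 2)

def initialParticle():
    # hypothetical draw for an initial particle
    return np.random.normal(0.0, 1.0, 2)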
Example #5
def sir_test(dt,tf,mux0,P0,YK,Qk,Rk,Nparticles = 100,informative=True):
	global nameBit
	global mux_sample
	global P_sample
	global Ru
	global Qu
	Ru = Rk.copy()
	Qu = Qk.copy()
	mux_sample = mux0.copy()
	P_sample = P0.copy()

	# number of particles
	Nsu = Nparticles

	if informative:
		SIR = sir.sir(2,Nsu,eqom_use,processNoise,measurementPdf)
	else:
		SIR = sir.sir(2,Nsu,eqom_use,processNoise,measurementPdfNoninformative)

	nSteps = int(tf/dt)+1
	ts = 0.0

	# initialize the particle filter
	SIR.init(initialParticle)
	
	# propagated particles
	Xp = np.zeros((nSteps,2,Nsu))
	# the weights
	weights = np.zeros((nSteps,SIR.Ns))
	# weights after resampling
	weightss = np.zeros((nSteps,SIR.Ns))

	weights[0,:] = SIR.WI.copy()
	Xp[0,:,:] = SIR.XK.copy()
	## particles after resampling
	Xs = np.zeros((nSteps,2,Nsu))

	t1 = time.time()
	for k in range(1,nSteps):
		# get the new measurement
		ym = np.array([YK[k]])
		ts = ts + dt
		# call SIR
		SIR.update(dt,ym,1.0e-2)
		print("Propagate to t = %f in %f sec" % (ts,time.time()-t1))
		# store
		weights[k,:] = SIR.WI.copy()
		Xp[k,:,:] = SIR.XK.copy()
		# resample
		SIR.sample()
		## store resampled points
		Xs[k,:,:] = SIR.XK.copy()
		weightss[k,:] = SIR.WI.copy()
	t2 = time.time()
	print("Elapsed time: %f sec" % (t2-t1))

	# compute the mean and covariance over time - this is the propagated state
	mux = np.zeros((nSteps,2))
	Pxx = np.zeros((nSteps,4))
	for k in range(nSteps):
		mux[k,0] = np.sum( np.multiply(Xp[k,0,:],weights[k,:]) )
		mux[k,1] = np.sum( np.multiply(Xp[k,1,:],weights[k,:]) )
		Pxk = np.zeros((2,2))
		for j in range(Nsu):
			iv = np.array([ Xp[k,0,j]-mux[k,0],Xp[k,1,j]-mux[k,1] ])
			Pxk = Pxk + weights[k,j]*np.outer(iv,iv)
			Pxx[k,:] = Pxk.reshape((4,))
	# compute the aposteriori mean
	muxs = np.zeros((nSteps,2))
	for k in range(nSteps):
		muxs[k,0] = np.sum( np.multiply(Xs[k,0,:],weightss[k,:]) )
		muxs[k,1] = np.sum( np.multiply(Xs[k,1,:],weightss[k,:]) )

	return(mux,Pxx,Xp,weights,Xs,muxs)
Example #6
def sir_test(dt,tf,mux0,P0,YK,Qk,Rk,Nparticles = 100):
	global nameBit
	global mux_sample
	global P_sample
	global Ru
	global Qu
	Ru = Rk.copy()
	Qu = Qk.copy()
	mux_sample = mux0.copy()
	P_sample = P0.copy()

	# number of particles
	Nsu = Nparticles

	# choose the propagation function based on nameBit (all branches currently use the same model); may or may not be needed
	if nameBit == 1:
		# create SIR object
		SIR = sir.sir(2,Nsu,eqom_use,processNoise,measurementPdf)
	elif nameBit == 2:
		# create SIR object
		SIR = sir.sir(2,Nsu,eqom_use,processNoise,measurementPdf)
	elif nameBit == 3:
		# create SIR object
		SIR = sir.sir(2,Nsu,eqom_use,processNoise,measurementPdf)

	nSteps = int(tf/dt)+1
	ts = 0.0

	# initialize the particle filter
	SIR.init(initialParticle)
	
	# the estimate (weighted mean)
	#xf = np.zeros((nSteps,2))
	#tk = np.arange(0.0,tf,dt)
	px1 = np.zeros((nSteps,SIR.Ns))
	px2 = np.zeros((nSteps,SIR.Ns))
	weights = np.zeros((nSteps,SIR.Ns))

	px1[0,:] = SIR.XK[0,:].copy()
	px2[0,:] = SIR.XK[1,:].copy()
	weights[0,:] = SIR.WI.copy()

	t1 = time.time()
	for k in range(1,nSteps):
		# get the new measurement
		ym = np.array([YK[k]])
		ts = ts + dt
		# call SIR
		SIR.update(dt,ym)
		# store
		px1[k,:] = SIR.XK[0,:].copy()
		px2[k,:] = SIR.XK[1,:].copy()
		weights[k,:] = SIR.WI.copy()
		# resample
		SIR.sample()
	t2 = time.time()
	print("Elapsed time: %f sec" % (t2-t1))

	# sort out the most likely particle at each time
	xml = np.zeros((nSteps,2))
	for k in range(nSteps):
		idxk = np.argmax(weights[k,:])
		xml[k,0] = px1[k,idxk]
		xml[k,1] = px2[k,idxk]
	# compute the mean and covariance over time
	mux = np.zeros((nSteps,2))
	Pxx = np.zeros((nSteps,4))
	for k in range(nSteps):
		mux[k,0] = np.sum( np.multiply(px1[k,:],weights[k,:]) )
		mux[k,1] = np.sum( np.multiply(px2[k,:],weights[k,:]) )
		Pxk = np.zeros((2,2))
		for j in range(Nsu):
			iv = np.array([ px1[k,j]-mux[k,0],px2[k,j]-mux[k,1] ])
			Pxk = Pxk + weights[k,j]*np.outer(iv,iv)
			Pxx[k,:] = Pxk.reshape((4,))

	return(mux,Pxx,px1,px2,weights)
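Example #7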
import matplotlib
matplotlib.use("Agg")
import matplotlib.pyplot as plt
import numpy as np
import sir

N = 100000.0
# split the population into three equal sub-populations
Ns = np.array([N / 3, N / 3, N / 3])
# coupling between the three groups (presumably a contact matrix)
A = np.array([[1.0, 1.0, 1.0], [1.0, 5.0, 2.0], [1.0, 4.0, 10.0]])

rhos = np.array([0.1, 0.1, 0.1])
gamma = 0.5

ts = np.linspace(0, 365, 1000)
S0, S1, S2, I0, I1, I2, R = sir.sir(
    Ns, rhos, A, gamma, ts, [N / 3, N / 3 - 1, N / 3, 0.0, 1, 0.0, 0.0])

plt.plot(ts, I0 + I1 + I2, linewidth=3)
plt.plot(ts, I0, linewidth=3)
plt.plot(ts, I1, linewidth=3)
plt.plot(ts, I2, linewidth=3)
plt.legend(["Sum", "Low", "Mid", "High"])
plt.savefig("test.svg")

# sanity check: the total population should remain constant over time
total = S0 + S1 + S2 + I0 + I1 + I2 + R
print("Mean: {},  std:  {}".format(np.mean(total), np.std(total)))