Example #1
import math
import numpy
import mystats  # project helper module providing normpdf, normprob, tnorm and drawmultinom

def samp_start_path(z1,edge_pos,v):

# [edgeidx,z] = samp_start_path(z1,edge_pos,v)
# Samples a starting edge
# Inputs:
# z1- 2-element measurement list or array
# edge_pos- candidate edges (a list of arrays containing the positions of the edge nodes)
# v- measurement noise variance
# Outputs:
# edgeidx- index of sampled edge (integer)
# z- distance along edge (float)

    nedge = len(edge_pos)
    
    x1 = z1[0]
    y1 = z1[1]
    sqv = math.sqrt(v)
    
    mui = numpy.zeros((nedge))
    elli = numpy.zeros((nedge))
    qrho = numpy.zeros((nedge))
    # Compute weights for each edge
    for i in range(nedge):
        xi = edge_pos[i][0,0]
        yi = edge_pos[i][1,0]
        xe = edge_pos[i][0,1]
        ye = edge_pos[i][1,1]
        thi = math.atan2(ye-yi,xe-xi)
        cth = math.cos(thi)
        sth = math.sin(thi)
        elli[i] = math.sqrt((xe-xi)**2+(ye-yi)**2)
        mui[i] = (x1-xi)*cth+(y1-yi)*sth
        P = mystats.normprob(mui[i],sqv,0,elli[i])
        if abs(cth)>1e-10:
            npdf = mystats.normpdf(y1-yi,(x1-xi)*sth/cth,sqv/abs(cth))/abs(cth)
        else:
            npdf = mystats.normpdf(x1-xi,(y1-yi)*cth/sth,sqv/abs(sth))/abs(sth)
        qrho[i] = P*npdf/elli[i]
    # Normalise weights
    w = sum(qrho)
    qrho = qrho/w
    # Draw an edge
    idx = mystats.drawmultinom(qrho)
    # Draw the distance along the edge
    z = mystats.tnorm(mui[idx],v,0,elli[idx])

    return idx, z
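The mystats helpers used in these examples are not shown on this page. The sketch below gives minimal stand-ins with the signatures the call sites appear to assume (normpdf and normprob take a standard deviation, tnorm a variance, drawmultinom a weight vector); it is an illustration only, not the project's actual module, and calc_samp_wt (used in Example #3) is not sketched here.

import math
import numpy

_rng = numpy.random.default_rng()

def normpdf(x, mean, std):
    # Gaussian density with mean `mean` and standard deviation `std`, evaluated at x
    return math.exp(-0.5*((x-mean)/std)**2)/(std*math.sqrt(2.0*math.pi))

def normprob(mean, std, lb, ub):
    # Probability that N(mean, std^2) falls in the interval [lb, ub]
    cdf = lambda s: 0.5*(1.0+math.erf((s-mean)/(std*math.sqrt(2.0))))
    return cdf(ub)-cdf(lb)

def tnorm(mean, var, lb, ub):
    # Draw from N(mean, var) truncated to [lb, ub] via simple rejection sampling
    std = math.sqrt(var)
    while True:
        s = _rng.normal(mean, std)
        if lb <= s <= ub:
            return s

def drawmultinom(w):
    # Draw an index with probability proportional to the weights in w
    w = numpy.asarray(w, dtype=float)
    return int(_rng.choice(len(w), p=w/w.sum()))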
Example #2
import math
import numpy
import mystats  # project helper module providing normpdf, normprob, tnorm and drawmultinom
# lognormpdf is called at the end of this function and is assumed to be defined
# alongside it in the source module (log of a normal density)

def draw_location(t,path_pos,sk,segk,skm1,segkm1,tk,tkm1,vmean,vvar):
    
# [ssamp,segidx,logw] = draw_location(t,path_pos,sk,segk,skm1,segkm1,tk,tkm1,vmean,vvar)
# Sample a location between two points on the network
# Inputs:
# t- time at which sample is being drawn (float)
# path_pos- array of positions of path nodes (2 x q array of floats)
# (sk,segk)- position at time tk>t (float, integer)
# (skm1,segkm1)- position at time tkm1<t (float, integer)
# tk- next time (float)
# tkm1- previous time (float)
# (vmean,vvar)- statistics of vehicle movement (floats)
# Outputs:
# ssamp- sample position on edge (float)
# segidx- index of sample along path (integer)
# logw- sample weight (float)
    

    # Pre-allocate
    nsegs = segk-segkm1+1 # This is the number of segments between the bracketing positions
    Tbar = numpy.zeros((nsegs))
    kap = numpy.zeros((nsegs))
    ell = numpy.zeros((nsegs)) # total length of each segment
    lb = numpy.zeros((nsegs))
    ub = numpy.zeros((nsegs))
    dist_cs = numpy.zeros((nsegs))
    
    totell = 0
    # For each segment, compute its length, mean duration and duration variance
    if nsegs==1:
        pos1 = path_pos[:,segkm1]
        pos2 = path_pos[:,segkm1+1]
        ell[0] = numpy.linalg.norm(pos1-pos2)
        Tbar[0] = ell[0]/vmean
        kap[0] = ell[0]*ell[0]*vvar/(vmean**4)
        lb[0] = skm1
        ub[0] = sk
        totell = sk-skm1
        dist_cs[0] = totell
    else:
        pos1 = path_pos[:,segkm1]
        pos2 = path_pos[:,segkm1+1]
        ell[0] = numpy.linalg.norm(pos1-pos2)
        Tbar[0] = ell[0]/vmean
        kap[0] = ell[0]*ell[0]*vvar/(vmean**4)
        lb[0] = skm1
        ub[0] = ell[0]
        totell = ell[0]-skm1
        dist_cs[0] = totell
        for j in range(1,nsegs-1):
            pos1 = path_pos[:,segkm1+j]
            pos2 = path_pos[:,segkm1+j+1]
            ell[j] = numpy.linalg.norm(pos1-pos2)
            Tbar[j] = ell[j]/vmean
            kap[j] = ell[j]*ell[j]*vvar/(vmean**4)
            lb[j] = 0
            ub[j] = ell[j]
            totell = totell+ell[j]
            dist_cs[j] = dist_cs[j-1]+ell[j]
        j = nsegs-1
        pos1 = path_pos[:,segk-1]
        pos2 = path_pos[:,segk]
        ell[j] = numpy.linalg.norm(pos1-pos2)
        Tbar[j] = ell[j]/vmean
        kap[j] = ell[j]*ell[j]*vvar/(vmean**4)
        lb[j] = 0
        ub[j] = sk
        totell = totell+sk
        dist_cs[j] = dist_cs[j-1]+sk
    
    
    shat = numpy.zeros((nsegs))
    lam = numpy.zeros((nsegs))
    etilde = numpy.zeros((nsegs))
    p2 = numpy.zeros((nsegs))
    xi1 = numpy.zeros((nsegs))
    xi2 = numpy.zeros((nsegs))
    a1 = numpy.zeros((nsegs))
    a2 = numpy.zeros((nsegs))
    Ttilde1 = numpy.zeros((nsegs))
    Ttilde2 = numpy.zeros((nsegs))
    scfact = numpy.zeros(nsegs)
    scfact[0] = (ell[0]-skm1)*(ell[0]-skm1)/(ell[0]*ell[0])
    for b in range(1,nsegs-1):
        scfact[b] = 1.
    scfact[nsegs-1] = sk*sk/(ell[nsegs-1]*ell[nsegs-1])
    
    # Compute the weights and sampling density for each segment
    # First segment
    a1[0] = -Tbar[0]/ell[0]
    Ttilde1[0] = sk*Tbar[nsegs-1]/ell[nsegs-1]
    for b in range(0,nsegs-1):
        Ttilde1[0] += Tbar[b]
    xi1[0] = kap[0]
    for b in range(1,nsegs):
        xi1[0] += scfact[b]*kap[b]
    a2[0] = Tbar[0]/ell[0]
    Ttilde2[0] = -skm1*Tbar[0]/ell[0]
    xi2[0] = kap[0]
    db = (a1[0]*a1[0]*xi2[0]+a2[0]*a2[0]*xi1[0])
    shat[0] = (a2[0]*xi1[0]*(t-tkm1-Ttilde2[0])+a1[0]*xi2[0]*(tk-t-Ttilde1[0]))/db
    lam[0] = xi1[0]*xi2[0]/db
    p1 = mystats.normpdf(a2[0]*(tk-t-Ttilde1[0]),a1[0]*(t-tkm1-Ttilde2[0]),math.sqrt(db))
    p2[0] = mystats.normprob(shat[0],math.sqrt(lam[0]),lb[0],ub[0])
    etilde[0] = p1*p2[0]
    for b in range(1,nsegs):
        a1[b] = -Tbar[b]/ell[b]
        a2[b] = Tbar[b]/ell[b]
        Ttilde2[b] = Ttilde2[b-1]+Tbar[b-1]
        Ttilde1[b] = Ttilde1[b-1]-Tbar[b-1]
        xi2[b] = xi2[b-1]+(scfact[b-1]-1)*kap[b-1]+kap[b]
        xi1[b] = xi1[b-1]-kap[b-1]+(1-scfact[b])*kap[b]
        db = (a1[b]*a1[b]*xi2[b]+a2[b]*a2[b]*xi1[b])
        shat[b] = (a2[b]*xi1[b]*(t-tkm1-Ttilde2[b])+a1[b]*xi2[b]*(tk-t-Ttilde1[b]))/db
        lam[b] = xi1[b]*xi2[b]/db
        p1 = mystats.normpdf(a2[b]*(tk-t-Ttilde1[b]),a1[b]*(t-tkm1-Ttilde2[b]),math.sqrt(db))
        p2[b] = mystats.normprob(shat[b],math.sqrt(lam[b]),lb[b],ub[b])
        etilde[b] = p1*p2[b]

    # Normalise weights
    e = etilde/numpy.sum(etilde)
    # Draw a segment
    segidx = mystats.drawmultinom(e)
    # Draw a position in the segment, truncated to the same bounds used for the weights
    z = mystats.tnorm(shat[segidx],lam[segidx],lb[segidx],ub[segidx])
    
    # Sample weight (allow for approximate prior)
    T1 = Ttilde1[segidx]+a1[segidx]*z
    T2 = Ttilde2[segidx]+a2[segidx]*z
    kap2 = 0
    for b in range(0,segidx):
        kap2 = kap2+(ub[b]-lb[b])*(ub[b]-lb[b])*kap[b]/(ell[b]*ell[b])
    kap2 = kap2+(z-lb[segidx])*(z-lb[segidx])*kap[segidx]/(ell[segidx]*ell[segidx])
    kap1 = (ub[segidx]-z)*(ub[segidx]-z)*kap[segidx]/(ell[segidx]*ell[segidx])
    for b in range(segidx+1,nsegs):
        kap1 = kap1+(ub[b]-lb[b])*(ub[b]-lb[b])*kap[b]/(ell[b]*ell[b])
    logw = lognormpdf(tk-t,T1,kap1)+lognormpdf(t-tkm1,T2,kap2)
    logw = logw-(lognormpdf(tk-t,T1,xi1[segidx])+lognormpdf(t-tkm1,T2,xi2[segidx]))

    return z, segidx+segkm1, logw
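The per-segment duration statistics above use a first-order (delta-method) approximation: traversing a segment of length ell at a speed with mean vmean and variance vvar takes a time with mean Tbar = ell/vmean and variance kap = ell^2*vvar/vmean^4. A small illustration (the helper name is hypothetical):

def travel_time_stats(ell, vmean, vvar):
    # First-order (delta-method) mean and variance of T = ell/v about v = vmean
    Tbar = ell/vmean
    kap = ell*ell*vvar/(vmean**4)
    return Tbar, kap

# A 100 m segment at 10 m/s mean speed with speed variance 4 (m/s)^2:
# mean duration 10 s, duration variance 100^2*4/10^4 = 4 s^2
print(travel_time_stats(100.0, 10.0, 4.0))  # (10.0, 4.0)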
Example #3
import math
import numpy
import mystats  # project helper module providing normpdf, normprob, tnorm, drawmultinom and calc_samp_wt

def samp_path(z,t,cand_paths,turnpen,s0,v,vmean,vvar):

# [pathidx,segidx,ssamp,logwt,isvalid] = samp_path(z,t,cand_paths,turnpen,s0,v,vmean,vvar)
# Samples a path extension
# Inputs:
# z- 2-element measurement list or array of floats
# t- duration since last measurement 
# cand_paths- candidate paths (a list of arrays containing the positions of each node on the path)
# turnpen- turn penalty along each path
# s0- starting position in final segment of each candidate path
# v- measurement noise variance
# (vmean,vvar)- statistics of vehicle movement
# Outputs:
# pathidx- index of sampled path (integer)
# segidx- segment along path in which vehicle lies (integer)
# ssamp- distance along segment (float)
# logwt- log of sample weight (float)
# isvalid- 1 if a valid path was sampled, 0 if all candidate paths had zero weight

    npaths = len(cand_paths)
    sqv = math.sqrt(v)
    eps = numpy.spacing(1)
    
    # Pre-allocate
    x = z[0]
    y = z[1]
    gamma = numpy.zeros((npaths))
    C1 = numpy.zeros((npaths))
    qpath = numpy.zeros((npaths))
    pathprior = numpy.zeros((npaths))
    b = list(numpy.zeros((npaths)))
    nu = list(numpy.zeros((npaths)))
    zeta = list(numpy.zeros((npaths)))
    ell = list(numpy.zeros((npaths)))
    nsegs = [0 for i in range(npaths)]
    lb = [0 for i in range(npaths)]
    # Compute the weight of each path
    for i in range(npaths):
        sz_path = cand_paths[i].shape
        nsegs[i] = sz_path[1]-1
        if nsegs[i]==1:
            lb[i] = s0[i]
        else:
            lb[i] = 0.
        ell[i] = numpy.zeros(nsegs[i])
        # Calculate prior mean and variance
        # First segment (account for s0)
        pos1 = cand_paths[i][:,0]
        pos2 = cand_paths[i][:,1]
        ell[i][0] = numpy.linalg.norm(pos1-pos2)
        Tbarj = ell[i][0]/vmean
        kapj = ell[i][0]*ell[i][0]*vvar/(vmean**4)
        Ttilde = -s0[i]*Tbarj/ell[i][0]
        kaptilde = 0.
        c = (ell[i][0]-s0[i])/ell[i][0]
        mean_term = Tbarj
        var_term = c*c*kapj
        for j in range(1,nsegs[i]):
            pos1 = cand_paths[i][:,j]
            pos2 = cand_paths[i][:,j+1]
            ell[i][j] = numpy.linalg.norm(pos1-pos2)
            Ttilde += mean_term
            kaptilde += var_term
            Tbarj = ell[i][j]/vmean
            kapj = ell[i][j]*ell[i][j]*vvar/(vmean**4)
            mean_term = Tbarj
            var_term = kapj
        # Compute location sampling density and path weight
        xi = pos1[0]
        yi = pos1[1]
        xe = pos2[0]
        ye = pos2[1]
        thi = math.atan2(ye-yi,xe-xi)
        cth = math.cos(thi)
        sth = math.sin(thi)
        mu = (x-xi)*cth+(y-yi)*sth
        alph = ell[i][nsegs[i]-1]*(t-Ttilde)/Tbarj
        lam = ell[i][nsegs[i]-1]*ell[i][nsegs[i]-1]*(kaptilde+kapj)/(Tbarj*Tbarj)
        nu[i] = (v*alph+lam*mu)/(v+lam)
        zeta[i] = lam*v/(v+lam)
        if abs(cth)>1e-10:
            npdf1 = mystats.normpdf(y-yi,(x-xi)*sth/cth,sqv/abs(cth))/abs(cth)
        else:
            npdf1 = mystats.normpdf(x-xi,(y-yi)*cth/sth,sqv/abs(sth))/abs(sth)
        npdf2 = mystats.normpdf(mu,alph,math.sqrt(v+lam))
        sqzeta = math.sqrt(zeta[i])
        C2 = mystats.normprob(nu[i],sqzeta,lb[i],ell[i][nsegs[i]-1])
        sqlam = math.sqrt(lam)
        C1[i] = mystats.normprob(alph,sqlam,lb[i],ell[i][nsegs[i]-1])
        pathprior[i] = 1/(numpy.sum(ell[i])+turnpen[i])
        if npdf2>eps:
            gamma[i] = npdf1*npdf2*C2/C1[i]
        else:
            gamma[i] = 0.
    # Normalise path weights
    pathprior = pathprior/numpy.sum(pathprior)
    qpath = gamma*pathprior
    wt = numpy.sum(qpath)
    if wt<eps:
        # Ignore zero weighted paths
        pidx = 0
        segidx = 0
        ssamp = 0.
        logwt = -1e10
        isvalid = 0
        return pidx, segidx, ssamp, logwt, isvalid
    # If we get here we can sample a valid path
    isvalid = 1
    qpath = qpath/wt
    # Sample a path
    pidx = mystats.drawmultinom(qpath)
    segidx = nsegs[pidx]-1
    # Now sample a position
    if nsegs[pidx]==1:
        ssamp = mystats.tnorm(nu[pidx],zeta[pidx],s0[pidx],ell[pidx][nsegs[pidx]-1])
    else:
        ssamp = mystats.tnorm(nu[pidx],zeta[pidx],0,ell[pidx][nsegs[pidx]-1])
    # Calculate sample weight
    logwt = math.log(wt)+mystats.calc_samp_wt(t,ell[pidx],s0[pidx],ssamp,lb[pidx],C1[pidx],vmean,vvar)
    

    return pidx, segidx, ssamp, logwt, isvalid
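Both samp_start_path and samp_path project the 2-D measurement onto the edge direction: the along-edge component mu drives the location density, while the cross-edge offset enters only through the npdf weight factor. A small sketch of that decomposition (the helper name is hypothetical):

import math

def project_onto_edge(z, p_start, p_end):
    # Decompose measurement z = (x, y) relative to the edge from p_start to p_end:
    # returns (mu, d), where mu is the signed distance of the projection along the
    # edge from p_start and d is the perpendicular (cross-edge) offset.
    x, y = z[0], z[1]
    xi, yi = p_start[0], p_start[1]
    xe, ye = p_end[0], p_end[1]
    th = math.atan2(ye-yi, xe-xi)
    cth, sth = math.cos(th), math.sin(th)
    mu = (x-xi)*cth + (y-yi)*sth      # along-edge coordinate, as in the examples above
    d = -(x-xi)*sth + (y-yi)*cth      # cross-edge offset
    return mu, d

# Example: a measurement 3 m along and 1 m to the side of a 10 m horizontal edge
print(project_onto_edge((3.0, 1.0), (0.0, 0.0), (10.0, 0.0)))  # (3.0, 1.0)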