Example 1
def report_reprojection(F, x0, x1):
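    """Apply the first-order Sampson correction to the point pair (x0, x1)
    for the fundamental matrix F, print the algebraic cost before and after
    the correction, and return the corrected pair."""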
    x = concatenate((x0,x1))
    f = algebraic_cost(F, x0, x1)
    J = Jalgebraic_cost_x(F, x0, x1)
    corr = sampson.firstorder_reprojection(x, f, J)
    xx0, xx1 = corr[:3], corr[3:]
    print 'algebraic cost before: %10f' % algebraic_cost(F, x0, x1)
    print 'algebraic cost after: %10f' % algebraic_cost(F, xx0, xx1)
    return xx0, xx1
Example 2
def compute_posterior(x0, x1, R_mean, t_mean):
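    """Linearise the Sampson reprojection of the point pair around the mean
    pose (R_mean, t_mean) on a 6-dof se(3) chart, compute a Gaussian belief
    over the pose perturbation from the linearised prediction, and print it
    in both its (v, L) and (mean, cov) parameterisations."""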
    x = concatenate((x0,x1))

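    # algebraic epipolar cost and its Jacobian as functions of the pose (R, t)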
    Fmat = lambda R,t: fundamental.make_fundamental(eye(3), R, t)
    f_alg = lambda R,t: algebraic_cost(Fmat(R, t), x0, x1)
    J_alg = lambda R,t: Jalgebraic_cost_x(Fmat(R, t), x0, x1)

    # sampson errors
    E_sampson = lambda R,t: ssq(sampson.firstorder_deviation(x, f_alg(R,t), J_alg(R,t)))
    E_sampson_charted = lambda v: E_sampson(*se3_chart((R_mean, t_mean), v))

    # sampson reprojections
    sampson_prediction = lambda (R,t): sampson.firstorder_reprojection(x, f_alg(R,t), J_alg(R,t))
    sampson_prediction_charted = lambda v: sampson_prediction(se3_chart((R_mean,t_mean), v))

    # prediction jacobian
    Jprediction = finite_differences.numeric_jacobian(sampson_prediction_charted,
                                                      zeros(6))

    # compute likelihood in terms of R and t
    prediction = sampson_prediction((R_mean, t_mean))
    print prediction
    print Jprediction
    print x
    print SENSOR_INFO
    v,L = beliefs.compute_likelihood(zeros(6), prediction, Jprediction, x, eye(6))

    print 'R_mean:'
    print R_mean
    print 't_mean:'
    print t_mean

    # compute mean and variance
    print 'v'
    print v.round(2)
    print 'L'
    print L.round(2)
    mean,cov = beliefs.normal_to_invnormal(v,L)
    print 'mean:'
    print mean
    print 'cov:'
    print cov
Example 3
def reprojection(F, x0, x1):
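    """Return the first-order Sampson correction of the point pair (x0, x1)
    with respect to the algebraic epipolar cost for the fundamental matrix F."""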
    x = concatenate((x0,x1))
    f = algebraic_cost(F, x0, x1)
    J = Jalgebraic_cost_x(F, x0, x1)
    corr = sampson.firstorder_reprojection(x, f, J)
    return corr[:3], corr[3:]