def assertJacobian(self, f, Jf, x0, tol=1e-5, h=1e-8, verbose=False):
    """Assert that an analytic Jacobian matches a finite-difference one.

    Parameters:
      f       -- function whose Jacobian is being checked
      Jf      -- analytic Jacobian: either a callable to evaluate at x0,
                 or a precomputed matrix
      x0      -- point at which the two Jacobians are compared
      tol     -- maximum allowed per-entry deviation (relative for
                 entries of magnitude > 1, absolute otherwise)
      h       -- finite-difference step size
      verbose -- if True, print both Jacobians when the check passes

    Raises:
      JacobianAssertionError -- if any entry deviates by more than tol.
    """
    x0 = asarray(x0)

    # If Jf is a function then just evaluate it once; otherwise treat it
    # as an already-computed matrix.
    if callable(Jf):
        J_analytic = atleast_2d(Jf(x0))
    else:
        J_analytic = atleast_2d(Jf)

    # Compute numeric jacobian
    J_numeric = finite_differences.numeric_jacobian(f, x0, h)

    # Check shape
    self.assertEqual(J_analytic.shape, J_numeric.shape)

    # Compute error: absolute error everywhere, switched to relative
    # error for entries with magnitude > 1 so large entries do not
    # dominate the threshold.
    J_abserr = np.abs(J_analytic - J_numeric)
    mask = np.abs(J_numeric) > 1.
    J_abserr[mask] /= np.abs(J_numeric[mask])

    # Check error; threshold should be independent of matrix size
    maxerr = np.max(J_abserr)
    if maxerr > tol:
        raise JacobianAssertionError(J_numeric, J_analytic)
    elif verbose:
        # Single-argument print(...) calls behave identically under
        # Python 2 and Python 3 (fixed from Python-2-only statements).
        print('numeric:')
        print(J_numeric)
        print('analytic:')
        print(J_analytic)
def compute_posterior(x0, x1, R_mean, t_mean):
    """Compute a posterior over a relative pose (R, t).

    Linearizes the first-order (Sampson) reprojection of the stacked
    correspondences about the mean pose via a local se(3) chart, forms a
    Gaussian likelihood in information form (v, L), converts it to
    moment form (mean, cov), and prints each intermediate quantity.

    Parameters:
      x0, x1          -- corresponding point sets (concatenated into x)
      R_mean, t_mean  -- mean rotation and translation of the pose prior
    """
    x = concatenate((x0, x1))

    # Fundamental matrix and algebraic cost as functions of the pose.
    Fmat = lambda R, t: fundamental.make_fundamental(eye(3), R, t)
    f_alg = lambda R, t: algebraic_cost(Fmat(R, t), x0, x1)
    J_alg = lambda R, t: Jalgebraic_cost_x(Fmat(R, t), x0, x1)

    # Sampson errors.  se3_chart returns an (R, t) pair, so it must be
    # unpacked before calling E_sampson.  Bug fix: the original passed
    # the pair as a single positional argument, which would raise a
    # TypeError whenever E_sampson_charted was evaluated.
    E_sampson = lambda R, t: ssq(sampson.firstorder_deviation(x, f_alg(R, t), J_alg(R, t)))
    E_sampson_charted = lambda v: E_sampson(*se3_chart((R_mean, t_mean), v))

    # Sampson reprojections.  The pose arrives as a single (R, t) pair;
    # explicit * unpacking replaces the Python-2-only tuple-parameter
    # lambda so the code also runs under Python 3, with identical
    # behavior.
    sampson_prediction = lambda Rt: sampson.firstorder_reprojection(x, f_alg(*Rt), J_alg(*Rt))
    sampson_prediction_charted = lambda v: sampson_prediction(se3_chart((R_mean, t_mean), v))

    # Prediction jacobian with respect to the chart coordinates
    Jprediction = finite_differences.numeric_jacobian(sampson_prediction_charted, zeros(6))

    # Compute likelihood in terms of R and t
    prediction = sampson_prediction((R_mean, t_mean))
    print(prediction)
    print(Jprediction)
    print(x)
    print(SENSOR_INFO)
    v, L = beliefs.compute_likelihood(zeros(6), prediction, Jprediction, x, eye(6))

    print('R_mean:')
    print(R_mean)
    print('t_mean')
    # Bug fix: the original printed `t`, which is not defined anywhere
    # in this function and would raise a NameError.
    print(t_mean)

    # Compute mean and variance
    print('v')
    print(v.round(2))
    print('L')
    print(L.round(2))
    mean, cov = beliefs.normal_to_invnormal(v, L)
    print('mean:')
    print(mean)
    print('cov:')
    print(cov)