def xtest_image_pan(self): cam = camera.Camera((1.0, 1.0, 89.23, 320., 320., 240.0)) vo = VisualOdometer(cam) prev_af = None pose = None im = Image.open("img1.pgm") for x in [0,5]: # range(0,100,10) + list(reversed(range(0, 100, 10))): lf = im.crop((x, 0, x + 640, 480)) rf = im.crop((x, 0, x + 640, 480)) af = SparseStereoFrame(lf, rf) vo.find_keypoints(af) vo.find_disparities(af) vo.collect_descriptors(af) if prev_af: pairs = vo.temporal_match(prev_af, af) pose = vo.solve(prev_af.kp, af.kp, pairs) for i in range(10): old = prev_af.kp[pairs[i][0]] new = af.kp[pairs[i][1]] print old, new, new[0] - old[0] prev_af = af print "frame", x, "has", len(af.kp), "keypoints", pose
def xtest_image_pan(self): cam = camera.Camera((1.0, 1.0, 89.23, 320., 320., 240.0)) vo = VisualOdometer(cam) prev_af = None pose = None im = Image.open("img1.pgm") for x in [0, 5]: # range(0,100,10) + list(reversed(range(0, 100, 10))): lf = im.crop((x, 0, x + 640, 480)) rf = im.crop((x, 0, x + 640, 480)) af = SparseStereoFrame(lf, rf) vo.find_keypoints(af) vo.find_disparities(af) vo.collect_descriptors(af) if prev_af: pairs = vo.temporal_match(prev_af, af) pose = vo.solve(prev_af.kp, af.kp, pairs) for i in range(10): old = prev_af.kp[pairs[i][0]] new = af.kp[pairs[i][1]] print old, new, new[0] - old[0] prev_af = af print "frame", x, "has", len(af.kp), "keypoints", pose
def test_solve_spin(self):
    """Solver must recover a pure 2*pi/80 rotation per frame with zero shift.

    Test process with one 'ideal' camera, one real-world Videre.  A cube
    lattice of model points spins about the Y axis; for each consecutive
    frame pair the solve() result is compared against the analytic
    per-frame rotation matrix and a zero translation.
    """
    camera_param_list = [
        (200.0, 200.0, 3.00, 320.0, 320.0, 240.0),
        (389.0, 389.0, 89.23, 323.42, 323.42, 274.95),
    ]
    for cam_params in camera_param_list:
        cam = camera.Camera(cam_params)
        vo = VisualOdometer(cam)
        kps = []
        model = [(x * 200, y * 200, z * 200)
                 for x in range(-3, 4)
                 for y in range(-3, 4)
                 for z in range(-3, 4)]
        for angle in range(80):
            im = Image.new("L", (640, 480))
            theta = (angle / 80.) * (pi * 2)
            R = rotation(theta, 0, 1, 0)
            kp = []
            for (mx, my, mz) in model:
                pp = None
                pt_camera = (numpy.dot(numpy.array([mx, my, mz]), R))
                (cx, cy, cz) = numpy.array(pt_camera).ravel()
                if cz > 100:
                    (x, y, d) = cam.cam2pix(cx, cy, cz)
                    if 0 <= x and x < 640 and 0 <= y and y < 480:
                        pp = (x, y, d)
                        circle(im, x, y, 2, 255)
                # pp stays None for points behind the camera or off-screen.
                kp.append(pp)
            kps.append(kp)
        expected_rot = numpy.array(
            numpy.mat(rotation(2 * pi / 80, 0, 1, 0))).ravel()

        # Hoisted out of the loop below: no need to re-create the helper
        # on each of the 100 iterations.
        def sanify(L, sub):
            # Replace None (invisible-point) entries with a placeholder.
            return [p or sub for p in L]

        for i in range(100):
            i0 = i % 80
            i1 = (i + 1) % 80
            # Use 'j', not 'i': in Python 2 a list-comprehension variable
            # leaks into the enclosing scope and would shadow/clobber the
            # outer loop index.
            pairs = [(j, j) for j in range(len(model))
                     if (kps[i0][j] and kps[i1][j])]
            (inliers, rot, shift) = vo.solve(sanify(kps[i0], (0, 0, 0)),
                                             sanify(kps[i1], (0, 0, 0)),
                                             pairs)
            self.assert_(inliers != 0)
            self.assertAlmostEqual(shift[0], 0.0, 3)
            self.assertAlmostEqual(shift[1], 0.0, 3)
            self.assertAlmostEqual(shift[2], 0.0, 3)
            for (et, at) in zip(rot, expected_rot):
                self.assertAlmostEqual(et, at, 3)
def test_solve_spin(self):
    """Spin a lattice of 3-D points about the Y axis and check that
    vo.solve() reports the expected frame-to-frame rotation and no shift.

    Test process with one 'ideal' camera, one real-world Videre.
    """
    camera_param_list = [
        (200.0, 200.0, 3.00, 320.0, 320.0, 240.0),
        (389.0, 389.0, 89.23, 323.42, 323.42, 274.95),
    ]
    for cam_params in camera_param_list:
        cam = camera.Camera(cam_params)
        vo = VisualOdometer(cam)
        kps = []
        # 7x7x7 cube of model points, 200mm apart.
        model = []
        for x in range(-3, 4):
            for y in range(-3, 4):
                for z in range(-3, 4):
                    model.append((x * 200, y * 200, z * 200))
        for angle in range(80):
            im = Image.new("L", (640, 480))
            theta = (angle / 80.) * (pi * 2)
            R = rotation(theta, 0, 1, 0)
            kp = []
            for (mx, my, mz) in model:
                pp = None
                pt_camera = (numpy.dot(numpy.array([mx, my, mz]), R))
                (cx, cy, cz) = numpy.array(pt_camera).ravel()
                if cz > 100:
                    (x, y, d) = cam.cam2pix(cx, cy, cz)
                    if 0 <= x < 640 and 0 <= y < 480:
                        pp = (x, y, d)
                        circle(im, x, y, 2, 255)
                kp.append(pp)
            kps.append(kp)
        expected_rot = numpy.array(
            numpy.mat(rotation(2 * pi / 80, 0, 1, 0))).ravel()
        for step in range(100):
            i0 = step % 80
            i1 = (step + 1) % 80
            # Pair up every model point visible in both frames.
            pairs = []
            for k in range(len(model)):
                if kps[i0][k] and kps[i1][k]:
                    pairs.append((k, k))

            def sanify(L, sub):
                return [entry or sub for entry in L]

            (inliers, rot, shift) = vo.solve(sanify(kps[i0], (0, 0, 0)),
                                             sanify(kps[i1], (0, 0, 0)),
                                             pairs)
            self.assert_(inliers != 0)
            self.assertAlmostEqual(shift[0], 0.0, 3)
            self.assertAlmostEqual(shift[1], 0.0, 3)
            self.assertAlmostEqual(shift[2], 0.0, 3)
            for (et, at) in zip(rot, expected_rot):
                self.assertAlmostEqual(et, at, 3)
def test_solve_rotation(self):
    """Orbit a planar landmark grid and verify the solved camera shift.

    A 7x7 grid of points is rotated by 2*pi/80 per frame and pushed out
    to `radius`, so consecutive camera-frame positions differ by the chord
    2 * radius * sin(pi/80).  Also round-trips cam2pix/pix2cam for every
    projected point.
    """
    cam = camera.Camera((389.0, 389.0, 89.23, 323.42, 323.42, 274.95))
    vo = VisualOdometer(cam)
    radius = 1200.0
    kps = []
    for angle in range(80):
        im = Image.new("L", (640, 480))
        theta = (angle / 80.) * (pi * 2)
        R = rotation(theta, 0, 1, 0)
        kp = []
        for s in range(7):
            for t in range(7):
                y = -400
                pt_model = numpy.array([110 * (s - 3), y, 110 * (t - 3)]).transpose()
                pt_camera = (numpy.dot(pt_model, R) + numpy.array([0, 0, radius])).transpose()
                (cx, cy, cz) = [float(i) for i in pt_camera]
                (x, y, d) = cam.cam2pix(cx, cy, cz)
                # pix2cam must invert cam2pix.  Named 'roundtrip' to stop
                # shadowing the builtin reversed().
                roundtrip = cam.pix2cam(x, y, d)
                self.assertAlmostEqual(cx, roundtrip[0], 3)
                self.assertAlmostEqual(cy, roundtrip[1], 3)
                self.assertAlmostEqual(cz, roundtrip[2], 3)
                kp.append((x, y, d))
                circle(im, x, y, 2, 255)
        kps.append(kp)
    expected_shift = 2 * radius * sin(pi / 80)
    for i in range(100):
        i0 = i % 80
        i1 = (i + 1) % 80
        pairs = zip(range(len(kps[i0])), range(len(kps[i1])))
        (inliers, rod, shift) = vo.solve(kps[i0], kps[i1], pairs)
        actual_shift = sqrt(shift[0] * shift[0] + shift[1] * shift[1] + shift[2] * shift[2])
        # Should be able to estimate camera shift to nearest thousandth of mm
        self.assertAlmostEqual(actual_shift, expected_shift, 3)
def test_solve_rotation(self):
    """Rotate a fixed plane of landmarks around the camera; the estimated
    inter-frame translation must equal the analytic chord length, and the
    cam2pix / pix2cam pair must round-trip every point."""
    cam = camera.Camera((389.0, 389.0, 89.23, 323.42, 323.42, 274.95))
    vo = VisualOdometer(cam)
    model = []
    radius = 1200.0
    kps = []
    for angle in range(80):
        im = Image.new("L", (640, 480))
        theta = (angle / 80.) * (pi * 2)
        R = rotation(theta, 0, 1, 0)
        frame_kp = []
        for s in range(7):
            for t in range(7):
                y = -400
                pt_model = numpy.array([110 * (s - 3), y, 110 * (t - 3)]).transpose()
                pt_camera = (numpy.dot(pt_model, R) + numpy.array([0, 0, radius])).transpose()
                (cx, cy, cz) = [float(c) for c in pt_camera]
                (x, y, d) = cam.cam2pix(cx, cy, cz)
                # Projection must invert cleanly.
                back = cam.pix2cam(x, y, d)
                self.assertAlmostEqual(cx, back[0], 3)
                self.assertAlmostEqual(cy, back[1], 3)
                self.assertAlmostEqual(cz, back[2], 3)
                frame_kp.append((x, y, d))
                circle(im, x, y, 2, 255)
        kps.append(frame_kp)
    # Chord traversed between consecutive frames on the orbit.
    expected_shift = 2 * radius * sin(pi / 80)
    for step in range(100):
        a = step % 80
        b = (step + 1) % 80
        pairs = zip(range(len(kps[a])), range(len(kps[b])))
        (inliers, rod, shift) = vo.solve(kps[a], kps[b], pairs)
        actual_shift = sqrt(shift[0] * shift[0] + shift[1] * shift[1] + shift[2] * shift[2])
        # Should be able to estimate camera shift to nearest thousandth of mm
        self.assertAlmostEqual(actual_shift, expected_shift, 3)
# NOTE(review): this fragment is the interior of a larger script -- the
# enclosing bag/topic loop and the definitions of cam, vo, prev_frame,
# framecounter, all_ds, ds, filename and sos are outside this view, and the
# original indentation has been collapsed onto one physical line (an
# embedded '#' comment mid-line makes the collapse lossy), so the code is
# kept byte-identical rather than re-structured.
# Intent, as far as this fragment shows: per videre/images message, wrap
# the right/left rectified images, build a SparseStereoFrame, find
# keypoints/disparities, accumulate all disparities into all_ds, match
# against the previous frame and fold the solved shift into sos; then store
# the per-file disparities in ds and plot one pylab histogram per file.
# NOTE(review): the final loop rebinds 'filename' and 'ds' while iterating
# ds.items() -- fine in Python 2 where items() returns a list, but this
# would raise under Python 3; rename the loop variables before porting.
if cam and topic.endswith("videre/images"): imgR = imgAdapted(msg.images[0]) imgL = imgAdapted(msg.images[1]) assert msg.images[0].label == "right_rectified" assert msg.images[1].label == "left_rectified" frame = SparseStereoFrame(imgL, imgR) vo.find_keypoints(frame) vo.find_disparities(frame) #frame.kp = [ (x,y,d) for (x,y,d) in frame.kp if d > 8] all_ds += [d for (x, y, d) in frame.kp] vo.collect_descriptors(frame) if prev_frame: pairs = vo.temporal_match(prev_frame, frame) solution = vo.solve(prev_frame.kp, frame.kp, pairs, True) (inl, rot, shift) = solution sos += numpy.array(shift) print sos prev_frame = frame framecounter += 1 ds[filename] = all_ds for i, (filename, ds) in enumerate(ds.items()): pylab.figure(i + 1) pylab.title(filename) pylab.hist(ds, 50, normed=1, facecolor='green', alpha=0.75) pylab.show()
# NOTE(review): interior fragment of a larger script.  The surrounding
# loop and the variables it references (cam, topic, msg, vo, prev_frame,
# framecounter, all_ds, ds, filename, sos) are defined outside this view,
# and the source has been flattened onto one line -- the '#frame.kp = ...'
# comment in the middle would swallow everything after it if run as-is --
# so the bytes are preserved untouched instead of being re-indented.
# What the visible calls do: adapt the two rectified images, build a
# SparseStereoFrame, run keypoint/disparity/descriptor extraction, collect
# every keypoint's disparity into all_ds, solve the pose change against
# prev_frame and accumulate the shift in sos; afterwards each file's
# disparity list is histogrammed with pylab.
# NOTE(review): 'for i,(filename, ds) in enumerate(ds.items())' reuses the
# dict's own name as a loop target -- legal in Python 2 (items() returns a
# list) but a porting hazard; presumably 'ds' maps filename -> disparity
# list -- verify against the caller.
if cam and topic.endswith("videre/images"): imgR = imgAdapted(msg.images[0]) imgL = imgAdapted(msg.images[1]) assert msg.images[0].label == "right_rectified" assert msg.images[1].label == "left_rectified" frame = SparseStereoFrame(imgL, imgR) vo.find_keypoints(frame) vo.find_disparities(frame) #frame.kp = [ (x,y,d) for (x,y,d) in frame.kp if d > 8] all_ds += [ d for (x,y,d) in frame.kp ] vo.collect_descriptors(frame) if prev_frame: pairs = vo.temporal_match(prev_frame, frame) solution = vo.solve(prev_frame.kp, frame.kp, pairs, True) (inl, rot, shift) = solution sos += numpy.array(shift) print sos prev_frame = frame framecounter += 1 ds[filename] = all_ds for i,(filename, ds) in enumerate(ds.items()): pylab.figure(i+1) pylab.title(filename) pylab.hist(ds, 50, normed=1, facecolor='green', alpha=0.75) pylab.show()