Example #1
    def update_poses_and_points(self,
                                keyframes,
                                correction=None,
                                exclude=set()):

        for kf in keyframes:
            if len(exclude) > 0 and kf in exclude:
                continue
            uncorrected = g2o.Isometry3d(kf.orientation, kf.position)
            if correction is None:
                vertex = self.vertex(kf.id)
                if vertex.fixed():
                    continue
                corrected = vertex.estimate()
            else:
                corrected = uncorrected * correction

            # skip keyframes whose pose is essentially unchanged
            delta = uncorrected.inverse() * corrected
            if (g2o.AngleAxis(delta.rotation()).angle() < 0.02
                    and np.linalg.norm(delta.translation()) < 0.03):  # 1°, 3cm
                continue

            # remap triangulated map points from the old pose to the corrected one
            for m in kf.measurements():
                if m.from_triangulation():
                    old = m.mappoint.position
                    new = corrected * (uncorrected.inverse() * old)
                    m.mappoint.update_position(new)
                    # update normal ?
            kf.update_pose(corrected)
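
The small-motion test above (rotation under roughly 1°, translation under 3 cm) recurs in several of these examples. As a minimal standalone sketch, assuming only NumPy and the g2o Python bindings used above (the helper name and the default thresholds are illustrative, not part of the original code):

import numpy as np
import g2o

def pose_delta_is_small(T_old, T_new, max_angle=0.02, max_translation=0.03):
    # relative motion between two g2o.Isometry3d poses
    delta = T_old.inverse() * T_new
    angle = g2o.AngleAxis(delta.rotation()).angle()   # radians
    shift = np.linalg.norm(delta.translation())       # metres
    return angle < max_angle and shift < max_translation

With such a helper, the check in update_poses_and_points reduces to: if pose_delta_is_small(uncorrected, corrected): continue.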
Example #2
    def predict_pose(self,
                     timestamp,
                     prev_position=None,
                     prev_orientation=None):
        '''
        Predict the next camera pose.
        '''
        if prev_position is not None:
            self.position = prev_position
        if prev_orientation is not None:
            self.orientation = prev_orientation

        if not self.initialized:
            return (g2o.Isometry3d(self.orientation,
                                   self.position), self.covariance)

        dt = timestamp - self.timestamp

        # build the incremental rotation from the stored angular velocity over dt (damped)
        delta_angle = g2o.AngleAxis(self.v_angular_angle * dt * self.damp,
                                    self.v_angular_axis)
        delta_orientation = g2o.Quaternion(delta_angle)

        orientation = delta_orientation * self.orientation
        position = self.position + delta_orientation * (self.v_linear * dt *
                                                        self.damp)

        return (g2o.Isometry3d(orientation, position), self.covariance)
Example #3
    def update_pose(self,
                    timestamp,
                    new_position,
                    new_orientation,
                    new_covariance=None):
        '''
        Update the motion model when given a new camera pose.
        '''
        if self.initialized:
            dt = timestamp - self.timestamp
            assert dt != 0

            v_linear = (new_position - self.position) / dt
            self.v_linear = v_linear

            delta_q = self.orientation.inverse() * new_orientation
            delta_q.normalize()

            delta_angle = g2o.AngleAxis(delta_q)
            angle = delta_angle.angle()
            axis = delta_angle.axis()

            # keep the shorter, equivalent rotation (flip the axis, wrap the angle)
            if angle > np.pi:
                axis = axis * -1
                angle = 2 * np.pi - angle

            self.v_angular_axis = axis
            self.v_angular_angle = angle / dt

        self.timestamp = timestamp
        self.position = new_position
        self.orientation = new_orientation
        self.covariance = new_covariance
        self.initialized = True
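
The angle > np.pi branch above exists because converting a quaternion to an angle-axis can report the rotation "the long way around" (an angle above π), depending on the sign of the quaternion's scalar part and the Eigen version behind the bindings; flipping the axis and subtracting the angle from 2π yields the equivalent shorter rotation, which is what a velocity estimate should be built from. A minimal standalone check with illustrative values (not taken from the original code):

import numpy as np
import g2o

# a rotation of 3.5 rad about +z, round-tripped through a quaternion
q = g2o.Quaternion(g2o.AngleAxis(3.5, [0.0, 0.0, 1.0]))
aa = g2o.AngleAxis(q)
angle, axis = aa.angle(), aa.axis()

if angle > np.pi:            # reported the long way around
    axis = axis * -1
    angle = 2 * np.pi - angle

# either way, the result is the shorter equivalent rotation:
# roughly 2.78 rad (2*pi - 3.5) about -z
print(angle, axis)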
Example #4
    def predict_pose(self, timestamp):
        '''
        Predict the next camera pose.
        '''
        if not self.initialized:
            return (g2o.Isometry3d(self.orientation,
                                   self.position), self.covariance)

        dt = timestamp - self.timestamp

        # Use quaternion instead of rotation matrix due to performance
        delta_angle = g2o.AngleAxis(self.v_angular_angle * dt * self.damp,
                                    self.v_angular_axis)
        delta_orientation = g2o.Quaternion(delta_angle)

        position = self.position + self.v_linear * dt * self.damp
        orientation = self.orientation * delta_orientation

        return (g2o.Isometry3d(orientation, position), self.covariance)
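
Note that Example #2 composes the predicted rotation as delta_orientation * self.orientation while Example #4 uses self.orientation * delta_orientation. Quaternion multiplication is not commutative: if the stored orientation maps camera (body) coordinates to world coordinates, the first form applies the increment in the world frame and the second applies it in the body frame. A quick sketch of the difference, with illustrative values:

import math
import numpy as np
import g2o

R = g2o.Quaternion(g2o.AngleAxis(math.pi / 2, [0.0, 0.0, 1.0]))      # current orientation
delta = g2o.Quaternion(g2o.AngleAxis(math.pi / 4, [1.0, 0.0, 0.0]))  # predicted increment
v = np.array([0.0, 1.0, 0.0])

print((delta * R) * v)  # world-frame increment: approx (-1, 0, 0)
print((R * delta) * v)  # body-frame increment:  approx (-0.71, 0, 0.71)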
Example #5
    def update_pose(self,
                    timestamp,
                    new_position,
                    new_orientation,
                    new_covariance=None):
        '''
        Update the motion model when given a new camera pose.
        '''
        # log the pose as the top 3x4 block of the homogeneous matrix
        # (12 values per line, row-major)
        transformation_matrix = np.identity(4, dtype=np.float32)
        transformation_matrix[0:3, 0:3] = np.array(
            new_orientation.rotation_matrix(), dtype=np.float32)
        transformation_matrix[0:3, 3] = new_position

        self.f.write(" ".join(
            ['{:.6e}'.format(x)
             for x in transformation_matrix.flatten()[0:12]]) + "\n")

        if self.initialized:

            dt = timestamp - self.timestamp
            assert dt != 0

            v_linear = (new_position - self.position) / dt
            self.v_linear = v_linear

            delta_q = self.orientation.inverse() * new_orientation
            delta_q.normalize()

            delta_angle = g2o.AngleAxis(delta_q)
            angle = delta_angle.angle()
            axis = delta_angle.axis()

            # keep the shorter, equivalent rotation (flip the axis, wrap the angle)
            if angle > np.pi:
                axis = axis * -1
                angle = 2 * np.pi - angle

            self.v_angular_axis = axis
            self.v_angular_angle = angle / dt
            
        self.timestamp = timestamp
        self.position = new_position
        self.orientation = new_orientation
        self.covariance = new_covariance
        self.initialized = True
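
The write in this example logs, per call, the first twelve entries of the flattened 4x4 matrix, i.e. the top three rows of the camera pose in row-major order. Assuming the resulting file is later loaded for evaluation (the filename is a placeholder), a minimal sketch of reading it back:

import numpy as np

poses = np.loadtxt('trajectory.txt').reshape(-1, 3, 4)  # one 3x4 pose per line
rotations = poses[:, :, :3]      # 3x3 rotation blocks
translations = poses[:, :, 3]    # translation vectors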
Example #6
def match_and_estimate(query_keyframe, match_keyframe, params):
    query = defaultdict(list)
    for m in query_keyframe.measurements():
        if m.from_triangulation():
            query['measurements'].append(m)
            query['kps1'].append(m.get_keypoint(0))
            query['kps2'].append(m.get_keypoint(1))
            query['desps1'].append(m.get_descriptor(0))
            query['desps2'].append(m.get_descriptor(1))
            n = len(query['matches'])
            query['matches'].append(cv2.DMatch(n, n, 0))

    match = defaultdict(list)
    for m in match_keyframe.measurements():
        if m.from_triangulation():
            match['measurements'].append(m)
            match['kps1'].append(m.get_keypoint(0))
            match['kps2'].append(m.get_keypoint(1))
            match['desps1'].append(m.get_descriptor(0))
            match['desps2'].append(m.get_descriptor(1))
            n = len(match['matches'])
            match['matches'].append(cv2.DMatch(n, n, 0))

    stereo_matches = query_keyframe.feature.circular_stereo_match(
        query['desps1'], query['desps2'], query['matches'], match['desps1'],
        match['desps2'], match['matches'], params.matching_distance,
        params.lc_inliers_threshold)

    n_matches = len(stereo_matches)
    if n_matches < params.lc_inliers_threshold:
        return None

    for m13, _ in stereo_matches:
        i, j = m13.queryIdx, m13.trainIdx
        query['px'].append(query['kps1'][i].pt)
        query['pt'].append(query['measurements'][i].view)
        match['px'].append(match['kps1'][j].pt)
        match['pt'].append(match['measurements'][j].view)

    # query_keyframe's pose in match_keyframe's coordinate frame
    T13, inliers13 = solve_pnp_ransac(query['pt'], match['px'],
                                      match_keyframe.cam.intrinsic)

    # match_keyframe's pose in query_keyframe's coordinate frame
    T31, inliers31 = solve_pnp_ransac(match['pt'], query['px'],
                                      query_keyframe.cam.intrinsic)

    if T13 is None or T31 is None:
        return None

    # the two estimates should approximately invert each other
    delta = T31 * T13
    if (g2o.AngleAxis(delta.rotation()).angle() > 0.1
            or np.linalg.norm(delta.translation()) > 0.5):  # 5.7° or 0.5m
        return None

    n_inliers = len(set(inliers13) & set(inliers31))
    query_pose = g2o.Isometry3d(query_keyframe.orientation,
                                query_keyframe.position)
    match_pose = g2o.Isometry3d(match_keyframe.orientation,
                                match_keyframe.position)

    # TODO: combine T13 and T31
    constraint = T13
    estimated_pose = match_pose * constraint
    correction = query_pose.inverse() * estimated_pose

    return namedtuple('MatchEstimateResult', [
        'estimated_pose', 'constraint', 'correction',
        'query_stereo_measurements', 'match_stereo_measurements',
        'stereo_matches', 'n_matches', 'n_inliers'
    ])(estimated_pose, constraint, correction, query['measurements'],
       match['measurements'], stereo_matches, n_matches, n_inliers)
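
The returned correction is a relative g2o.Isometry3d satisfying query_pose * correction == estimated_pose, which is exactly the form expected by the correction argument of update_poses_and_points in Example #1 (applied on the right of each uncorrected pose). A hedged sketch of how the two pieces might be wired together; pose_graph, recent_keyframes and the surrounding loop-closure logic are assumptions, not part of the original code:

result = match_and_estimate(query_keyframe, match_keyframe, params)
if result is not None:
    # propagate the loop-closure correction to nearby keyframes and their map points
    # (pose_graph and recent_keyframes are placeholders for the caller's own state)
    pose_graph.update_poses_and_points(recent_keyframes,
                                       correction=result.correction,
                                       exclude={match_keyframe})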
Example #7
    def get_rotation_angle_axis(self):
        angle_axis = g2o.AngleAxis(self._pose.orientation())
        #angle = angle_axis.angle()
        #axis = angle_axis.axis()
        return angle_axis
Example #8
def match_and_estimate(query_keyframe, match_keyframe, params):
    query = defaultdict(list)
    for kp, desp in zip(query_keyframe.feature.keypoints,
                        query_keyframe.feature.descriptors):
        query['kps'].append(kp)
        query['desps'].append(desp)
        query['px'].append(kp.pt)

    match = defaultdict(list)
    for kp, desp in zip(match_keyframe.feature.keypoints,
                        match_keyframe.feature.descriptors):
        match['kps'].append(kp)
        match['desps'].append(desp)
        match['px'].append(kp.pt)

    matches = query_keyframe.feature.direct_match(
        query['desps'], match['desps'], params.matching_distance,
        params.matching_distance_ratio)

    query_pts = depth_to_3d(query_keyframe.depth, query['px'],
                            query_keyframe.cam)
    match_pts = depth_to_3d(match_keyframe.depth, match['px'],
                            match_keyframe.cam)

    if len(matches) < params.lc_inliers_threshold:
        return None

    near = query_keyframe.cam.depth_near
    far = query_keyframe.cam.depth_far
    for (i, j) in matches:
        if (near <= query_pts[i][2] <= far):
            query['pt12'].append(query_pts[i])
            query['px12'].append(query['kps'][i].pt)
            match['px12'].append(match['kps'][j].pt)
        if (near <= match_pts[j][2] <= far):
            query['px21'].append(query['kps'][i].pt)
            match['px21'].append(match['kps'][j].pt)
            match['pt21'].append(match_pts[j])

    if len(query['pt12']) < 6 or len(match['pt21']) < 6:
        return None

    T12, inliers12 = solve_pnp_ransac(query['pt12'], match['px12'],
                                      match_keyframe.cam.intrinsic)

    T21, inliers21 = solve_pnp_ransac(match['pt21'], query['px21'],
                                      query_keyframe.cam.intrinsic)

    if T12 is None or T21 is None:
        return None

    delta = T21 * T12
    if (g2o.AngleAxis(delta.rotation()).angle() > 0.06
            or np.linalg.norm(delta.translation()) > 0.06):  # 3° or 0.06m
        return None

    # collect inlier pixel correspondences from both PnP directions, keyed by
    # integer pixel coordinates so they can be matched back to measurements
    ms = set()
    qd = dict()
    md = dict()
    for i in inliers12:
        pt1 = (int(query['px12'][i][0]), int(query['px12'][i][1]))
        pt2 = (int(match['px12'][i][0]), int(match['px12'][i][1]))
        ms.add((pt1, pt2))
    for i in inliers21:
        pt1 = (int(query['px21'][i][0]), int(query['px21'][i][1]))
        pt2 = (int(match['px21'][i][0]), int(match['px21'][i][1]))
        ms.add((pt1, pt2))
    for i, (pt1, pt2) in enumerate(ms):
        qd[pt1] = i
        md[pt2] = i

    qd2 = dict()
    md2 = dict()
    for m in query_keyframe.measurements():
        pt = m.get_keypoint(0).pt
        idx = qd.get((int(pt[0]), int(pt[1])), None)
        if idx is not None:
            qd2[idx] = m
    for m in match_keyframe.measurements():
        pt = m.get_keypoint(0).pt
        idx = md.get((int(pt[0]), int(pt[1])), None)
        if idx is not None:
            md2[idx] = m
    shared_measurements = [(qd2[i], md2[i]) for i in (qd2.keys() & md2.keys())]

    n_matches = (len(query['pt12']) + len(match['pt21'])) / 2.
    n_inliers = max(len(inliers12), len(inliers21))
    query_pose = g2o.Isometry3d(query_keyframe.orientation,
                                query_keyframe.position)
    match_pose = g2o.Isometry3d(match_keyframe.orientation,
                                match_keyframe.position)

    # TODO: combine T12 and T21
    constraint = T12
    estimated_pose = match_pose * constraint
    correction = query_pose.inverse() * estimated_pose

    return namedtuple('MatchEstimateResult', [
        'estimated_pose', 'constraint', 'correction', 'shared_measurements',
        'n_matches', 'n_inliers'
    ])(estimated_pose, constraint, correction, shared_measurements, n_matches,
       n_inliers)
Example #9
import sys
import numpy as np
import math

sys.path.append("../../")
from config import Config

import g2o

position = np.zeros(3)          # translation part
orientation = g2o.Quaternion()  # identity rotation

pose = g2o.Isometry3d(orientation, position)

print('pose: ', pose.matrix())

# modify the original array to check whether Isometry3d stored a copy
position[0] = 1

print('pose: ', pose.matrix())

# a 180-degree rotation about the z-axis
orientation2 = g2o.Quaternion(g2o.AngleAxis(math.pi, [0, 0, 1]))

print('position', position)
print('orientation2*position', orientation2 * position)  # rotate the vector