Example #1
0
    def relocation(self):
        while True:
            # capture the current RGB-D frame
            image, image_depth = self.camera.getImage()
            # estimate the pose of the current frame relative to the reference image via PnP
            pose, match_points_ref, match_points_cur = self.pnp.getPose(
                self.refImage, image, image_depth,
                self.camera.cameraCalibration.getK())

            # warp the current frame back into the reference view for inspection and logging
            R, t = pose.toRt()
            tmpPose = Pose3.fromRt(R, t)
            image_warp, image_depth_warp, warped_mask = cv2.rgbd.warpFrame(
                image, np.float32(image_depth), None,
                tmpPose.inverse().toSE3(),
                self.camera.cameraCalibration.getK(), None)
            image_depth_warp = np.uint16(image_depth_warp)

            self.curPose = pose.copy()  # type: Pose3
            self.curAFD = self.computeAFD(match_points_ref, match_points_cur)
            self.writeInfo(self.data_dir, self.step, image, image_depth, pose,
                           self.curAFD, image_warp, image_depth_warp)

            if self.stopCondition():
                break

            # move the platform to compensate the estimated motion
            movingPose = self.curPose  # type: Pose3
            dumpMotion = self.dumpPose(movingPose)
            self.platform.movePose(dumpMotion.inverse())
            # rots = np.array(motion[3:]) * self.dampRatio  # sequence: rz, ry, rx
            # trans = np.array(motion[:3]) * self.dampRatio * -1  # sequence: tx, ty, tz
            # self.platform.rotate(rots[2], rots[1], rots[0])
            # self.platform.translate(trans[0], trans[1], trans[2])

            # self.writeHandInfo(self.data_dir, self.step, rots, trans)

            self.step += 1
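The helpers self.computeAFD and self.dumpPose are not included in this snippet. Below is a minimal sketch of what they might look like, assuming AFD is the average feature displacement (mean pixel distance between matched points) and that dumpPose damps the estimated motion by the self.dampRatio factor referenced in the commented-out lines; names and signatures are inferred from the call sites only.

    def computeAFD(self, match_points_ref, match_points_cur):
        # hypothetical sketch: average feature displacement, i.e. the mean pixel
        # distance between matched reference and current keypoints
        import numpy as np
        diff = np.asarray(match_points_ref) - np.asarray(match_points_cur)
        return float(np.mean(np.linalg.norm(diff, axis=1)))

    def dumpPose(self, pose: Pose3) -> Pose3:
        # hypothetical sketch: damp the motion so the platform converges in small steps;
        # self.dampRatio is an assumed attribute (see the commented-out lines above)
        import cv2
        import numpy as np
        R, t = pose.toRt()
        rvec, _ = cv2.Rodrigues(R)                  # rotation as an axis-angle vector
        R_damped, _ = cv2.Rodrigues(rvec * self.dampRatio)
        t_damped = np.asarray(t) * self.dampRatio
        return Pose3.fromRt(R_damped, t_damped)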
    def getPose(self, refImg: np.ndarray, curImg: np.ndarray, K: np.ndarray) -> (Pose3, np.ndarray, np.ndarray):
        import cv2
        # match SIFT features between the reference and current images
        ref_pts, cur_pts = SIFTFeature.detectAndMatch(image1=refImg, image2=curImg)
        # estimate the essential matrix with RANSAC, then recover the relative rotation
        # and (unit-scale) translation; K is passed so pixel coordinates are handled correctly
        E, mask = cv2.findEssentialMat(ref_pts, cur_pts, K, method=cv2.RANSAC, prob=0.999, threshold=1.0)
        retval, R, t, mask = cv2.recoverPose(E, ref_pts, cur_pts, K, mask=mask)
        pose = Pose3.fromRt(R, t)
        return pose, ref_pts, cur_pts
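SIFTFeature.detectAndMatch is called in several of these snippets but never shown. Below is a minimal sketch of such a helper, using OpenCV SIFT, brute-force kNN matching, and Lowe's ratio test; the class name, signature, and the Nx2 float point arrays it returns are assumptions inferred from the call sites.

import cv2
import numpy as np


class SIFTFeature:
    @staticmethod
    def detectAndMatch(image1: np.ndarray, image2: np.ndarray):
        # detect SIFT keypoints and descriptors in both images
        sift = cv2.SIFT_create()
        kp1, des1 = sift.detectAndCompute(image1, None)
        kp2, des2 = sift.detectAndCompute(image2, None)

        # brute-force kNN matching with Lowe's ratio test to drop ambiguous matches
        matcher = cv2.BFMatcher(cv2.NORM_L2)
        matches = matcher.knnMatch(des1, des2, k=2)
        good = [m for m, n in matches if m.distance < 0.75 * n.distance]

        # return the matched pixel coordinates as two Nx2 float32 arrays
        pts1 = np.float32([kp1[m.queryIdx].pt for m in good])
        pts2 = np.float32([kp2[m.trainIdx].pt for m in good])
        return pts1, pts2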
Example #3
0
    @classmethod
    def loadPose(cls, json_path):
        with open(json_path, 'r') as f:
            info = json.load(f)
            R = np.array(info['R']).reshape(3, 3)
            t = np.array(info['t'])
            # TODO: modify PnP, and remove t = t * 1000
            t = t * 1000
            pose = Pose3.fromRt(R, t)

            return pose
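loadPose reads a JSON file holding a flattened 3x3 rotation under 'R' and a 3-vector translation under 't'. Below is a small sketch of a matching writer (a hypothetical helper, not part of these snippets) that produces a file loadPose can read back.

import json
import numpy as np


def savePose(json_path, R: np.ndarray, t: np.ndarray):
    # hypothetical counterpart of loadPose: store R as 9 row-major values and t as 3 values
    info = {'R': R.reshape(-1).tolist(), 't': t.reshape(-1).tolist()}
    with open(json_path, 'w') as f:
        json.dump(info, f)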
    def getPose(self, refImg: np.ndarray, curImg: np.ndarray, K: np.ndarray) -> (Pose3, np.ndarray, np.ndarray):
        ref_pts, cur_pts = SIFTFeature.detectAndMatch(image1=refImg, image2=curImg)  # type: np.ndarray, np.ndarray

        # flatten the matched points and the intrinsics into plain lists for the
        # five-point algorithm wrapper
        ref_pts_list = list(ref_pts.flatten())
        cur_pts_list = list(cur_pts.flatten())
        K_list = list(K.flatten())

        # debug: print a few matched points and their flattened layout
        print(ref_pts[:5])
        print(ref_pts_list[:10])

        # the wrapper returns a row-major 3x3 rotation and a translation vector
        R, t = self._fivePointsAlg.calcRP(ref_pts_list, cur_pts_list, K_list)
        R = np.array(R).reshape(3, 3)
        t = np.array(t)

        pose_ref_to_cur = Pose3.fromRt(R, t)
        return pose_ref_to_cur, ref_pts, cur_pts
Example #5
0
    def getPose(self, refImg: np.ndarray, curImg: np.ndarray,
                curImg_depth: np.ndarray, K: np.ndarray):
        import cv2
        ref_pts, cur_pts = SIFTFeature.detectAndMatch(image1=refImg,
                                                      image2=curImg)
        # lift the matched points of the current frame to 3D using its depth image and K
        ref_pts_2d, cur_pts_2d, cur_pts_3d = self.get3dPoints(
            ref_pts, cur_pts, curImg_depth, K)
        # solve PnP with RANSAC: 3D points from the current frame observed in the reference image
        retval, rvec, tvec, inliers = cv2.solvePnPRansac(
            cur_pts_3d, ref_pts_2d, K, None, flags=cv2.SOLVEPNP_EPNP)

        if retval:
            R, jacobian = cv2.Rodrigues(rvec)
            pose = Pose3.fromRt(R, tvec)
            # pose must be inverted
            pose = pose.inverse()
            return pose, ref_pts_2d, cur_pts_2d
        else:
            return Pose3(), ref_pts_2d, cur_pts_2d
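get3dPoints is not included in this snippet; judging by the call, it keeps only matches with a valid depth reading and back-projects the current-frame points to 3D with the intrinsics K. Below is a minimal sketch under those assumptions (depth is taken to be a uint16 map in millimeters, matching the np.uint16 handling in Example #1).

    def get3dPoints(self, ref_pts, cur_pts, curImg_depth, K):
        # hypothetical sketch: back-project the matched points of the current frame to 3D
        # using its depth image (assumed uint16, millimeters) and the pinhole intrinsics K
        import numpy as np
        fx, fy = K[0, 0], K[1, 1]
        cx, cy = K[0, 2], K[1, 2]

        ref_pts_2d, cur_pts_2d, cur_pts_3d = [], [], []
        for (ur, vr), (uc, vc) in zip(ref_pts, cur_pts):
            z = float(curImg_depth[int(round(vc)), int(round(uc))])
            if z <= 0:
                continue  # skip matches without a valid depth reading
            x = (uc - cx) * z / fx
            y = (vc - cy) * z / fy
            ref_pts_2d.append([ur, vr])
            cur_pts_2d.append([uc, vc])
            cur_pts_3d.append([x, y, z])

        return (np.float32(ref_pts_2d), np.float32(cur_pts_2d),
                np.float32(cur_pts_3d))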
Example #6
0
    @classmethod
    def poseWithScale(cls, pose: Pose3, s: float):
        # keep the rotation, rescale the translation direction to length s
        R, t = pose.toRt()
        t_norm = cls.normalized(t)
        ts = t_norm * s

        return Pose3.fromRt(R, ts)
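poseWithScale is handy for attaching a metric scale to the unit-norm translation that an essential-matrix pose provides. Below is a small usage sketch; the owning class name PoseUtils, the pnp object, and the 35 mm scale are all assumptions.

# hypothetical usage: give the scale-free essential-matrix translation a metric length
pose, ref_pts, cur_pts = pnp.getPose(ref_img, cur_img, K)  # rotation + unit-direction t
scale_mm = 35.0                                            # baseline length known from elsewhere
pose_metric = PoseUtils.poseWithScale(pose, scale_mm)      # same R, t rescaled to 35 mm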