def build_correspondance(self, visible_markers, camera_calibration, min_marker_perimeter, min_id_confidence):
    """Build up marker-to-surface uv correspondences from one frame.

    - filter visible markers by on-screen perimeter
    - undistort and normalize their vertices
    - fit a convex quadrangle around all vertices
    - use the quadrangle verts to establish a perspective transform
    - map all marker vertices into surface (u,v) space
    - accumulate uv coords per marker id until enough samples exist

    Args:
        visible_markers: list of marker dicts with 'id', 'verts', 'perimeter'.
        camera_calibration: dict with 'camera_matrix' and 'dist_coefs'.
        min_marker_perimeter: minimum pixel perimeter for a marker to be used.
        min_id_confidence: unused here; kept for interface parity with callers.
    """
    # Only markers big enough on screen are reliable for the fit.
    usable_markers = [m for m in visible_markers
                      if m['perimeter'] >= min_marker_perimeter]
    if not usable_markers:
        return

    all_verts = np.array([m['verts'] for m in usable_markers], dtype=np.float32)
    all_verts.shape = (-1, 1, 2)  # [vert,vert,vert,...] with vert = [[r,c]]

    # undistorted+normalized space: centered on the image center,
    # flipped in y, coordinates roughly in range [-1, 1]
    all_verts_undistorted_normalized = cv2.undistortPoints(
        all_verts,
        np.asarray(camera_calibration['camera_matrix']),
        np.asarray(camera_calibration['dist_coefs']) * self.use_distortion)

    hull = cv2.convexHull(all_verts_undistorted_normalized, clockwise=False)

    # simplify until we have exactly 4 verts
    if hull.shape[0] > 4:
        new_hull = cv2.approxPolyDP(hull, epsilon=1, closed=True)
        if new_hull.shape[0] >= 4:
            hull = new_hull
    if hull.shape[0] > 4:
        # still too many: keep only the 4 most acute corners
        curvature = abs(GetAnglesPolyline(hull, closed=True))
        most_acute_4_threshold = sorted(curvature)[3]
        hull = hull[curvature <= most_acute_4_threshold]

    # the undistorted-normalized space is flipped in y,
    # so we need to change the order of the hull vertices
    hull = hull[[1, 0, 3, 2], :, :]

    # now roll the hull verts until we have the right orientation:
    # the space has its origin at the image center; adding 1 to the
    # coordinates puts the origin at the top left.
    distance_to_top_left = np.sqrt((hull[:, :, 0] + 1) ** 2 +
                                   (hull[:, :, 1] + 1) ** 2)
    bot_left_idx = np.argmin(distance_to_top_left) + 1
    hull = np.roll(hull, -bot_left_idx, axis=0)

    # based on these 4 verts we calculate the transform into a (0,0)-(1,1) square space
    m_from_undistored_norm_space = m_verts_from_screen(hull)
    self.detected = True

    # map the marker vertices into surface space
    # (one can think of these as texture coordinates u,v)
    marker_uv_coords = cv2.perspectiveTransform(
        all_verts_undistorted_normalized, m_from_undistored_norm_space)
    marker_uv_coords.shape = (-1, 4, 1, 2)  # [marker,marker,...], marker = [[[r,c]], ...]

    # build up a dict of discovered markers, each with a history of uv coords.
    # BUGFIX: pair the uv coords with the perimeter-filtered marker list, not
    # with all visible markers — otherwise coords get attributed to the wrong
    # marker ids whenever any marker was dropped by the filter above.
    for m, uv in zip(usable_markers, marker_uv_coords):
        try:
            self.markers[m['id']].add_uv_coords(uv)
        except KeyError:
            self.markers[m['id']] = Support_Marker(m['id'])
            self.markers[m['id']].add_uv_coords(uv)

    # average count of collected uv correspondences across detected markers
    self.build_up_status = sum(
        len(m.collected_uv_coords) for m in self.markers.values()
    ) / float(len(self.markers))
    if self.build_up_status >= self.required_build_up:
        self.finalize_correnspondance()
def build_correspondance(self, visible_markers):
    """Build up marker-to-surface uv correspondences from one frame.

    - use all visible markers
    - fit a convex quadrangle around them
    - use the quadrangle verts to establish a perspective transform
    - map all marker vertices into surface (u,v) space
    - accumulate uv coords per marker id until enough samples exist

    Args:
        visible_markers: list of marker dicts with 'id' and 'verts_norm'.
    """
    # ROBUSTNESS: `== []` only caught the literal empty list; `not` also
    # handles any other empty sequence instead of crashing downstream.
    if not visible_markers:
        self.m_to_screen = None
        self.m_from_screen = None
        self.detected = False
        return

    all_verts = np.array([[m['verts_norm'] for m in visible_markers]])
    all_verts.shape = (-1, 1, 2)  # [vert,vert,vert,...] with vert = [[r,c]]

    hull = cv2.convexHull(all_verts, clockwise=False)

    # simplify until we have exactly 4 verts
    if hull.shape[0] > 4:
        new_hull = cv2.approxPolyDP(hull, epsilon=1, closed=True)
        if new_hull.shape[0] >= 4:
            hull = new_hull
    if hull.shape[0] > 4:
        # still too many: keep only the 4 most acute corners
        curvature = abs(GetAnglesPolyline(hull, closed=True))
        most_acute_4_threshold = sorted(curvature)[3]
        hull = hull[curvature <= most_acute_4_threshold]

    # now roll the hull verts until we have the right orientation:
    # the vert closest to the image origin becomes the first one
    distance_to_origin = np.sqrt(hull[:, :, 0] ** 2 + hull[:, :, 1] ** 2)
    top_left_idx = np.argmin(distance_to_origin)
    hull = np.roll(hull, -top_left_idx, axis=0)

    # based on these 4 verts we calculate the transforms into a (0,0)-(1,1) square space
    self.m_to_screen = m_verts_to_screen(hull)
    self.m_from_screen = m_verts_from_screen(hull)
    self.detected = True

    # map the marker vertices into surface space
    # (one can think of these as texture coordinates u,v)
    marker_uv_coords = cv2.perspectiveTransform(all_verts, self.m_from_screen)
    marker_uv_coords.shape = (-1, 4, 1, 2)  # [marker,marker,...], marker = [[[r,c]], ...]

    # build up a dict of discovered markers, each with a history of uv coords
    for m, uv in zip(visible_markers, marker_uv_coords):
        try:
            self.markers[m['id']].add_uv_coords(uv)
        except KeyError:
            self.markers[m['id']] = Support_Marker(m['id'])
            self.markers[m['id']].add_uv_coords(uv)

    # average count of collected uv correspondences across detected markers
    self.build_up_status = sum(
        len(m.collected_uv_coords) for m in self.markers.values()
    ) / float(len(self.markers))
    if self.build_up_status >= self.required_build_up:
        self.finalize_correnspondance()
        # NOTE(review): original indentation was lost; `defined = True` is
        # placed with the finalize step — confirm against upstream history.
        self.defined = True