def get_affine_matrix(self, path, start_idx, origin, world_scaling_factor):
    """Build a 2D affine matrix mapping world coordinates into an image frame
    anchored at path[start_idx] and aligned with the local path direction.

    :param path: sequence of 2D points (numpy arrays) forming the path
    :param start_idx: index of the anchor point on the path
    :param origin: image-frame point where the anchor should land (the frame
                   center); the output crop size is twice this value per axis
    :param world_scaling_factor: world-to-image scale applied uniformly to x/y
    :return: (affine_total, out_crop_size), or (None, None) when start_idx is
             too close to the end of the path to derive a direction vector
    """
    # Need at least one following point to compute a direction vector.
    if start_idx > len(path) - 2:
        return None, None

    start_pt = path[start_idx]
    next_pt = path[start_idx + 1]
    dir_vec = next_pt - start_pt
    dir_yaw = vec_to_yaw(dir_vec)

    # Optional yaw augmentation.
    if self.yaw_rand_range > 0:
        dir_yaw += random.uniform(-self.yaw_rand_range, self.yaw_rand_range)

    # Optional position augmentation: random offset in a random direction.
    # NOTE(review): the gate reads self.pos_rand_image but the magnitude uses
    # self.pos_rand_range — this looks like a typo; confirm which attribute
    # is intended before relying on the augmentation being enabled/disabled.
    if self.pos_rand_image > 0:
        pos_offset = random.uniform(0, self.pos_rand_range)
        angle = random.uniform(-np.pi, np.pi)
        offset_vec = pos_offset * np.array([np.cos(angle), np.sin(angle)])
        # Fixed: out-of-place addition. The original `start_pt += offset_vec`
        # wrote into path[start_idx] in place, corrupting the caller's path.
        start_pt = start_pt + offset_vec

    affine_s = get_affine_scale_2d([world_scaling_factor, world_scaling_factor])
    affine_t = get_affine_trans_2d(-start_pt)
    affine_rot = get_affine_rot_2d(-dir_yaw)
    affine_t2 = get_affine_trans_2d(origin)

    # Applied right-to-left: translate anchor to origin -> rotate to heading
    # -> scale to image units -> recenter at `origin`.
    affine_total = np.dot(affine_t2, np.dot(affine_s, np.dot(affine_rot, affine_t)))

    out_crop_size = tuple(np.asarray(origin) * 2)
    return affine_total, out_crop_size
def pose_2d_to_mat_np(self, pose_2d, inv=False):
    """Convert a single 2D pose into a 3x3 affine matrix that maps world
    coordinates into the egocentric map frame (pose centered in the map,
    with x/y axes swapped for the BxCxHxW a.k.a. BxCxYxX convention).

    :param pose_2d: object with .position (2D) and .orientation (yaw scalar)
    :param inv: if True, return the inverse mapping instead
    :return: 3x3 affine matrix as a numpy array
    """
    # TODO: Add batch support
    position = pose_2d.position
    heading = pose_2d.orientation

    # Shift so the pose sits at the origin.
    shift_to_origin = get_affine_trans_2d(-position)
    # Align with the pose heading. (The original negated yaw twice,
    # which collapses to a rotation by the heading itself.)
    align_rotation = get_affine_rot_2d(heading)
    # Recenter at the middle of the map.
    recenter = get_affine_trans_2d([self.map_size / 2, self.map_size / 2])

    mat = np.dot(recenter, np.dot(align_rotation, shift_to_origin))

    # Swap x and y axes (because of the BxCxHxW a.k.a BxCxYxX convention).
    mat = mat[[1, 0, 2], :][:, [1, 0, 2]]

    return np.linalg.inv(mat) if inv else mat
def _transform_img_to_pose(self, image_out, img_to_transform, pose, img_scale):
    """Warp img_to_transform into image_out's frame so that it appears at the
    given pose, scaled so its width covers img_scale of the output width.

    :param image_out: output-frame image; only its width (shape[1]) is used
    :param img_to_transform: source image to scale/rotate/place
    :param pose: object with .position (2D) and .orientation (yaw scalar)
    :param img_scale: fraction of the output width the source should occupy
    :return: (warped_img, mask) where mask is int64 and marks pixels > 1e-10
    """
    out_px = image_out.shape[1]
    heading = pose.orientation
    target_px = out_px * img_scale
    shrink = target_px / img_to_transform.shape[1]

    # Transforms compose in application order:
    # 1) scale down to the target size
    m_scale = get_affine_scale_2d(np.asarray([shrink, shrink]))
    # 2) center the scaled image on the origin
    m_center = get_affine_trans_2d(
        np.asarray([-target_px / 2, -target_px / 2]))
    # 3) rotate to the pose heading
    m_rotate = get_affine_rot_2d(heading)
    # 4) translate to the pose position
    m_place = get_affine_trans_2d(pose.position)

    m_total = np.dot(
        m_place, np.dot(m_rotate, np.dot(m_center, m_scale)))

    warped = cv2.warpAffine(img_to_transform, m_total[:2, :], (out_px, out_px))
    mask = (warped > 1e-10).astype(np.int64)
    return warped, mask
def poses_2d_to_mat_np(self, pose_2d, map_size, inv=False):
    """Batched variant of pose_2d_to_mat_np: convert a batch of 2D poses into
    Bx3x3 affine matrices mapping world coordinates into egocentric map
    frames (pose centered, axes swapped for the BxCxHxW/BxCxYxX convention).

    :param pose_2d: object with batched .position and .orientation
    :param map_size: side length of the square map in pixels
    :param inv: if True, return the inverse mappings instead
    :return: Bx3x3 numpy array of affine matrices
    """
    positions = np.asarray(pose_2d.position)
    headings = np.asarray(pose_2d.orientation)

    # Shift each pose to the origin.
    to_origin = get_affine_trans_2d(-positions, batch=True)
    # Align with each heading. (The original negated yaw twice, which
    # collapses to a rotation by the heading itself.)
    rotate = get_affine_rot_2d(headings, batch=True)
    # One shared recentering transform, broadcast over the batch.
    recenter = get_affine_trans_2d(
        np.asarray([map_size / 2, map_size / 2]), batch=False)

    mat = np.matmul(recenter, np.matmul(rotate, to_origin))

    # Swap x and y axes (because of the BxCxHxW a.k.a BxCxYxX convention),
    # applied per batch element.
    mat = mat[:, [1, 0, 2], :][:, :, [1, 0, 2]]

    return np.linalg.inv(mat) if inv else mat
def get_affine_matrix(start_pt, dir_yaw, img_w, img_h):
    """Build an affine matrix that moves start_pt to the image center and
    rotates the frame by -dir_yaw (so the direction becomes axis-aligned).

    :param start_pt: 2D anchor point in world coordinates
    :param dir_yaw: heading angle (radians) to rotate out
    :param img_w: output image width in pixels
    :param img_h: output image height in pixels
    :return: 3x3 affine matrix as a numpy array
    """
    center = np.array([img_w / 2, img_h / 2])
    to_origin = get_affine_trans_2d(-start_pt)
    rotate = get_affine_rot_2d(-dir_yaw)
    recenter = get_affine_trans_2d(center)
    return np.dot(recenter, np.dot(rotate, to_origin))