    def approach_location(self, point_bl, coarse_stop, fine_stop, voi_radius=.2):
        point_dist = np.linalg.norm(point_bl[0:2,0])
        rospy.loginfo('approach_location: Point is %.3f away.' % point_dist)
        map_T_base_link = tfu.transform('map', 'base_link', self.tf_listener)
        point_map = tfu.transform_points(map_T_base_link, point_bl)

        dist_threshold = coarse_stop + .1
        if point_dist > dist_threshold:
            rospy.loginfo('approach_location: Point is greater than %.1f m away (%.3f).  Driving closer.' % (dist_threshold, point_dist))
            rospy.loginfo('approach_location: point_bl ' + str(point_bl.T))

            ret = self.drive_approach_behavior(point_bl, dist_far=coarse_stop)
            base_link_T_map = tfu.transform('base_link', 'map', self.tf_listener)
            point_bl_t1 = tfu.transform_points(base_link_T_map, point_map)
            if ret != 3: #3 is actionlib's GoalStatus.SUCCEEDED
                dist_end = np.linalg.norm(point_bl_t1[0:2,0])
                if dist_end > dist_threshold:
                    rospy.logerr('approach_location: drive_approach_behavior failed! %.3f' % dist_end)
                    self.robot.sound.say("I am unable to navigate to that location")
                    return False, 'failed'

            ret = self.approach_perpendicular_to_surface(point_bl_t1, voi_radius=voi_radius, dist_approach=fine_stop)
            if ret != 3:
                rospy.logerr('approach_location: approach_perpendicular_to_surface failed!')
                return False, 'failed'

            self.robot.sound.say('done')
            rospy.loginfo('approach_location: DONE DRIVING!')
            return True, 'done'
        else:
            return False, 'ignored'
Example #2
    def move_base_planner(self, trans, rot):
        #pdb.set_trace()
        p_bl = tfu.transform_points(tfu.transform('base_link', 'map', self.tf_listener), np.matrix(trans).T)
        #Turn toward the goal first to clear out any hallucinated obstacles
        self.turn_to_point(p_bl)
        rvalue = self.robot.base.set_pose(trans, rot, '/map', block=True)
        p_bl = tfu.transform_points(tfu.transform('base_link', 'map', self.tf_listener), np.matrix(trans).T)
        #pdb.set_trace()
        self.robot.base.move_to(p_bl[0:2, 0], True)
        t_end, r_end = self.robot.base.get_pose()
        return rvalue == 3, np.linalg.norm(t_end[0:2] - np.array(trans)[0:2])
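move_base_planner returns a (reached_goal, residual_error) pair; rvalue == 3 corresponds to actionlib's GoalStatus.SUCCEEDED. A hypothetical caller (the goal pose and threshold are invented for illustration) might check both values:

    success, err_m = self.move_base_planner([1.0, 2.0, 0.0], [0., 0., 0., 1.])
    if not success or err_m > 0.15:
        rospy.logwarn('move_base_planner stopped %.2f m from the goal' % err_m)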
Example #3
    def approach_perpendicular_to_surface(self, point_bl, voi_radius,
                                          dist_approach):
        map_T_base_link0 = tfu.transform('map', 'base_link', self.tf_listener)
        point_map0 = tfu.transform_points(map_T_base_link0, point_bl)
        #pdb.set_trace()
        self.turn_to_point(point_bl, block=False)

        point_bl = tfu.transform_points(tfu.transform('base_link', 'map', self.tf_listener), \
                                        point_map0)
        point_cloud_bl = self.laser_scan.scan(math.radians(180.),
                                              math.radians(-180.), 2.5)
        point_cloud_np_bl = ru.pointcloud_to_np(point_cloud_bl)
        rospy.loginfo('approach_perpendicular_to_surface: pointcloud size %d' \
                % point_cloud_np_bl.shape[1])
        voi_points_bl, limits_bl = i3d.select_rect(point_bl, voi_radius,
                                                   voi_radius, voi_radius,
                                                   point_cloud_np_bl)
        #TODO: use closest plane instead of closest points determined with KDTree
        normal_bl = i3d.calc_normal(voi_points_bl)
        point_in_front_mechanism_bl = point_bl + normal_bl * dist_approach
        map_T_base_link = tfu.transform('map', 'base_link', self.tf_listener)
        point_in_front_mechanism_map = tfu.transform_points(
            map_T_base_link, point_in_front_mechanism_bl)

        #Navigate to point (TODO: check for collisions)
        point_map = tfu.transform_points(map_T_base_link, point_bl)
        t_current_map, r_current_map = self.robot.base.get_pose()
        rospy.loginfo('approach_perpendicular_to_surface: driving for %.3f m to front of surface' \
                % np.linalg.norm(t_current_map[0:2] - point_in_front_mechanism_map[0:2,0].T))
        #pdb.set_trace()
        rvalue = self.robot.base.set_pose(
            point_in_front_mechanism_map.T.A1.tolist(), r_current_map, 'map')
        if rvalue != 3: #3 is actionlib's GoalStatus.SUCCEEDED
            return rvalue

        t1_current_map, r1_current_map = self.robot.base.get_pose()
        rospy.loginfo(
            'approach_perpendicular_to_surface: %.3f m away from front of surface'
            % np.linalg.norm(t1_current_map[0:2] -
                             point_in_front_mechanism_map[0:2, 0].T))

        #Rotate to face point (TODO: check for collisions)
        base_link_T_map = tfu.transform('base_link', 'map', self.tf_listener)
        point_bl = tfu.transform_points(base_link_T_map, point_map)
        #pdb.set_trace()
        self.turn_to_point(point_bl, block=False)
        time.sleep(2.)

        return rvalue
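The core geometric step above is the stand-off computation: the drive goal is the surface point pushed back along its estimated normal by dist_approach. A self-contained sketch of just that step (plain NumPy, no ROS; standoff_point is an illustrative name, and the normal is assumed to point back toward the robot, as calc_normal presumably orients it):

    import numpy as np

    def standoff_point(point_bl, normal_bl, dist_approach):
        #back away from the surface point along its unit normal
        normal_bl = normal_bl / np.linalg.norm(normal_bl)
        return point_bl + normal_bl * dist_approach

    point = np.matrix([[1.0], [0.2], [0.5]])
    normal = np.matrix([[-1.0], [0.0], [0.0]])
    print(standoff_point(point, normal, 0.6).T)  # [[0.4 0.2 0.5]]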
Example #4
    def _sample_points(self):
        rospy.loginfo('Sampling points')
        #evaluate all points
        gaussian = pr.Gaussian(self.expected_loc_bl, \
                               np.matrix([[self.params.uncertainty_x**2, 0,      0], \
                                          [0, self.params.uncertainty_y**2,      0], \
                                          [0,      0, self.params.uncertainty_z**2]]))

        pdf = gaussian.pdf_mat()
        probs = np.matrix(pdf(self.pc_sub_samp_bl))

        #sample unique points
        n_samples = min(self.params.n_samples, self.pc_sub_samp_bl.shape[1])
        pt_indices = list(set(sample_points(probs.T, n_samples)))

        #project the sampled points into the image
        sampled_pts3d_bl = self.pc_sub_samp_bl[:, pt_indices]
        sampled_pts3d_image = tfu.transform_points(self.image_T_bl,
                                                   sampled_pts3d_bl)
        sampled_pts2d = self.camera_calibration.project(sampled_pts3d_image)
        sampled_pix2d = np.matrix(np.round(sampled_pts2d))

        #throw away points that are outside of bounds
        x = sampled_pix2d[0, :]
        y = sampled_pix2d[1, :]
        #all four bounds must hold at once: combine with elementwise AND ('+' acts as OR on boolean matrices)
        good_pts = np.where((x >= 0) & (x < self.camera_calibration.w)
                          & (y >= 0) & (y < self.camera_calibration.h))[1].A1

        sampled_pts3d_bl = sampled_pts3d_bl[:, good_pts]
        sampled_pix2d = sampled_pix2d[:, good_pts]

        rospy.loginfo('got %s good points' % str(sampled_pix2d.shape[1]))
        return sampled_pts3d_bl, sampled_pix2d
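The corrected bounds filter above keeps a pixel only when all four comparisons hold, hence the elementwise AND. A standalone demonstration with made-up pixels and a 640x480 image:

    import numpy as np

    pix2d = np.matrix([[ -3., 10., 639., 700.],
                       [  5., 20., 479., 100.]])
    w, h = 640, 480
    x, y = pix2d[0, :], pix2d[1, :]
    good = np.where((x >= 0) & (x < w) & (y >= 0) & (y < h))[1].A1
    print(pix2d[:, good])  #keeps only the (10, 20) and (639, 479) columns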
Example #5
    def drive_approach_behavior(self, point_bl, dist_far):
        #navigate to a point dist_far meters short of point_bl
        map_T_base_link = tfu.transform('map', 'base_link', self.tf_listener)
        point_map = tfu.transform_points(map_T_base_link, point_bl)
        t_current_map, r_current_map = self.robot.base.get_pose()
        rospy.loginfo('drive_approach_behavior: point is %.3f m away' % np.linalg.norm(t_current_map[0:2].T - point_map[0:2,0].T))

        point_dist = np.linalg.norm(point_bl)
        #clamp at zero with the built-in max; np.max(a, 0) would treat the 0 as an axis argument
        bounded_dist = max(point_dist - dist_far, 0)
        point_close_bl = (point_bl / point_dist) * bounded_dist
        point_close_map = tfu.transform_points(map_T_base_link, point_close_bl)
        rvalue = self.robot.base.set_pose(point_close_map.T.A1.tolist(), \
                                          r_current_map, '/map', block=True)
        t_end, r_end = self.robot.base.get_pose()
        rospy.loginfo('drive_approach_behavior: ended up %.3f m away from laser point' % np.linalg.norm(t_end[0:2] - point_map[0:2,0].T))
        rospy.loginfo('drive_approach_behavior: ended up %.3f m away from goal' % np.linalg.norm(t_end[0:2] - point_close_map[0:2,0].T))
        rospy.loginfo('drive_approach_behavior: returned %d' % rvalue)
        return rvalue
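The goal above lies on the ray from the robot to the point, stopped dist_far meters short and clamped at zero so an already-close robot does not back up. A minimal sketch of the arithmetic:

    import numpy as np

    point_bl = np.matrix([[2.0], [0.0], [0.0]])
    dist_far = 0.5
    point_dist = np.linalg.norm(point_bl)
    bounded_dist = max(point_dist - dist_far, 0.0)  # 1.5
    point_close_bl = (point_bl / point_dist) * bounded_dist
    print(point_close_bl.T)  # [[1.5 0. 0.]]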
Example #6
    def add_example(self, point3d_bl, label, pt2d=None):
        fex = self.feature_extractor
        feature = fex.feature_vec_at_mat(point3d_bl)
        if feature is None: #'== None' would compare elementwise on numpy types
            return False
        if pt2d is None: #respect a caller-supplied pixel; only project when absent
            pt2d = fex.calibration_obj.project(tfu.transform_points(fex.image_T_laser, point3d_bl))
        label = np.matrix(label)
        self.add_to_dataset(feature, label, pt2d, point3d_bl)
        self.last_added = {'pt': pt2d, 'l': label}
        return True
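Feature extraction can fail at a given point, so callers should check the boolean return. A hypothetical call (the learner object, point, and label are invented for illustration):

    pt_bl = np.matrix([[0.8], [0.1], [1.0]])
    if not learner.add_example(pt_bl, label=1):
        rospy.logwarn('add_example: no feature vector at that point')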
Example #9
def extract_object_localization_features2(start_conditions, tflistener,
                                          arm_used, p_base_map):
    mid_contact_bf, jstate_msgs = find_contacts_and_fk(tflistener, arm_used)
    model_surf_loc, model_surf_descriptors, surf_loc3d_pro, point_cloud_2d_pro = find3d_surf(
        start_conditions)

    #Find object frame: map SURF points back to the base frame (homogeneous row of ones appended)
    surf_loc3d_bf = (np.linalg.inv(start_conditions['pro_T_bf']) \
            * np.row_stack((surf_loc3d_pro, np.ones((1, surf_loc3d_pro.shape[1])))))[0:3,:]
    frame_bf = create_frame(surf_loc3d_bf, p=np.matrix([-1, 0, 0.]).T)
    center_bf = np.mean(surf_loc3d_bf, 1)

    #Find out what the SURF features point to in this new frame
    bf_R_pro = (start_conditions['pro_T_bf'][0:3, 0:3]).T
    bf_R_obj = frame_bf
    x_bf = frame_bf[:, 0]
    x_pro = bf_R_pro.T * x_bf
    x_ang_pro = math.atan2(x_pro[1, 0], x_pro[0, 0])

    center_pro = tfu.transform_points(start_conditions['pro_T_bf'], center_bf)
    center2d_pro = start_conditions['camera_info'].project(center_pro)

    surf_directions = []
    surf_dir_center = []
    for loc, lap, size, direction, hess in model_surf_loc:
        surf_directions.append(
            ut.standard_rad(np.radians(direction) - x_ang_pro))
        direction_to_center = center2d_pro - np.matrix(loc).T
        surf_dir_center.append(direction_to_center)

    surf_dir_center = np.column_stack(surf_dir_center)
    return {
        'contact_bf': mid_contact_bf,
        'surf_loc3d_pro': surf_loc3d_pro,
        'surf_loc2d_pro': model_surf_loc,
        'point_cloud_2d_pro': point_cloud_2d_pro,
        'surf_directions': surf_directions,  #Orientation
        'surf_pose_dir2d': surf_dir_center,  #Position
        'descriptors': model_surf_descriptors,
        'jtransforms': jstate_msgs,
        'frame_bf': frame_bf,
        'center_bf': center_bf
    }
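The per-feature loop stores each SURF orientation relative to the object frame's x-axis so orientations can be compared across views. ut.standard_rad is not shown on this page; a minimal sketch, assuming it wraps angles into [-pi, pi):

    import math

    def standard_rad(t):
        #assumed behavior of ut.standard_rad: wrap an angle into [-pi, pi)
        return (t + math.pi) % (2.0 * math.pi) - math.pi

    print(standard_rad(math.radians(350.)))  # -0.1745 rad, i.e. -10 degrees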
Example #10
def find3d_surf(start_conditions):
    ## Project pointcloud into 2d
    point_cloud_bf = ru.pointcloud_to_np(start_conditions['points'])
    # from base_frame to prosilica frame
    point_cloud_pro = tfu.transform_points(start_conditions['pro_T_bf'], point_cloud_bf)
    point_cloud_2d_pro, point_cloud_reduced_pro = project_2d_bounded(start_conditions['camera_info'], point_cloud_pro)
    ## only count points in image bounds (project_2d_bounded already discards out-of-bounds points)

    ## Find 3D SURF features
    model_file_name = start_conditions['model_image']
    model_surf_loc, model_surf_descriptors = fea.surf_color(cv.LoadImage(model_file_name))
    surf_loc3d_pro = np.matrix(assign_3d_to_surf(model_surf_loc, point_cloud_reduced_pro, point_cloud_2d_pro))
    return model_surf_loc, model_surf_descriptors, surf_loc3d_pro, point_cloud_2d_pro
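assign_3d_to_surf is also not shown here; presumably it pairs each SURF keypoint with the 3D point whose projection lands nearest its pixel. A rough sketch under that assumption (the signature and logic are guesses, not the original implementation):

    import numpy as np

    def assign_3d_to_surf(surf_locs, points3d, points2d):
        #for each keypoint, pick the 3D point projecting closest to it
        p2 = np.asarray(points2d)
        picked = []
        for loc, lap, size, direction, hess in surf_locs:
            d2 = ((p2 - np.array([[loc[0]], [loc[1]]])) ** 2).sum(axis=0)
            picked.append(points3d[:, int(np.argmin(d2))])
        return np.column_stack(picked)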
Example #14
def combine_scan_and_image_laser_frame(points_in_laser_frame, image_T_laser, image, cal_obj):
    points_in_image_frame = tfu.transform_points(image_T_laser, points_in_laser_frame)
    return combine_scan_and_image(points_in_image_frame, image, cal_obj)
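Every example on this page funnels through tfu.transform_points, whose implementation is not shown. A minimal NumPy sketch of the assumed behavior (apply a 4x4 homogeneous transform to a 3xN matrix of column points):

    import numpy as np

    def transform_points(T, pts):
        #assumed: T is a 4x4 homogeneous transform, pts is 3xN
        homogeneous = np.row_stack((pts, np.ones((1, pts.shape[1]))))
        return (T * homogeneous)[0:3, :]

    T = np.matrix(np.eye(4))
    T[0:3, 3] = np.matrix([[1.], [2.], [3.]])  #pure translation
    print(transform_points(T, np.matrix([[0.], [0.], [0.]])).T)  # [[1. 2. 3.]]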