Example #1
def test_retraction(robot, info, tool_link, distance=0.1, **kwargs):
    ik_joints = get_ik_joints(robot, info, tool_link)
    start_pose = get_link_pose(robot, tool_link)
    end_pose = multiply(start_pose, Pose(Point(z=-distance)))
    handles = [
        add_line(point_from_pose(start_pose),
                 point_from_pose(end_pose),
                 color=BLUE)
    ]
    #handles.extend(draw_pose(start_pose))
    #handles.extend(draw_pose(end_pose))
    path = []
    pose_path = list(
        interpolate_poses(start_pose, end_pose, pos_step_size=0.01))
    for i, pose in enumerate(pose_path):
        print('Waypoint: {}/{}'.format(i + 1, len(pose_path)))
        handles.extend(draw_pose(pose))
        conf = next(
            either_inverse_kinematics(robot, info, tool_link, pose, **kwargs),
            None)
        if conf is None:
            print('Failure!')
            path = None
            wait_for_user()
            break
        set_joint_positions(robot, ik_joints, conf)
        path.append(conf)
        wait_for_user()
        # for conf in islice(ikfast_inverse_kinematics(robot, info, tool_link, pose, max_attempts=INF, max_distance=0.5), 1):
        #    set_joint_positions(robot, joints[:len(conf)], conf)
        #    wait_for_user()
    remove_handles(handles)
    return path
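A minimal driver for the example above, as a hedged sketch: PANDA_INFO comes from pybullet-planning's IKFast bindings, while the URDF path, the tool-link name, and the keyword arguments forwarded to either_inverse_kinematics are illustrative assumptions rather than canonical values.

from pybullet_tools.utils import connect, disconnect, load_pybullet, link_from_name
from pybullet_tools.ikfast.franka_panda.ik import PANDA_INFO

FRANKA_URDF = 'models/franka_description/robots/panda_arm_hand.urdf'  # assumed path

connect(use_gui=True)
robot = load_pybullet(FRANKA_URDF, fixed_base=True)  # load the Panda arm + hand
tool_link = link_from_name(robot, 'panda_hand')      # assumed tool link name
# Retract 10 cm along the tool z-axis, solving IK at every interpolated waypoint
path = test_retraction(robot, PANDA_INFO, tool_link, distance=0.1,
                       max_time=0.05)  # extra kwargs reach either_inverse_kinematics
disconnect()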
Example #2
def are_visible(world):
    ray_names = []
    rays = []
    for name in world.movable:
        for camera, info in world.cameras.items():
            camera_pose = get_pose(info.body)
            camera_point = point_from_pose(camera_pose)
            point = point_from_pose(get_pose(world.get_body(name)))
            if is_visible_point(CAMERA_MATRIX,
                                KINECT_DEPTH,
                                point,
                                camera_pose=camera_pose):
                ray_names.append(name)
                rays.append(Ray(camera_point, point))
    ray_results = batch_ray_collision(rays)
    visible_indices = [
        idx for idx, (name, result) in enumerate(zip(ray_names, ray_results))
        if result.objectUniqueId == world.get_body(name)
    ]
    visible_names = {ray_names[idx] for idx in visible_indices}
    print('Detected:', sorted(visible_names))
    if has_gui():
        handles = [
            add_line(rays[idx].start, rays[idx].end, color=BLUE)
            for idx in visible_indices
        ]
        wait_for_duration(1.0)
        remove_handles(handles)
    # TODO: the object drop seems to happen around here
    return visible_names
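Every example in this list leans on the same pose convention; a minimal sketch of what point_from_pose and quat_from_pose return (pybullet-style poses are (position, quaternion) pairs):

pose = ((1.0, 0.5, 0.2), (0.0, 0.0, 0.0, 1.0))  # ((x, y, z), (qx, qy, qz, qw))
point = pose[0]  # point_from_pose(pose): the Cartesian position
quat = pose[1]   # quat_from_pose(pose): the unit quaternion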
Example #3
    def score_fn(plan):
        assert plan is not None
        initial_distance = get_distance(
            point_from_pose(initial_pose)[:2], goal_pos2d)
        final_pose = world.perception.get_pose(block_name)
        final_distance = get_distance(
            point_from_pose(final_pose)[:2], goal_pos2d)
        quat_distance = quat_angle_between(quat_from_pose(initial_pose),
                                           quat_from_pose(final_pose))
        print(
            'Initial: {:.5f} m | Final: {:.5f} m | Rotation: {:.5f} rads'.format(
                initial_distance, final_distance, quat_distance))
        # TODO: compare orientation to final predicted orientation
        # TODO: record simulation time in the event that the controller gets stuck

        score = {
            'initial_distance': initial_distance,
            'final_distance': final_distance,
            'rotation': quat_distance,
            DYNAMICS: parameters_from_name,
        }

        #_, args = find_unique(lambda a: a[0] == 'push', plan)
        #control = args[-1]
        return score
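The two error metrics above are simple to reconstruct; hedged stand-ins matching the semantics of get_distance and quat_angle_between (the latter uses the standard 2*arccos(|q1 . q2|) angle between unit quaternions):

import numpy as np

def get_distance(p1, p2):
    # Euclidean distance between two points
    return np.linalg.norm(np.array(p2) - np.array(p1))

def quat_angle_between(q1, q2):
    # rotation angle in radians separating two unit quaternions
    inner = np.clip(abs(np.dot(q1, q2)), 0.0, 1.0)
    return 2.0 * np.arccos(inner)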
Example #4
    def score_fn(plan):
        assert plan is not None
        final_pose = world.get_pose(bowl_name)
        point_distance = get_distance(point_from_pose(initial_pose),
                                      point_from_pose(final_pose))  #, norm=2)
        quat_distance = quat_angle_between(quat_from_pose(initial_pose),
                                           quat_from_pose(final_pose))
        print('Translation: {:.5f} m | Rotation: {:.5f} rads'.format(
            point_distance, quat_distance))

        with ClientSaver(world.client):
            bowl_beads = get_contained_beads(bowl_body, init_beads)
            fraction_bowl = float(
                len(bowl_beads)) / len(init_beads) if init_beads else 0
            mass_in_bowl = sum(map(get_mass, bowl_beads))
            spoon_beads = get_contained_beads(world.get_body(spoon_name),
                                              init_beads)
            fraction_spoon = float(
                len(spoon_beads)) / len(init_beads) if init_beads else 0
            mass_in_spoon = sum(map(get_mass, spoon_beads))
        print('In Bowl: {:.3f} | In Spoon: {:.3f}'.format(
            fraction_bowl, fraction_spoon))
        # TODO: measure change in roll/pitch

        # TODO: could make latent parameters field
        score = {
            # Displacements
            'bowl_translation': point_distance,
            'bowl_rotation': quat_distance,
            # Masses
            'total_mass': init_mass,
            'mass_in_bowl': mass_in_bowl,
            'mass_in_spoon': mass_in_spoon,
            'spoon_mass_capacity':
            (init_mass / len(init_beads)) * spoon_capacity,
            # Counts
            'num_beads': len(init_beads),
            'bowl_beads': len(bowl_beads),
            'spoon_beads': len(spoon_beads),
            'spoon_capacity': spoon_capacity,
            # Fractions
            'fraction_in_bowl': fraction_bowl,
            'fraction_in_spoon': fraction_spoon,
            # Latent
            'bead_radius': bead_radius,
            DYNAMICS: parameters_from_name
        }

        fraction_filled = float(score['spoon_beads']) / score['spoon_capacity']
        spilled_beads = score['num_beads'] - (score['bowl_beads'] +
                                              score['spoon_beads'])
        fraction_spilled = float(spilled_beads) / score['num_beads']
        print('Fraction Filled: {} | Fraction Spilled: {}'.format(
            fraction_filled, fraction_spilled))

        #_, args = find_unique(lambda a: a[0] == 'scoop', plan)
        #control = args[-1]
        return score
Example #5
def main():
    connect(use_gui=True)
    with HideOutput():
        pr2 = load_model(
            "models/drake/pr2_description/urdf/pr2_simplified.urdf")
    set_joint_positions(pr2, joints_from_names(pr2, PR2_GROUPS['left_arm']),
                        REST_LEFT_ARM)
    set_joint_positions(pr2, joints_from_names(pr2, PR2_GROUPS['right_arm']),
                        rightarm_from_leftarm(REST_LEFT_ARM))
    set_joint_positions(pr2, joints_from_names(pr2, PR2_GROUPS['torso']),
                        [0.2])
    dump_body(pr2)

    block = load_model(BLOCK_URDF, fixed_base=False)
    set_point(block, [2, 0.5, 1])
    target_point = point_from_pose(get_pose(block))

    # head_link = link_from_name(pr2, HEAD_LINK)
    head_joints = joints_from_names(pr2, PR2_GROUPS['head'])
    head_link = head_joints[-1]

    #max_detect_distance = 2.5
    max_register_distance = 1.0
    distance_range = (max_register_distance / 2, max_register_distance)
    base_generator = visible_base_generator(pr2, target_point, distance_range)

    base_joints = joints_from_names(pr2, PR2_GROUPS['base'])
    for i in range(5):
        base_conf = next(base_generator)
        set_joint_positions(pr2, base_joints, base_conf)

        p.addUserDebugLine(point_from_pose(get_link_pose(pr2, head_link)),
                           target_point,
                           lineColorRGB=(1, 0, 0))  # addUserDebugText
        p.addUserDebugLine(point_from_pose(
            get_link_pose(pr2, link_from_name(pr2, HEAD_LINK_NAME))),
                           target_point,
                           lineColorRGB=(0, 0, 1))  # addUserDebugText

        # head_conf = sub_inverse_kinematics(pr2, head_joints[0], HEAD_LINK, )
        head_conf = inverse_visibility(pr2, target_point)
        set_joint_positions(pr2, head_joints, head_conf)
        print(get_detections(pr2))
        # TODO: does this detect the robot sometimes?

        detect_mesh, z = get_detection_cone(pr2, block)
        detect_cone = create_mesh(detect_mesh, color=(0, 1, 0, 0.5))
        set_pose(detect_cone,
                 get_link_pose(pr2, link_from_name(pr2, HEAD_LINK_NAME)))
        view_cone = get_viewcone(depth=2.5, color=(1, 0, 0, 0.25))
        set_pose(view_cone,
                 get_link_pose(pr2, link_from_name(pr2, HEAD_LINK_NAME)))
        wait_for_user()
        remove_body(detect_cone)
        remove_body(view_cone)

    disconnect()
Example #6
File: pour.py Project: lyltc1/LTAMP
def get_water_path(bowl_body, bowl_pose, cup_body, pose_waypoints):
    cup_pose = pose_waypoints[0]
    bowl_origin_from_base = get_urdf_from_base(
        bowl_body)  # TODO: reference pose
    cup_origin_from_base = get_urdf_from_base(cup_body)
    ray_start = point_from_pose(multiply(cup_pose, cup_origin_from_base))
    ray_end = point_from_pose(multiply(bowl_pose, bowl_origin_from_base))
    water_path = interpolate_poses((ray_start, unit_quat()),
                                   (ray_end, unit_quat()),
                                   pos_step_size=0.01)
    return water_path
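Because both endpoints share unit_quat(), interpolate_poses degenerates here to linear interpolation of positions; a simplified sketch of that special case (the real helper also interpolates the quaternions):

import numpy as np

def interpolate_points(p1, p2, pos_step_size=0.01):
    p1, p2 = np.array(p1), np.array(p2)
    num_steps = max(2, int(np.ceil(np.linalg.norm(p2 - p1) / pos_step_size)) + 1)
    for t in np.linspace(0.0, 1.0, num_steps):
        yield (1.0 - t) * p1 + t * p2  # waypoint along the cup-to-bowl ray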
Example #7
def visualize_learned_pours(learner, bowl_type='blue_bowl', cup_type='red_cup', num_steps=None):
    learner.query_type = REJECTION # BEST | REJECTION | STRADDLE
    learner.validity_learner = None

    world = create_world()
    #add_obstacles()
    #environment = get_bodies()

    world.bodies[bowl_type] = load_cup_bowl(bowl_type)
    world.bodies[cup_type] = load_cup_bowl(cup_type)
    feature = get_pour_feature(world, bowl_type, cup_type)
    draw_pose(get_pose(world.get_body(bowl_type)), length=0.2)
    # TODO: sample different sizes

    print('Feature:', feature)
    for parameter in learner.parameter_generator(world, feature, valid=False):
        handles = []
        print('Parameter:', parameter)
        valid = is_valid_pour(world, feature, parameter)
        score = learner.score(feature, parameter)
        x = learner.func.x_from_feature_parameter(feature, parameter)
        [prediction] = learner.predict_x(np.array([x]), noise=True)
        print('Valid: {} | Mean: {:.3f} | Var: {:.3f} | Score: {:.3f}'.format(
            valid, prediction['mean'], prediction['variance'], score))

        #fraction = rescale(clip(prediction['mean'], *DEFAULT_INTERVAL), DEFAULT_INTERVAL, new_interval=(0, 1))
        #color = (1 - fraction) * np.array(RED) + fraction * np.array(GREEN)
        #set_color(world.get_body(cup_type), apply_alpha(color, alpha=0.25))
        # TODO: overlay many cups at different poses
        bowl_from_pivot = get_bowl_from_pivot(world, feature, parameter)
        #handles.extend(draw_pose(bowl_from_pivot))
        handles.extend(draw_point(point_from_pose(bowl_from_pivot), color=BLACK))

        # TODO: could label bowl, cup dimensions
        path, _ = pour_path_from_parameter(world, feature, parameter, cup_yaw=0)
        for p1, p2 in safe_zip(path[:-1], path[1:]):
            handles.append(add_line(point_from_pose(p1), point_from_pose(p2), color=RED))

        if num_steps is not None:
            path = path[:num_steps]
        for i, pose in enumerate(path[::-1]):
            #step_handles = draw_pose(pose, length=0.1)
            #step_handles = draw_point(point_from_pose(pose), color=BLACK)
            set_pose(world.get_body(cup_type), pose)
            print('Step {}/{}'.format(i+1, len(path)))
            wait_for_user()
            #remove_handles(step_handles)
        remove_handles(handles)
Example #8
def save_inverse_reachability(robot, arm, grasp_type, tool_link,
                              gripper_from_base_list):
    # TODO: store value of torso and roll joint for the IK database. Sample the roll joint.
    # TODO: hash the pr2 urdf as well
    filename = IR_FILENAME.format(grasp_type, arm)
    path = get_database_file(filename)
    data = {
        'filename': filename,
        'robot': get_body_name(robot),
        'grasp_type': grasp_type,
        'arm': arm,
        'torso': get_group_conf(robot, 'torso'),
        'carry_conf': get_carry_conf(arm, grasp_type),
        'tool_link': tool_link,
        'ikfast': is_ik_compiled(),
        'gripper_from_base': gripper_from_base_list,
    }
    write_pickle(path, data)

    if has_gui():
        handles = []
        for gripper_from_base in gripper_from_base_list:
            handles.extend(
                draw_point(point_from_pose(gripper_from_base),
                           color=(1, 0, 0)))
        wait_for_user()
    return path
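A hedged sketch of the loading counterpart, reconstructed from the fields stored above and assuming read_pickle is the inverse of write_pickle (pybullet-planning ships a similar load_inverse_reachability):

def load_gripper_from_base(robot, arm, grasp_type):
    filename = IR_FILENAME.format(grasp_type, arm)
    path = get_database_file(filename)
    data = read_pickle(path)
    # guard against reusing a database built for a different robot model
    assert data['robot'] == get_body_name(robot)
    return data['gripper_from_base']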
Example #9
def relative_detections(belief, detections):
    world = belief.world
    rel_detections = {}
    world_aabb = world.get_world_aabb()
    for name in detections:
        if name == belief.holding:
            continue
        body = world.get_body(name)
        for observed_pose in detections[name]:
            world_z_axis = np.array([0, 0, 1])
            local_z_axis = tform_point(observed_pose, world_z_axis)
            if np.pi / 2 < angle_between(world_z_axis, local_z_axis):
                observed_pose = multiply(observed_pose,
                                         Pose(euler=Euler(roll=np.pi)))
            if not aabb_contains_point(point_from_pose(observed_pose),
                                       world_aabb):
                continue
            set_pose(body, observed_pose)
            support = world.get_supporting(name)
            #assert support is not None
            # Could also fix as relative to the world
            if support is None:
                # TODO: prune if nowhere near a surface (e.g. on the robot)
                relative_pose = create_world_pose(world, name, init=True)
            else:
                relative_pose = create_relative_pose(world,
                                                     name,
                                                     support,
                                                     init=True)
            rel_detections.setdefault(name, []).append(relative_pose)
            # relative_pose.assign()
    return rel_detections
Example #10
def load_json_problem(problem_filename):
    reset_simulation()
    json_path = os.path.join(get_json_directory(), problem_filename)
    with open(json_path, 'r') as f:
        problem_json = json.load(f)

    set_camera_pose(point_from_pose(parse_pose(problem_json['camera'])))

    task_json = problem_json['task']
    important_bodies = []
    for field in BODY_FIELDS:
        important_bodies.extend(task_json[field])

    robots = {
        robot['name']: parse_robot(robot)
        for robot in problem_json['robots']
    }
    bodies = {
        body['name']: parse_body(body, (body['name'] in important_bodies))
        for body in problem_json['bodies']
    }
    regions = {
        region['name']: parse_region(region)
        for region in task_json['regions']
    }
    #print(get_image())

    return parse_task(task_json, robots, bodies, regions)
Example #11
 def fix_pose(self, name, pose=None, fraction=0.5):
     body = self.get_body(name)
     if pose is None:
         pose = get_pose(body)
     else:
         set_pose(body, pose)
     # TODO: can also drop in simulation
     x, y, z = point_from_pose(pose)
     roll, pitch, yaw = euler_from_quat(quat_from_pose(pose))
     quat = quat_from_euler(Euler(yaw=yaw))
     set_quat(body, quat)
     surface_name = self.get_supporting(name)
     if surface_name is None:
         return None, None
     if fraction == 0:
         new_pose = (Point(x, y, z), quat)
         return new_pose, surface_name
     surface_aabb = compute_surface_aabb(self, surface_name)
     new_z = (1 - fraction) * z + fraction * stable_z_on_aabb(
         body, surface_aabb)
     point = Point(x, y, new_z)
     set_point(body, point)
     print('{} error: roll={:.3f}, pitch={:.3f}, z-delta: {:.3f}'.format(
         name, roll, pitch, new_z - z))
     new_pose = (point, quat)
     return new_pose, surface_name
Example #12
def compute_surface_aabb(world, surface_name):
    if surface_name in ENV_SURFACES: # TODO: clean this up
        # TODO: the aabb for golf is off the table
        surface_body = world.environment_bodies[surface_name]
        return get_aabb(surface_body)
    surface_body = world.kitchen
    surface_name, shape_name, _ = surface_from_name(surface_name)
    surface_link = link_from_name(surface_body, surface_name)
    surface_pose = get_link_pose(surface_body, surface_link)
    if shape_name == SURFACE_TOP:
        surface_aabb = get_aabb(surface_body, surface_link)
    elif shape_name == SURFACE_BOTTOM:
        data = sorted(get_collision_data(surface_body, surface_link),
                      key=lambda d: point_from_pose(get_data_pose(d))[2])[0]
        extent = np.array(get_data_extents(data))
        aabb = AABB(-extent/2., +extent/2.)
        vertices = apply_affine(multiply(surface_pose, get_data_pose(data)), get_aabb_vertices(aabb))
        surface_aabb = aabb_from_points(vertices)
    else:
        [data] = filter(lambda d: d.filename != '',
                        get_collision_data(surface_body, surface_link))
        meshes = read_obj(data.filename)
        #colors = spaced_colors(len(meshes))
        #set_color(surface_body, link=surface_link, color=np.zeros(4))
        mesh = meshes[shape_name]
        #for i, (name, mesh) in enumerate(meshes.items()):
        mesh = tform_mesh(multiply(surface_pose, get_data_pose(data)), mesh=mesh)
        surface_aabb = aabb_from_points(mesh.vertices)
        #add_text(surface_name, position=surface_aabb[1])
        #draw_mesh(mesh, color=colors[i])
        #wait_for_user()
    #draw_aabb(surface_aabb)
    #wait_for_user()
    return surface_aabb
Example #13
 def at(self, time_from_start):
     current_conf = super(PrintTrajectory, self).at(time_from_start)
     if current_conf is None:
         if self.last_point is not None:
             #set_configuration(self.robot, INITIAL_CONF) # TODO: return here
             end_point = point_from_pose(self.tool_path[-1])
             self.handles.append(
                 add_line(self.last_point, end_point, color=RED))
             self.last_point = None
     else:
         if self.last_point is None:
             self.last_point = point_from_pose(self.tool_path[0])
         current_point = point_from_pose(self.end_effector.get_tool_pose())
         self.handles.append(
             add_line(self.last_point, current_point, color=RED))
         self.last_point = current_point
     return current_conf
Example #14
    def score_fn(plan):
        assert plan is not None
        final_pose = world.get_pose(bowl_name)
        point_distance = get_distance(point_from_pose(initial_pose),
                                      point_from_pose(final_pose))  #, norm=2)
        quat_distance = quat_angle_between(quat_from_pose(initial_pose),
                                           quat_from_pose(final_pose))
        print('Translation: {:.5f} m | Rotation: {:.5f} rads'.format(
            point_distance, quat_distance))

        with ClientSaver(world.client):
            # TODO: lift the bowl up (with particles around) to prevent scale detections
            final_bowl_beads = get_contained_beads(bowl_body, init_beads)
            fraction_bowl = safe_ratio(len(final_bowl_beads),
                                       len(init_beads),
                                       undefined=0)
            mass_in_bowl = sum(map(get_mass, final_bowl_beads))
            final_cup_beads = get_contained_beads(cup_body, init_beads)
            fraction_cup = safe_ratio(len(final_cup_beads),
                                      len(init_beads),
                                      undefined=0)
            mass_in_cup = sum(map(get_mass, final_cup_beads))
        print('In Bowl: {} | In Cup: {}'.format(fraction_bowl, fraction_cup))

        score = {
            # Displacements
            'bowl_translation': point_distance,
            'bowl_rotation': quat_distance,
            # Masses
            'mass_in_bowl': mass_in_bowl,
            'mass_in_cup': mass_in_cup,
            # Counts
            'bowl_beads': len(final_bowl_beads),
            'cup_beads': len(final_cup_beads),
            # Fractions
            'fraction_in_bowl': fraction_bowl,
            'fraction_in_cup': fraction_cup,
        }
        score.update(latent)
        # TODO: store the cup path length to bias towards shorter paths

        #_, args = find_unique(lambda a: a[0] == 'pour', plan)
        #control = args[-1]
        #feature = control['feature']
        #parameter = control['parameter']
        return score
Example #15
 def get_nearby(self, target_pose, radius=NEARBY_RADIUS):
     # TODO: could instead use the probability density
     target_point = np.array(
         point_from_pose(target_pose.get_reference_from_body()))
     draw_circle(target_point,
                 radius,
                 parent=target_pose.reference_body,
                 parent_link=target_pose.reference_link)
     poses = set()
     for pose in self.dist.support():
         if target_pose.support != pose.support:
             continue
         point = point_from_pose(pose.get_reference_from_body())
         delta = target_point - point
         if np.linalg.norm(delta[:2]) < radius:
             poses.add(pose)
     prob = sum(map(self.discrete_prob, poses))
     #poses = {target_pose}
     return Neighborhood(poses, prob)
Example #16
def compute_detectable(poses, camera_pose):
    detectable_poses = set()
    for pose in poses:
        point = point_from_pose(pose.get_world_from_body())
        if is_visible_point(CAMERA_MATRIX,
                            KINECT_DEPTH,
                            point,
                            camera_pose=camera_pose):
            detectable_poses.add(pose)
    return detectable_poses
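A simplified sketch of the pinhole test behind is_visible_point, assuming CAMERA_MATRIX is a 3x3 intrinsic matrix with the principal point at the image center and the point already expressed in the camera frame:

import numpy as np

def visible_point(camera_matrix, max_depth, point_camera):
    x, y, z = point_camera            # point in the camera frame
    if not (0.0 < z < max_depth):     # in front of the camera and within range
        return False
    u, v, w = np.dot(camera_matrix, [x, y, z])
    u, v = u / w, v / w               # perspective divide to pixel coordinates
    width = 2 * camera_matrix[0, 2]   # principal point assumed at the center
    height = 2 * camera_matrix[1, 2]
    return (0 <= u < width) and (0 <= v < height)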
Example #17
 def extract_point(loc):
     if isinstance(loc, str):
         name = loc.split('-')[0]
         conf = initial_confs[name]
         conf.assign()
         robot = index_from_name(robots, name)
         return point_from_pose(
             get_link_pose(robot, link_from_name(robot, TOOL_LINK)))
     else:
         return node_points[loc]
Example #18
    def score_fn(plan):
        assert plan is not None
        with ClientSaver(world.client):
            rgb_image = take_image(world, bowl_body, beads_per_color)
            values = score_image(rgb_image, bead_colors, beads_per_color)

        final_pose = perception.get_pose(bowl_name)
        point_distance = get_distance(point_from_pose(initial_pose),
                                      point_from_pose(final_pose))  #, norm=2)
        quat_distance = quat_angle_between(quat_from_pose(initial_pose),
                                           quat_from_pose(final_pose))
        print('Translation: {:.5f} m | Rotation: {:.5f} rads'.format(
            point_distance, quat_distance))

        with ClientSaver(world.client):
            all_beads = list(flatten(beads_per_color))
            bowl_beads = get_contained_beads(bowl_body, all_beads)
            fraction_bowl = float(
                len(bowl_beads)) / len(all_beads) if all_beads else 0
        print('In Bowl: {}'.format(fraction_bowl))

        with ClientSaver(world.client):
            final_dispersion = compute_dispersion(bowl_body, beads_per_color)
        print('Initial Dispersion: {:.3f} | Final Dispersion {:.3f}'.format(
            initial_distance, final_dispersion))

        score = {
            'bowl_translation': point_distance,
            'bowl_rotation': quat_distance,
            'fraction_in_bowl': fraction_bowl,
            'initial_dispersion': initial_distance,
            'final_dispersion': final_dispersion,
            'num_beads': len(all_beads),  # Beads per color
            DYNAMICS: parameters_from_name,
        }
        # TODO: include time required for stirring
        # TODO: change in dispersion

        #wait_for_user()
        #_, args = find_unique(lambda a: a[0] == 'stir', plan)
        #control = args[-1]
        return score
Example #19
def compute_visible(body, poses, camera_pose, draw=True):
    ordered_poses = list(poses)
    rays = []
    camera_point = point_from_pose(camera_pose)
    for pose in ordered_poses:
        point = point_from_pose(pose.get_world_from_body())
        rays.append(Ray(camera_point, point))
    ray_results = batch_ray_collision(rays)
    if draw:
        with LockRenderer():
            handles = []
            for ray, result in zip(rays, ray_results):
                handles.extend(draw_ray(ray, result))
    # Blocking objects will likely be known with high probability
    # TODO: move objects out of the way?
    return {
        pose
        for pose, result in zip(ordered_poses, ray_results)
        if result.objectUniqueId in (body, -1)
    }
Example #20
def score_poses(problem, task, ros_world):
    cup_name, bowl_name = REQUIREMENT_FNS[problem](ros_world)
    name_from_type = {'cup': cup_name, 'bowl': bowl_name}
    initial_poses = {
        ty: ros_world.get_pose(name)
        for ty, name in name_from_type.items()
    }
    final_stackings = wait_until_observation(problem, ros_world)
    #final_stackings = None
    if final_stackings is None:
        # TODO: only do this for the bad bowl
        point_distances = {ty: INF for ty in name_from_type}
        quat_distances = {ty: 2 * np.pi
                          for ty in name_from_type}  # Max rotation
    else:
        final_poses = {
            ty: ros_world.get_pose(name)
            for ty, name in name_from_type.items()
        }
        point_distances = {
            ty: get_distance(point_from_pose(initial_poses[ty]),
                             point_from_pose(final_poses[ty]))
            for ty in name_from_type
        }
        # TODO: wrap around distance for symmetric things
        quat_distances = {
            ty: quat_angle_between(quat_from_pose(initial_poses[ty]),
                                   quat_from_pose(final_poses[ty]))
            for ty in name_from_type
        }
    score = {}
    for ty in sorted(name_from_type):
        print(
            '{} | translation (m): {:.3f} | rotation (degrees): {:.3f}'.format(
                ty, point_distances[ty], math.degrees(quat_distances[ty])))
        score.update({
            '{}_translation'.format(ty): point_distances[ty],
            '{}_rotation'.format(ty): quat_distances[ty],
        })
    return score
Example #21
    def fn(obj_name, pose):
        # TODO: incorporate probability mass
        # Either sample the observation (control) or the target belief (next state)
        body = world.get_body(obj_name)
        open_surface_joints(world, pose.support)
        for camera_name in world.cameras:
            camera_body, camera_matrix, camera_depth = world.cameras[
                camera_name]
            camera_pose = get_pose(camera_body)
            camera_point = point_from_pose(camera_pose)
            obj_point = point_from_pose(pose.get_world_from_body())

            aabb = get_view_aabb(body, camera_pose)
            center = get_aabb_center(aabb)
            extent = np.multiply([detect_scale, detect_scale, 1],
                                 get_aabb_extent(aabb))
            view_aabb = (center - extent / 2, center + extent / 2)
            # print(is_visible_aabb(view_aabb, camera_matrix=camera_matrix))
            obj_points = apply_affine(
                camera_pose, support_from_aabb(view_aabb)) + [obj_point]
            # obj_points = [obj_point]
            if not all(
                    is_visible_point(camera_matrix, camera_depth, point,
                                     camera_pose) for point in obj_points):
                continue
            rays = [Ray(camera_point, point) for point in obj_points]
            detect = Detect(world, camera_name, obj_name, pose, rays)
            if ray_trace:
                # TODO: how should doors be handled?
                move_occluding(world)
                open_surface_joints(world, pose.support)
                detect.pose.assign()
                if obstacles & detect.compute_occluding():
                    continue
            #detect.draw()
            #wait_for_user()
            return (detect, )
        return None
Example #22
def inverse_kinematics(arm, pose, torso, upper):
    from pybullet_tools.pr2_ik.ikLeft import leftIK
    from pybullet_tools.pr2_ik.ikRight import rightIK
    arm_ik = {
        'left': leftIK,
        'right': rightIK,
    }
    ik_fn = arm_ik[arm]
    pos = point_from_pose(pose)
    rot = matrix_from_quat(quat_from_pose(pose)).tolist()
    solutions = ik_fn(list(rot), list(pos), [torso, upper])
    if solutions is None:
        return []
    return solutions
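A minimal usage sketch: the pose is the tool pose in the frame the analytic solver expects, and torso/upper pin the redundant joints (the exact solution layout comes from the generated ikLeft/ikRight modules, so treat the values below as illustrative):

pose = ((0.6, 0.2, 0.8), (0.0, 0.0, 0.0, 1.0))  # (position, quaternion)
for conf in inverse_kinematics('left', pose, torso=0.2, upper=0.5):
    print(conf)  # one arm configuration per analytic solution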
Example #23
def optimize_angle(end_effector,
                   element_pose,
                   translation,
                   direction,
                   reverse,
                   candidate_angles,
                   collision_fn,
                   nearby=True,
                   max_error=TRANSLATION_TOLERANCE):
    robot = end_effector.robot
    movable_joints = get_movable_joints(robot)
    best_error, best_angle, best_conf = max_error, None, None
    initial_conf = get_joint_positions(robot, movable_joints)
    for angle in candidate_angles:
        grasp_pose = get_grasp_pose(translation, direction, angle, reverse)
        # Pose_{world,EE} = Pose_{world,element} * Pose_{element,EE}
        #                 = Pose_{world,element} * (Pose_{EE,element})^{-1}
        target_pose = multiply(element_pose, invert(grasp_pose))
        set_pose(end_effector.body,
                 multiply(target_pose, end_effector.tool_from_ee))

        if nearby:
            set_joint_positions(robot, movable_joints, initial_conf)
        conf = solve_ik(end_effector, target_pose, nearby=nearby)
        if (conf is None) or collision_fn(conf):
            continue

        set_joint_positions(robot, movable_joints, conf)
        link_pose = get_link_pose(robot, end_effector.tool_link)
        error = get_distance(point_from_pose(target_pose),
                             point_from_pose(link_pose))
        if error < best_error:  # TODO: error a function of direction as well
            best_error, best_angle, best_conf = error, angle, conf
            #break
    if best_conf is not None:
        set_joint_positions(robot, movable_joints, best_conf)
    return best_angle, best_conf
Example #24
 def test(object_name, pose, base_conf):
     if object_name in ALL_SURFACES:
         surface_name = object_name
         if surface_name not in vertices_from_surface:
             vertices_from_surface[surface_name] = grow_polygon(
                 map(point_from_pose,
                     load_inverse_placements(world, surface_name)),
                 radius=GROW_INVERSE_BASE)
         if not vertices_from_surface[surface_name]:
             return False
         base_conf.assign()
         pose.assign()
         surface = surface_from_name(surface_name)
         world_from_surface = get_link_pose(
             world.kitchen, link_from_name(world.kitchen, surface.link))
         world_from_base = get_link_pose(world.robot, world.base_link)
         surface_from_base = multiply(invert(world_from_surface),
                                      world_from_base)
         #result = is_point_in_polygon(point_from_pose(surface_from_base), vertices_from_surface[surface_name])
         #if not result:
         #    draw_pose(surface_from_base)
         #    points = [Point(x, y, 0) for x, y, in vertices_from_surface[surface_name]]
         #    add_segments(points, closed=True)
         #    wait_for_user()
         return is_point_in_polygon(point_from_pose(surface_from_base),
                                    vertices_from_surface[surface_name])
     else:
         if not base_from_objects:
             return False
         base_conf.assign()
         pose.assign()
         world_from_base = get_link_pose(world.robot, world.base_link)
         world_from_object = pose.get_world_from_body()
         base_from_object = multiply(invert(world_from_base),
                                     world_from_object)
         return is_point_in_polygon(point_from_pose(base_from_object),
                                    base_from_objects)
Example #25
 def test(joint_name, base_conf):
     if not DOOR_PROXIMITY:
         return True
     if joint_name not in vertices_from_joint:
         base_confs = list(load_pull_base_poses(world, joint_name))
         vertices_from_joint[joint_name] = grow_polygon(
             base_confs, radius=GROW_INVERSE_BASE)
     if not vertices_from_joint[joint_name]:
         return False
     # TODO: can't open hitman_drawer_top_joint any more
     # Likely due to conservative carter geometry
     base_conf.assign()
     base_point = point_from_pose(
         get_link_pose(world.robot, world.base_link))
     return is_point_in_polygon(base_point[:2],
                                vertices_from_joint[joint_name])
Example #26
def is_robot_visible(world, links):
    for link in links:
        link_point = point_from_pose(get_link_pose(world.robot, link))
        visible = False
        for camera_body, camera_matrix, camera_depth in world.cameras.values():
            camera_pose = get_pose(camera_body)
            #camera_point = point_from_pose(camera_pose)
            #add_line(link_point, camera_point)
            if is_visible_point(camera_matrix, camera_depth, link_point,
                                camera_pose):
                visible = True
                break
        if not visible:
            return False
    #wait_for_user()
    return True
Example #27
File: push.py Project: lyltc1/LTAMP
 def gen_fn(obj, pose1, surface, region=None):
     start_point = point_from_pose(pose1)
     distance_range = (0.15, 0.2) if region is None else (0.15, 0.3)
     obj_body = world.bodies[obj]
     while True:
         theta = random.uniform(-np.pi, np.pi)
         distance = random.uniform(*distance_range)
         end_point2d = np.array(start_point[:2]) + distance * unit_from_theta(theta)
         end_pose = (np.append(end_point2d, [start_point[2]]), quat_from_pose(pose1))
         set_pose(obj_body, end_pose)
         if not is_center_stable(obj_body, world.bodies[surface], above_epsilon=np.inf):
             continue
         if region is not None:
             assert region in ARMS
             if not reachable_test(region, obj, end_pose):
                 continue
         yield end_point2d,
Example #28
File: push.py Project: lyltc1/LTAMP
def get_push_feature(world, arm, block_name, initial_pose, goal_pos2d):
    block_body = world.get_body(block_name)
    block_reference = get_reference_pose(block_name)
    _, (block_w, block_l, block_h) = approximate_as_prism(block_body, body_pose=block_reference)
    goal_pose = get_end_pose(initial_pose, goal_pos2d)
    difference_initial = point_from_pose(multiply(invert(initial_pose), goal_pose))

    feature = {
        'arm_name': arm,
        'block_name': block_name,
        'block_type': get_type(block_name),
        'block_width': block_w,
        'block_length': block_l,
        'block_height': block_h,

        'push_yaw': get_yaw(difference_initial),
        'push_distance': get_length(difference_initial)
    }
    return feature
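Hedged stand-ins for the two helpers that turn the relative displacement into features (matching pybullet_tools semantics: the planar heading of a vector and its Euclidean norm):

import numpy as np

def get_yaw(vec):
    # heading of the xy-projection, in radians
    return np.arctan2(vec[1], vec[0])

def get_length(vec):
    # Euclidean norm
    return np.linalg.norm(vec)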
Example #29
 def pose_from_pose2d(self, pose2d, surface):
     #assert surface in self.poses_from_surface
     #reference_pose = self.poses_from_surface[surface][0]
     body = self.world.get_body(self.name)
     surface_aabb = compute_surface_aabb(self.world, surface)
     world_from_surface = get_surface_reference_pose(
         self.world.kitchen, surface)
     if DIM == 2:
         x, y = pose2d[:DIM]
         yaw = np.random.uniform(*CIRCULAR_LIMITS)
     else:
         x, y, yaw = pose2d
     z = stable_z_on_aabb(
         body,
         surface_aabb) + Z_EPSILON - point_from_pose(world_from_surface)[2]
     point = Point(x, y, z)
     surface_from_body = Pose(point, Euler(yaw=yaw))
     set_pose(body, multiply(world_from_surface, surface_from_body))
     return create_relative_pose(self.world, self.name, surface)
Example #30
def compute_assignments(robots, elements, node_points, initial_confs):
    # TODO: print direction might influence the assignment
    assignments = {name: set() for name in initial_confs}
    for element in elements:
        point = get_midpoint(node_points, element)  # min/max
        closest_robot, closest_distance = None, INF
        for i, robot in enumerate(robots):
            base_pose = get_pose(robot)
            base_point = point_from_pose(base_pose)
            point_base = tform_point(invert(base_pose), point)
            distance = get_yaw(point_base)  # which side it's on
            #distance = abs((base_point - point)[0]) # x distance
            #distance = get_length((base_point - point)[:2]) # xy distance
            if distance < closest_distance:
                closest_robot, closest_distance = ROBOT_TEMPLATE.format(
                    i), distance
        assert closest_robot is not None
        # TODO: assign to several robots if close to the best distance
        assignments[closest_robot].add(element)
    return assignments
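A hedged stand-in for get_midpoint as used above, assuming an element is a pair of node indices into node_points:

import numpy as np

def get_midpoint(node_points, element):
    node1, node2 = element
    return (np.array(node_points[node1]) + np.array(node_points[node2])) / 2.0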