Example No. 1
def main(display='execute'):  # control | execute | step
    connect(use_gui=True)
    disable_real_time()
    draw_global_system()
    with HideOutput():
        robot = load_model(DRAKE_IIWA_URDF)  # KUKA_IIWA_URDF | DRAKE_IIWA_URDF
        floor = load_model('models/short_floor.urdf')
    block = load_model(BLOCK_URDF, fixed_base=False)
    set_pose(block, Pose(Point(y=0.5, z=stable_z(block, floor))))
    set_default_camera(distance=2)
    dump_world()

    saved_world = WorldSaver()
    command = plan(robot, block, fixed=[floor], teleport=False)
    if (command is None) or (display is None):
        print('Unable to find a plan!')
        return

    saved_world.restore()
    update_state()
    wait_if_gui('{}?'.format(display))
    if display == 'control':
        enable_gravity()
        command.control(real_time=False, dt=0)
    elif display == 'execute':
        command.refine(num_steps=10).execute(time_step=0.005)
    elif display == 'step':
        command.step()
    else:
        raise ValueError(display)

    print('Quit?')
    wait_if_gui()
    disconnect()
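
This example relies on wrapper helpers such as connect, load_model, and disconnect. For reference, here is a minimal sketch of the raw pybullet session lifecycle those helpers presumably wrap; it assumes only that pybullet and pybullet_data are installed and uses no pybullet_tools code.

import pybullet as p
import pybullet_data

client = p.connect(p.DIRECT)  # p.GUI opens a visualization window instead
p.setAdditionalSearchPath(pybullet_data.getDataPath())
p.setGravity(0, 0, -9.81)
plane = p.loadURDF("plane.urdf")
block = p.loadURDF("cube_small.urdf", basePosition=[0, 0.5, 0.5])
for _ in range(240):  # about one second at the default 240 Hz time step
    p.stepSimulation()
print(p.getBasePositionAndOrientation(block))
p.disconnect()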
Example No. 2
def simulate_parallel(robots,
                      plan,
                      time_step=0.1,
                      speed_up=10.,
                      record=None):  # None | video.mp4
    # TODO: ensure the step size is appropriate
    makespan = compute_duration(plan)
    print('\nMakespan: {:.3f}'.format(makespan))
    trajectories = extract_parallel_trajectories(plan)
    if trajectories is None:
        return
    wait_if_gui('Begin?')
    num_motion = sum(action.name == 'move' for action in plan)
    with VideoSaver(record):
        for t in inclusive_range(0, makespan, time_step):
            # if action.start <= t <= get_end(action):
            executing = Counter(traj.robot for traj in trajectories
                                if traj.at(t) is not None)
            print('t={:.3f}/{:.3f} | executing={}'.format(
                t, makespan, executing))
            for robot in robots:
                num = executing.get(robot, 0)
                if 2 <= num:
                    raise RuntimeError(
                        'Robot {} simultaneously executing {} trajectories'.
                        format(robot, num))
                if (num_motion == 0) and (num == 0):
                    set_configuration(robot, DUAL_CONF)
            #step_simulation()
            wait_for_duration(time_step / speed_up)
    wait_if_gui('Finish?')
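
The bookkeeping at the heart of simulate_parallel is counting, at each time step, how many trajectories every robot is executing and rejecting any step where a robot runs two at once. A minimal standard-library sketch of that check, with hypothetical (robot, start, end) tuples standing in for the retimed trajectories:

from collections import Counter

# Hypothetical (robot, start, end) tuples standing in for retimed trajectories.
trajectories = [('r0', 0.0, 2.0), ('r1', 1.0, 3.0), ('r0', 2.5, 4.0)]

def executing_at(t):
    # Count how many trajectories each robot is executing at time t.
    return Counter(robot for robot, start, end in trajectories if start <= t <= end)

for t in (0.5, 1.5, 2.75):
    executing = executing_at(t)
    assert all(num <= 1 for num in executing.values()), executing
    print('t={:.2f} | executing={}'.format(t, dict(executing)))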
Example No. 3
def create_inverse_reachability(robot, body, table, arm, grasp_type, max_attempts=500, num_samples=500):
    tool_link = get_gripper_link(robot, arm)
    robot_saver = BodySaver(robot)
    gripper_from_base_list = []
    grasps = GET_GRASPS[grasp_type](body)

    start_time = time.time()
    while len(gripper_from_base_list) < num_samples:
        box_pose = sample_placement(body, table)
        set_pose(body, box_pose)
        grasp_pose = random.choice(grasps)
        gripper_pose = multiply(box_pose, invert(grasp_pose))
        for attempt in range(max_attempts):
            robot_saver.restore()
            base_conf = next(uniform_pose_generator(robot, gripper_pose))  # reachable_range=(0., 1.)
            #set_base_values(robot, base_conf)
            set_group_conf(robot, 'base', base_conf)
            if pairwise_collision(robot, table):
                continue
            grasp_conf = pr2_inverse_kinematics(robot, arm, gripper_pose) #, nearby_conf=USE_CURRENT)
            #conf = inverse_kinematics(robot, link, gripper_pose)
            if (grasp_conf is None) or pairwise_collision(robot, table):
                continue
            gripper_from_base = multiply(invert(get_link_pose(robot, tool_link)), get_base_pose(robot))
            #wait_if_gui()
            gripper_from_base_list.append(gripper_from_base)
            print('{} / {} | {} attempts | [{:.3f}]'.format(
                len(gripper_from_base_list), num_samples, attempt, elapsed_time(start_time)))
            wait_if_gui()
            break
        else:
            print('Failed to find a kinematic solution after {} attempts'.format(max_attempts))
    return save_inverse_reachability(robot, arm, grasp_type, tool_link, gripper_from_base_list)
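
The key line above is gripper_from_base = multiply(invert(world_from_tool), world_from_base), i.e. the pose of the base expressed in the gripper frame. A sketch of the same relative-transform computation with plain 4x4 homogeneous matrices, independent of the library's pose representation:

import numpy as np

def relative_transform(world_from_tool, world_from_base):
    # gripper_from_base = inv(world_from_tool) @ world_from_base
    return np.linalg.inv(world_from_tool) @ world_from_base

# Toy poses: tool 1 m ahead of the world origin, base at the origin.
world_from_tool = np.eye(4)
world_from_tool[0, 3] = 1.0
world_from_base = np.eye(4)
print(relative_transform(world_from_tool, world_from_base))  # base sits 1 m behind the tool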
Example No. 4
def test_close_gripper(robot, arm):
    gripper_joints = get_gripper_joints(robot, arm)
    extend_fn = get_extend_fn(robot, gripper_joints)
    for positions in extend_fn(get_open_positions(robot, arm), get_closed_positions(robot, arm)):
        set_joint_positions(robot, gripper_joints, positions)
        print(positions)
        wait_if_gui('Continue?')
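
get_extend_fn presumably yields intermediate configurations between the open and closed gripper positions; a minimal sketch of that idea as plain linear interpolation (the joint values below are illustrative, not the PR2's actual limits):

import numpy as np

def linear_extend(start_conf, end_conf, num_steps=10):
    # Yield configurations linearly interpolated from start_conf to end_conf.
    start, end = np.array(start_conf), np.array(end_conf)
    for alpha in np.linspace(0., 1., num=num_steps + 1)[1:]:
        yield tuple((1 - alpha) * start + alpha * end)

for positions in linear_extend([0.5, 0.5], [0.0, 0.0], num_steps=5):
    print(positions)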
Example No. 5
def test_lis(world):
    #model = 'oil'
    model = 'soup'

    point_path = os.path.join(LIS_DIRECTORY, 'clouds', CLOUDS[model])
    mesh_path = os.path.join(LIS_DIRECTORY, 'meshes',
                             MESHES[model])  # off | ply | wrl
    #ycb_path = os.path.join(DATA_DIRECTORY, 'ycb', 'plastic_wine_cup', 'meshes', 'tsdf.stl') # stl | ply
    ycb_path = os.path.join(LIS_DIRECTORY, 'ycb', 'plastic_wine_cup',
                            'textured_meshes',
                            'optimized_tsdf_texture_mapped_mesh.obj')  # ply

    print(point_path)
    print(mesh_path)
    print(ycb_path)

    #mesh = read_mesh_off(mesh_path, scale=0.001)
    #print(mesh)
    points = read_pcd_file(point_path)
    #print(points)
    #print(convex_hull(points))
    mesh = mesh_from_points(points)
    #print(mesh)

    body = create_mesh(mesh, color=(1, 0, 0, 1))
    #print(get_num_links(body))
    #print(mesh_from_body(body))

    #set_point(body, (1, 1, 1))
    wait_if_gui()
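
mesh_from_points builds a mesh directly from the loaded point cloud; one common way to do that is a convex hull, sketched here with SciPy (an assumption about the helper, not its actual implementation):

import numpy as np
from scipy.spatial import ConvexHull

points = np.random.rand(100, 3)   # stand-in for the point cloud read from disk
hull = ConvexHull(points)
vertices = points[hull.vertices]  # coordinates of the hull vertices
faces = hull.simplices            # triangles as index triples into `points`
print(len(vertices), 'vertices,', len(faces), 'faces')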
Example No. 6
def create_inverse_reachability2(robot, body, table, arm, grasp_type, max_attempts=500, num_samples=500):
    tool_link = get_gripper_link(robot, arm)
    problem = MockProblem(robot, fixed=[table], grasp_types=[grasp_type])
    placement_gen_fn = get_stable_gen(problem)
    grasp_gen_fn = get_grasp_gen(problem, collisions=True)
    ik_ir_fn = get_ik_ir_gen(problem, max_attempts=max_attempts, learned=False, teleport=True)
    placement_gen = placement_gen_fn(body, table)
    grasps = list(grasp_gen_fn(body))
    print('Grasps:', len(grasps))

    # TODO: sample the torso height
    # TODO: consider IK with respect to the torso frame
    start_time = time.time()
    gripper_from_base_list = []
    while len(gripper_from_base_list) < num_samples:
        [(p,)] = next(placement_gen)
        (g,) = random.choice(grasps)
        output = next(ik_ir_fn(arm, body, p, g), None)
        if output is None:
            print('Failed to find a solution after {} attempts'.format(max_attempts))
        else:
            (_, ac) = output
            [at,] = ac.commands
            at.path[-1].assign()
            gripper_from_base = multiply(invert(get_link_pose(robot, tool_link)), get_base_pose(robot))
            gripper_from_base_list.append(gripper_from_base)
            print('{} / {} [{:.3f}]'.format(
                len(gripper_from_base_list), num_samples, elapsed_time(start_time)))
            wait_if_gui()
    return save_inverse_reachability(robot, arm, grasp_type, tool_link, gripper_from_base_list)
Example No. 7
def test_drake_base_motion(pr2, base_start, base_goal, obstacles=[]):
    # TODO: combine this with test_arm_motion
    """
    Drake's PR2 URDF has explicit base joints
    """
    disabled_collisions = get_disabled_collisions(pr2)
    base_joints = [joint_from_name(pr2, name) for name in PR2_GROUPS['base']]
    set_joint_positions(pr2, base_joints, base_start)
    base_joints = base_joints[:2]
    base_goal = base_goal[:len(base_joints)]
    wait_if_gui('Plan Base?')
    with LockRenderer(lock=False):
        base_path = plan_joint_motion(pr2,
                                      base_joints,
                                      base_goal,
                                      obstacles=obstacles,
                                      disabled_collisions=disabled_collisions)
    if base_path is None:
        print('Unable to find a base path')
        return
    print(len(base_path))
    for bq in base_path:
        set_joint_positions(pr2, base_joints, bq)
        if SLEEP is None:
            wait_if_gui('Continue?')
        else:
            wait_for_duration(SLEEP)
Example No. 8
def main():
    connect(use_gui=True)
    print(get_json_filenames())

    #problem_filenames = sorted(os.listdir(openrave_directory))
    #problem_filenames = ['{}.json'.format(name) for name in FFROB]
    #problem_filenames = ['sink_stove_4_30.json'] # 'dinner.json' | 'simple.json'
    problem_filenames = ['simple.json'] # 'dinner.json' | 'simple.json'

    # Mac width/height
    #width = 2560
    #height = 1600
    #
    #640, 480
    screenshot = False

    for problem_filename in problem_filenames:
        load_json_problem(problem_filename)
        if screenshot:
            problem_name = problem_filename.split('.')[0]
            image_filename = "{}.png".format(problem_name)
            image_path = os.path.join(SCREENSHOT_DIR, image_filename)
            wait_for_interrupt(max_time=0.5)
            os.system("screencapture -R {},{},{},{} {}".format(
                225, 200, 600, 500, image_path))
        else:
            wait_if_gui()
    disconnect()
Example No. 9
def test_arm_control(pr2, left_joints, arm_start):
    wait_if_gui('Control Arm?')
    real_time = False
    enable_gravity()
    p.setRealTimeSimulation(real_time)
    for _ in joint_controller(pr2, left_joints, arm_start):
        if not real_time:
            p.stepSimulation()
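
joint_controller presumably issues position-control commands and yields until the target is reached; the raw pybullet call it would build on is setJointMotorControl2. A self-contained sketch on the KUKA arm bundled with pybullet_data:

import pybullet as p
import pybullet_data

p.connect(p.DIRECT)
p.setAdditionalSearchPath(pybullet_data.getDataPath())
robot = p.loadURDF("kuka_iiwa/model.urdf", useFixedBase=True)
joint, target = 1, 0.5
p.setJointMotorControl2(robot, joint, controlMode=p.POSITION_CONTROL,
                        targetPosition=target, maxVelocity=1.0)
for _ in range(240):  # step until the motor converges (~1 s of simulated time)
    p.stepSimulation()
print(p.getJointState(robot, joint)[0])  # measured joint position
p.disconnect()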
Example No. 10
def main():
    connect(use_gui=True)
    with HideOutput():
        pr2 = load_model(DRAKE_PR2_URDF)
    set_joint_positions(pr2, joints_from_names(pr2, PR2_GROUPS['left_arm']),
                        REST_LEFT_ARM)
    set_joint_positions(pr2, joints_from_names(pr2, PR2_GROUPS['right_arm']),
                        rightarm_from_leftarm(REST_LEFT_ARM))
    set_joint_positions(pr2, joints_from_names(pr2, PR2_GROUPS['torso']),
                        [0.2])
    dump_body(pr2)

    block = load_model(BLOCK_URDF, fixed_base=False)
    set_point(block, [2, 0.5, 1])
    target_point = point_from_pose(get_pose(block))

    # head_link = link_from_name(pr2, HEAD_LINK)
    head_joints = joints_from_names(pr2, PR2_GROUPS['head'])
    head_link = head_joints[-1]  # in pybullet, a joint index doubles as its child link index

    #max_detect_distance = 2.5
    max_register_distance = 1.0
    distance_range = (max_register_distance / 2, max_register_distance)
    base_generator = visible_base_generator(pr2, target_point, distance_range)

    base_joints = joints_from_names(pr2, PR2_GROUPS['base'])
    for i in range(5):
        base_conf = next(base_generator)
        set_joint_positions(pr2, base_joints, base_conf)

        p.addUserDebugLine(point_from_pose(get_link_pose(pr2, head_link)),
                           target_point,
                           lineColorRGB=(1, 0, 0))  # addUserDebugText
        p.addUserDebugLine(point_from_pose(
            get_link_pose(pr2, link_from_name(pr2, HEAD_LINK_NAME))),
                           target_point,
                           lineColorRGB=(0, 0, 1))  # addUserDebugText

        # head_conf = sub_inverse_kinematics(pr2, head_joints[0], HEAD_LINK, )
        head_conf = inverse_visibility(pr2, target_point)
        assert head_conf is not None
        set_joint_positions(pr2, head_joints, head_conf)
        print(get_detections(pr2))
        # TODO: does this detect the robot sometimes?

        detect_mesh, z = get_detection_cone(pr2, block)
        detect_cone = create_mesh(detect_mesh, color=(0, 1, 0, 0.5))
        set_pose(detect_cone,
                 get_link_pose(pr2, link_from_name(pr2, HEAD_LINK_NAME)))
        view_cone = get_viewcone(depth=2.5, color=(1, 0, 0, 0.25))
        set_pose(view_cone,
                 get_link_pose(pr2, link_from_name(pr2, HEAD_LINK_NAME)))
        wait_if_gui()
        remove_body(detect_cone)
        remove_body(view_cone)

    disconnect()
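
visible_base_generator samples base configurations that keep the target point within a distance range of the robot; a sketch of the geometry that presumably underlies it (a random point on an annulus around the target, with the yaw turned to face it):

import math
import random

def sample_facing_base(target_xy, distance_range):
    # Sample an (x, y, yaw) base configuration facing target_xy from within distance_range.
    distance = random.uniform(*distance_range)
    angle = random.uniform(-math.pi, math.pi)
    x = target_xy[0] + distance * math.cos(angle)
    y = target_xy[1] + distance * math.sin(angle)
    yaw = math.atan2(target_xy[1] - y, target_xy[0] - x)  # point toward the target
    return (x, y, yaw)

print(sample_facing_base(target_xy=(2.0, 0.5), distance_range=(0.5, 1.0)))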
Example No. 11
def main():
    connect(use_gui=True)

    with HideOutput():
        pr2 = load_model("models/pr2_description/pr2.urdf")
    test_clone_robot(pr2)
    test_clone_arm(pr2)

    wait_if_gui('Finish?')
    disconnect()
Example No. 12
def main(use_pr2_drake=True):
    connect(use_gui=True)
    add_data_path()

    plane = p.loadURDF("plane.urdf")
    table_path = "models/table_collision/table.urdf"
    table = load_pybullet(table_path, fixed_base=True)
    set_quat(table, quat_from_euler(Euler(yaw=PI / 2)))
    # table/table.urdf, table_square/table_square.urdf, cube.urdf, block.urdf, door.urdf
    obstacles = [plane, table]

    pr2_urdf = DRAKE_PR2_URDF if use_pr2_drake else PR2_URDF
    with HideOutput():
        pr2 = load_model(pr2_urdf, fixed_base=True)  # TODO: suppress warnings?
    dump_body(pr2)

    z = base_aligned_z(pr2)
    print(z)
    #z = stable_z_on_aabb(pr2, AABB(np.zeros(3), np.zeros(3)))
    print(z)

    set_point(pr2, Point(z=z))
    print(get_aabb(pr2))
    wait_if_gui()

    base_start = (-2, -2, 0)
    base_goal = (2, 2, 0)
    arm_start = SIDE_HOLDING_LEFT_ARM
    #arm_start = TOP_HOLDING_LEFT_ARM
    #arm_start = REST_LEFT_ARM
    arm_goal = TOP_HOLDING_LEFT_ARM
    #arm_goal = SIDE_HOLDING_LEFT_ARM

    left_joints = joints_from_names(pr2, PR2_GROUPS['left_arm'])
    right_joints = joints_from_names(pr2, PR2_GROUPS['right_arm'])
    torso_joints = joints_from_names(pr2, PR2_GROUPS['torso'])
    set_joint_positions(pr2, left_joints, arm_start)
    set_joint_positions(pr2, right_joints,
                        rightarm_from_leftarm(REST_LEFT_ARM))
    set_joint_positions(pr2, torso_joints, [0.2])
    open_arm(pr2, 'left')
    # test_ikfast(pr2)

    add_line(base_start, base_goal, color=RED)
    print(base_start, base_goal)
    if use_pr2_drake:
        test_drake_base_motion(pr2, base_start, base_goal, obstacles=obstacles)
    else:
        test_base_motion(pr2, base_start, base_goal, obstacles=obstacles)

    test_arm_motion(pr2, left_joints, arm_goal)
    # test_arm_control(pr2, left_joints, arm_start)

    wait_if_gui('Finish?')
    disconnect()
Example No. 13
def simulate_curve(robot, joints, curve):
    #set_joint_positions(robot, joints, curve(random.uniform(curve.x[0], curve.x[-1])))
    wait_if_gui(message='Begin?')
    #controller = follow_curve_old(robot, joints, curve)
    controller = follow_curve(robot, joints, curve)
    for _ in controller:
        step_simulation()
        #wait_if_gui()
        #wait_for_duration(duration=time_step)
        #time.sleep(time_step)
    wait_if_gui(message='Finish?')
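
follow_curve presumably yields once per control step while tracking a time-parameterized curve, which is why the loop body above only calls step_simulation. A standard-library sketch of such a generator, with a hypothetical curve callable:

def follow_curve_sketch(curve, duration, time_step=1. / 240):
    # Yield the target configuration for each control step along the curve.
    t = 0.
    while t <= duration:
        yield curve(t)  # the caller applies this configuration, then steps the simulation
        t += time_step

# Hypothetical curve: one joint ramping linearly from 0 to 1 over two seconds.
for conf in follow_curve_sketch(lambda t: (t / 2.,), duration=2., time_step=0.5):
    print(conf)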
Example No. 14
def test_clone_arm(pr2):
    first_joint_name = PR2_GROUPS['left_arm'][0]
    first_joint = joint_from_name(pr2, first_joint_name)
    parent_joint = get_link_parent(pr2, first_joint)
    print(get_link_name(pr2, parent_joint), parent_joint, first_joint_name,
          first_joint)
    print(get_link_descendants(pr2,
                               first_joint))  # TODO: least common ancestor
    links = [first_joint] + get_link_descendants(pr2, first_joint)
    new_arm = clone_body(pr2, links=links, collision=False)
    dump_world()
    set_base_values(pr2, (-2, 0, 0))
    wait_if_gui()
Example No. 15
def test_arm_motion(pr2, left_joints, arm_goal):
    disabled_collisions = get_disabled_collisions(pr2)
    wait_if_gui('Plan Arm?')
    with LockRenderer(lock=False):
        arm_path = plan_joint_motion(pr2, left_joints, arm_goal, disabled_collisions=disabled_collisions)
    if arm_path is None:
        print('Unable to find an arm path')
        return
    print(len(arm_path))
    for q in arm_path:
        set_joint_positions(pr2, left_joints, q)
        #wait_if_gui('Continue?')
        wait_for_duration(0.01)
Example No. 16
def test_kinematic(robot, door, target_x):
    wait_if_gui('Begin?')
    robot_joints = get_movable_joints(robot)[:3]
    joint = robot_joints[0]
    start_x = get_joint_position(robot, joint)
    num_steps = int(math.ceil(abs(target_x - start_x) / 1e-2))
    for x in interpolate(start_x, target_x, num_steps=num_steps):
        set_joint_position(robot, joint=joint, value=x)
        #with LockRenderer():
        solve_collision_free(door, robot, draw=False)
        wait_for_duration(duration=1e-2)
        #wait_if_gui()
    wait_if_gui('Finish?')
Example No. 17
def test_door(door):
    door_joints = get_movable_joints(door)

    sample_fn = get_sample_fn(door, door_joints)
    set_joint_positions(door, door_joints, sample_fn())
    for _ in range(10):  # bounded loop so the control code below is reachable
        positions = sample_fn()
        set_joint_positions(door, door_joints, positions)
        wait_if_gui()

    lower, upper = get_joint_intervals(door, door_joints)
    control_joints(door, door_joints, positions=lower)
    velocity_control_joints(door, door_joints,
                            velocities=[PI / 4])  # Able to exceed limits
Example No. 18
def test_arm_motion(pr2, left_joints, arm_goal):
    disabled_collisions = get_disabled_collisions(pr2)
    wait_if_gui('Plan Arm?')
    arm_path = plan_joint_motion(pr2,
                                 left_joints,
                                 arm_goal,
                                 disabled_collisions=disabled_collisions)
    if arm_path is None:
        print('Unable to find an arm path')
        return
    print(len(arm_path))
    for q in arm_path:
        set_joint_positions(pr2, left_joints, q)
        #wait_if_gui('Continue?')
        time.sleep(0.01)
Example No. 19
def test_grasps(robot, block):
    for arm in ARMS:
        gripper_joints = get_gripper_joints(robot, arm)
        tool_link = link_from_name(robot, TOOL_LINK.format(arm))
        tool_pose = get_link_pose(robot, tool_link)
        #handles = draw_pose(tool_pose)
        #grasps = get_top_grasps(block, under=True, tool_pose=unit_pose())
        grasps = get_side_grasps(block, under=True, tool_pose=unit_pose())
        for i, grasp_pose in enumerate(grasps):
            block_pose = multiply(tool_pose, grasp_pose)
            set_pose(block, block_pose)
            close_until_collision(robot, gripper_joints, bodies=[block], open_conf=get_open_positions(robot, arm),
                                  closed_conf=get_closed_positions(robot, arm))
            handles = draw_pose(block_pose)
            wait_if_gui('Grasp {}'.format(i))
            remove_handles(handles)
Example No. 20
def test_ycb(world):
    name = 'mustard_bottle'  # potted_meat_can | cracker_box | sugar_box | mustard_bottle | bowl
    path_from_name = get_ycb_objects()
    print(path_from_name)

    ycb_directory = os.path.join(YCB_DIRECTORY, path_from_name[name], SENSOR)
    obj_path = os.path.join(ycb_directory, OBJ_PATH)
    image_path = os.path.join(ycb_directory, PNG_PATH)
    print(obj_path)
    #body = load_pybullet(obj_path) #, fixed_base=True)
    body = create_obj(obj_path, color=WHITE)
    set_texture(body, image_path)

    for link in get_all_links(body):
        set_color(body, link=link, color=WHITE)
    wait_if_gui()
Example No. 21
def test_base_motion(pr2, base_start, base_goal, obstacles=[]):
    #disabled_collisions = get_disabled_collisions(pr2)
    set_base_values(pr2, base_start)
    wait_if_gui('Plan Base-pr2?')
    base_limits = ((-2.5, -2.5), (2.5, 2.5))
    with LockRenderer(lock=False):
        base_path = plan_base_motion(pr2, base_goal, base_limits, obstacles=obstacles)
    if base_path is None:
        print('Unable to find a base path')
        return
    print(len(base_path))
    for bq in base_path:
        set_base_values(pr2, bq)
        if SLEEP is None:
            wait_if_gui('Continue?')
        else:
            wait_for_duration(SLEEP)
Example No. 22
def simulate_parallel(robots,
                      plan,
                      time_step=0.1,
                      speed_up=10.,
                      record=None):  # None | video.mp4
    # TODO: ensure the step size is appropriate
    makespan = compute_duration(plan)
    print('\nMakespan: {:.3f}'.format(makespan))
    if plan is None:
        return
    trajectories = []
    for action in plan:
        command = action.args[-1]
        if (action.name == 'move') and (command.start_conf is
                                        action.args[-2].positions):
            command = command.reverse()
        command.retime(start_time=action.start)
        #print(action)
        #print(action.start, get_end(action), action.duration)
        #print(command.start_time, command.end_time, command.duration)
        #for traj in command.trajectories:
        #    print(traj, traj.start_time, traj.end_time, traj.duration)
        trajectories.extend(command.trajectories)
    #print(sum(traj.duration for traj in trajectories))
    num_motion = sum(action.name == 'move' for action in plan)

    wait_if_gui('Begin?')
    with VideoSaver(record):
        for t in inclusive_range(0, makespan, time_step):
            # if action.start <= t <= get_end(action):
            executing = Counter(traj.robot for traj in trajectories
                                if traj.at(t) is not None)
            print('t={:.3f}/{:.3f} | executing={}'.format(
                t, makespan, executing))
            for robot in robots:
                num = executing.get(robot, 0)
                if 2 <= num:
                    raise RuntimeError(
                        'Robot {} simultaneously executing {} trajectories'.
                        format(robot, num))
                if (num_motion == 0) and (num == 0):
                    set_configuration(robot, DUAL_CONF)
            #step_simulation()
            wait_for_duration(time_step / speed_up)
    wait_if_gui('Finish?')
Example No. 23
def test_clone_robot(pr2):
    # TODO: j toggles frames, p prints timings, w is wire, a is boxes
    new_pr2 = clone_body(pr2, visual=True, collision=False)
    #new_pr2 = clone_body_editor(pr2, visual=True, collision=True)
    dump_world()
    #print(load_srdf_collisions())
    #print(load_dae_collisions())

    # TODO: some unimportant quats are off for both URDF and other
    # TODO: maybe all the frames are actually correct when I load things this way?
    # TODO: the visual geometries are off but not the collision frames?
    """
    import numpy as np
    for link in get_links(pr2):
        if not get_visual_data(new_pr2, link): # pybullet.error: Error receiving visual shape info?
            continue
        #print(get_link_name(pr2, link))
        data1 = VisualShapeData(*get_visual_data(pr2, link)[0])
        data2 = VisualShapeData(*get_visual_data(new_pr2, link)[0])
        pose1 = (data1.localVisualFrame_position, data1.localVisualFrame_orientation)
        pose2 = (data2.localVisualFrame_position, data2.localVisualFrame_orientation)

        #pose1 = get_link_pose(pr2, link) # Links are fine
        #pose2 = get_link_pose(new_pr2, link)
        #pose1 = get_joint_parent_frame(pr2, link)
        #pose2 = get_joint_parent_frame(new_pr2, link)
        #pose1 = get_joint_inertial_pose(pr2, link) # Inertia is fine
        #pose2 = get_joint_inertial_pose(new_pr2, link)
        if not np.allclose(pose1[0], pose2[0], rtol=0, atol=1e-3):
            print('Point', get_link_name(pr2, link), link, pose1[0], pose2[0])
            #print(data1)
            #print(data2)
            #print(get_joint_parent_frame(pr2, link), get_joint_parent_frame(new_pr2, link))
            print(get_joint_inertial_pose(pr2, link)) #, get_joint_inertial_pose(new_pr2, link))
            print()
        if not np.allclose(euler_from_quat(pose1[1]), euler_from_quat(pose2[1]), rtol=0, atol=1e-3):
            print('Quat', get_link_name(pr2, link), link, euler_from_quat(pose1[1]), euler_from_quat(pose2[1]))
        joint_info1 = get_joint_info(pr2, link)
        joint_info2 = get_joint_info(new_pr2, link)
        # TODO: the axis is off for some of these
        if not np.allclose(joint_info1.jointAxis, joint_info2.jointAxis, rtol=0, atol=1e-3):
            print('Axis', get_link_name(pr2, link), link, joint_info1.jointAxis, joint_info2.jointAxis)
    """
    set_base_values(new_pr2, (2, 0, 0))
    wait_if_gui()
Example No. 24
def main():
    parser = argparse.ArgumentParser()
    parser.add_argument('-shape',
                        default='box',
                        choices=['box', 'sphere', 'cylinder', 'capsule'])
    parser.add_argument('-video', action='store_true')
    args = parser.parse_args()
    video = 'video.mp4' if args.video else None

    connect(use_gui=True, mp4=video)
    if video is not None:
        set_renderer(enable=False)

    draw_global_system()
    set_camera_pose(camera_point=Point(+1.5, -1.5, +1.5),
                    target_point=Point(-1.5, +1.5, 0))

    add_data_path()
    plane = load_pybullet('plane.urdf', fixed_base=True)
    #plane = load_model('plane.urdf')
    set_point(plane, Point(z=-1e-3))

    ramp = create_ramp()
    dump_body(ramp)

    obj = create_object(args.shape)
    set_euler(obj, np.random.uniform(-np.radians(1), np.radians(1), 3))
    set_point(obj, np.random.uniform(-1e-3, +1e-3, 3))
    #set_velocity(obj, linear=Point(x=-1))
    set_position(obj, z=2.)
    #set_position(obj, z=base_aligned_z(obj))
    dump_body(obj)

    #add_pose_constraint(obj, pose=(target_point, target_quat), max_force=200)

    set_renderer(enable=True)
    if video is None:
        wait_if_gui('Begin?')
    simulate(controller=condition_controller(lambda step:
                                             (step != 0) and at_rest(obj)))
    if video is None:
        wait_if_gui('Finish?')
    disconnect()
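
simulate(controller=condition_controller(...)) presumably steps the physics until the condition holds; here the condition is that the object has come to rest after at least one step. A raw-pybullet sketch of that termination test (at_rest is reimplemented below from base velocities and is an assumption, not the library's definition):

import numpy as np
import pybullet as p
import pybullet_data

p.connect(p.DIRECT)
p.setAdditionalSearchPath(pybullet_data.getDataPath())
p.setGravity(0, 0, -9.81)
p.loadURDF("plane.urdf")
obj = p.loadURDF("cube_small.urdf", basePosition=[0, 0, 1.0])

def at_rest(body, tolerance=1e-3):
    linear, angular = p.getBaseVelocity(body)
    return np.linalg.norm(linear) < tolerance and np.linalg.norm(angular) < tolerance

step = 0
while (step == 0) or not at_rest(obj):  # mirrors (step != 0) and at_rest(obj)
    p.stepSimulation()
    step += 1
print('settled after', step, 'steps')
p.disconnect()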
Example No. 25
def test_ikfast(pr2):
    from pybullet_tools.ikfast.pr2.ik import get_tool_pose, get_ik_generator
    left_joints = joints_from_names(pr2, PR2_GROUPS['left_arm'])
    #right_joints = joints_from_names(pr2, PR2_GROUPS['right_arm'])
    torso_joints = joints_from_names(pr2, PR2_GROUPS['torso'])
    torso_left = torso_joints + left_joints
    print(get_link_pose(pr2, link_from_name(pr2, 'l_gripper_tool_frame')))
    # print(forward_kinematics('left', get_joint_positions(pr2, torso_left)))
    print(get_tool_pose(pr2, 'left'))

    arm = 'left'
    pose = get_tool_pose(pr2, arm)
    generator = get_ik_generator(pr2, arm, pose, torso_limits=False)
    for i in range(100):
        solutions = next(generator)
        print(i, len(solutions))
        for q in solutions:
            set_joint_positions(pr2, torso_left, q)
            wait_if_gui()
Example No. 26
def main():
    benchmark = 'tmp-benchmark-data'
    problem = 'problem1'  # Hanoi
    #problem = 'problem2' # Blocksworld
    #problem = 'problem3' # Clutter
    #problem = 'problem4' # Nonmono

    root_directory = os.path.dirname(os.path.abspath(__file__))
    directory = os.path.join(root_directory, '..', 'problems', benchmark,
                             problem)
    [mesh_directory] = list(
        filter(os.path.isdir,
               (os.path.join(directory, o)
                for o in os.listdir(directory) if o.endswith('meshes'))))
    [xml_path] = [
        os.path.join(directory, o) for o in os.listdir(directory)
        if o.endswith('xml')
    ]
    if os.path.isdir(xml_path):
        xml_path = glob.glob(os.path.join(xml_path, '*.xml'))[0]

    print(mesh_directory)
    print(xml_path)
    xml_data = etree.parse(xml_path)

    connect(use_gui=True)
    #add_data_path()
    #load_pybullet("plane.urdf")
    draw_global_system()
    set_camera_pose(camera_point=[+1, -1, 1])

    #root = xml_data.getroot()
    #print(root.items())
    for obj in xml_data.findall('/objects/obj'):
        parse_object(obj, mesh_directory)
    for robot in xml_data.findall('/robots/robot'):
        parse_robot(robot)
    wait_if_gui()
    disconnect()
Example No. 27
def simulate_printing(node_points, trajectories, time_step=0.1, speed_up=10.):
    # TODO: deprecate
    print_trajectories = [
        traj for traj in trajectories if isinstance(traj, PrintTrajectory)
    ]
    handles = []
    current_time = 0.
    current_traj = print_trajectories.pop(0)
    current_curve = current_traj.interpolate_tool(node_points,
                                                  start_time=current_time)
    current_position = current_curve.y[0]
    while True:
        print('Time: {:.3f} | Remaining: {} | Segments: {}'.format(
            current_time, len(print_trajectories), len(handles)))
        end_time = current_curve.x[-1]
        if end_time < current_time + time_step:
            handles.append(
                add_line(current_position, current_curve.y[-1], color=RED))
            if not print_trajectories:
                break
            current_traj = print_trajectories.pop(0)
            current_curve = current_traj.interpolate_tool(node_points,
                                                          start_time=end_time)
            current_position = current_curve.y[0]
            print(
                'New trajectory | Start time: {:.3f} | End time: {:.3f} | Duration: {:.3f}'
                .format(current_curve.x[0], current_curve.x[-1],
                        current_curve.x[-1] - current_curve.x[0]))
        else:
            current_time += time_step
            new_position = current_curve(current_time)
            handles.append(add_line(current_position, new_position, color=RED))
            current_position = new_position
            # TODO: longer wait for recording videos
            wait_for_duration(time_step / speed_up)
            # wait_if_gui()
    wait_if_gui()
    return handles
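
The drawing loop above advances a time-parameterized tool curve by fixed steps and appends one line segment per step; a small sketch of that segmentation, with a hypothetical straight-line curve:

def segment_curve(curve, t_start, t_end, time_step=0.1):
    # Return consecutive (start_point, end_point) pairs sampled along the curve.
    segments = []
    t, previous = t_start, curve(t_start)
    while t < t_end:
        t = min(t + time_step, t_end)
        current = curve(t)
        segments.append((previous, current))
        previous = current
    return segments

# Hypothetical curve: a straight line in 3D traced over one second.
print(segment_curve(lambda t: (t, 2 * t, 0.), 0., 1., time_step=0.25))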
Example No. 28
def test_simulation(robot, target_x, video=None):
    use_turtlebot = (get_body_name(robot) == 'turtlebot')
    if not use_turtlebot:
        target_point, target_quat = map(list, get_pose(robot))
        target_point[0] = target_x
        add_pose_constraint(robot,
                            pose=(target_point, target_quat),
                            max_force=200)  # TODO: velocity constraint?
    else:
        # p.changeDynamics(robot, robot_joints[0], # Doesn't work
        #                  maxJointVelocity=1,
        #                  jointLimitForce=1,)
        robot_joints = get_movable_joints(robot)[:3]
        print('Max velocities:', get_max_velocities(robot, robot_joints))
        print('Max forces:', get_max_forces(robot, robot_joints))
        control_joint(robot,
                      joint=robot_joints[0],
                      position=target_x,
                      velocity=0,
                      position_gain=None,
                      velocity_scale=None,
                      max_velocity=100,
                      max_force=300)
        # control_joints(robot, robot_joints, positions=[target_x, 0, PI], max_force=300)
        # velocity_control_joints(robot, robot_joints, velocities=[-2., 0, 0]) #, max_force=300)

    robot_link = get_first_link(robot)
    if video is None:
        wait_if_gui('Begin?')
    simulate(controller=condition_controller(
        lambda *args: abs(target_x - point_from_pose(
            get_link_pose(robot, robot_link))[0]) < 1e-3),
             sleep=0.01)  # TODO: velocity condition
    # print('Velocities:', get_joint_velocities(robot, robot_joints))
    # print('Torques:', get_joint_torques(robot, robot_joints))
    if video is None:
        set_renderer(enable=True)
        wait_if_gui('Finish?')
Example No. 29
def step_curve(robot, joints, path, step_size=None):
    wait_if_gui(message='Begin?')
    for i, conf in enumerate(path):
        set_joint_positions(robot, joints, conf)
        if step_size is None:
            wait_if_gui(message='{}/{} Continue?'.format(i, len(path)))
        else:
            wait_for_duration(duration=step_size)
    wait_if_gui(message='Finish?')
Example No. 30
def save_inverse_reachability(robot, arm, grasp_type, tool_link, gripper_from_base_list):
    # TODO: store value of torso and roll joint for the IK database. Sample the roll joint.
    # TODO: hash the pr2 urdf as well
    filename = IR_FILENAME.format(grasp_type, arm)
    path = get_database_file(filename)
    data = {
        'filename': filename,
        'robot': get_body_name(robot),
        'grasp_type': grasp_type,
        'arm': arm,
        'torso': get_group_conf(robot, 'torso'),
        'carry_conf': get_carry_conf(arm, grasp_type),
        'tool_link': tool_link,
        'ikfast': is_ik_compiled(),
        'gripper_from_base': gripper_from_base_list,
    }
    write_pickle(path, data)

    if has_gui():
        handles = []
        for gripper_from_base in gripper_from_base_list:
            handles.extend(draw_point(point_from_pose(gripper_from_base), color=(1, 0, 0)))
        wait_if_gui()
    return path
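
write_pickle presumably just serializes the database dictionary to disk; a standard-library sketch of the round trip (the helper names and the /tmp path are stand-ins, not the library's):

import pickle

def write_pickle(path, data):
    # Minimal stand-in for the library helper: serialize `data` to `path`.
    with open(path, 'wb') as f:
        pickle.dump(data, f)

def read_pickle(path):
    with open(path, 'rb') as f:
        return pickle.load(f)

write_pickle('/tmp/ir_left_top.pickle', {'arm': 'left', 'gripper_from_base': []})
print(read_pickle('/tmp/ir_left_top.pickle'))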