def test_frame_variant_generator(viewer):
    pose = unit_pose()
    frame = frame_from_pose(pose)

    options = {'delta_yaw': np.pi / 6, 'yaw_sample_size': 30}
    frame_gen = PyChoreoFiniteEulerAngleVariantGenerator(
        options).generate_frame_variant
    with PyChoreoClient(viewer=viewer) as client:
        draw_pose(pose)
        cnt = 0
        for frame in frame_gen(frame):
            draw_pose(pose_from_frame(frame))
            cnt += 1
        assert cnt == options['yaw_sample_size']
        wait_if_gui()
        remove_all_debug()

        # overwrite class options
        cnt = 0
        new_options = {'delta_yaw': np.pi / 3, 'yaw_sample_size': 60}
        for frame in frame_gen(frame, new_options):
            draw_pose(pose_from_frame(frame))
            cnt += 1
        assert cnt == new_options['yaw_sample_size']
        wait_if_gui()
def test_client(fixed_waam_setup, viewer):
    # https://github.com/gramaziokohler/algorithmic_details/blob/e1d5e24a34738822638a157ca29a98afe05beefd/src/algorithmic_details/accessibility/reachability_map.py#L208-L231
    urdf_filename, semantics, _ = fixed_waam_setup
    move_group = 'robotA'

    with PyChoreoClient(viewer=viewer) as client:
        robot = client.load_robot(urdf_filename)
        robot.semantics = semantics
        robot_uid = client.get_robot_pybullet_uid(robot)
        tool_link_name = robot.get_end_effector_link_name(group=move_group)

        # * draw EE pose
        tool_link = link_from_name(robot_uid, tool_link_name)
        tcp_pose = get_link_pose(robot_uid, tool_link)
        draw_pose(tcp_pose)

        assert robot_uid in get_bodies()

        wait_if_gui()
def descartes_demo(viewer=True, scaling_test=False, draw_graph=True):
    connect(use_gui=viewer)
    with HideOutput():
        robot = load_pybullet(KUKA_ROBOT_URDF, fixed_base=True)

    # * adjust camera pose (optional)
    # has_gui checks if the GUI mode is enabled
    if has_gui():
        camera_base_pt = (0,0,0)
        camera_pt = np.array(camera_base_pt) + np.array([1, -0.5, 1])
        set_camera_pose(tuple(camera_pt), camera_base_pt)

    cprint('Hello robot world! <ctrl+left mouse> to pan', 'green')
    wait_if_gui()

    ik_joints = get_movable_joints(robot)
    ik_joint_names = get_joint_names(robot, ik_joints)
    print('ik joint names: {}'.format(ik_joint_names))

    lower_limits, upper_limits = get_custom_limits(robot, ik_joints)
    print('joint lower limit: {}'.format(lower_limits))
    print('joint upper limit: {}'.format(upper_limits))
    # these joint velocity limits could also be read from the robot model (e.g. via COMPAS_FAB);
    # for simplicity, we just hardcode the values here (see the optional sketch after this dict)
    vel_limits = {0 : 6.28318530718,
                  1 : 5.23598775598,
                  2 : 6.28318530718,
                  3 : 6.6497044501,
                  4 : 6.77187749774,
                  5 : 10.7337748998}
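    # (optional) a rough sketch of filling `vel_limits` programmatically instead of
    # hardcoding; this assumes the installed pybullet_planning version exposes
    # `get_max_velocity`, which reads the joint velocity limit from the URDF:
    # from pybullet_planning import get_max_velocity
    # vel_limits = {i : get_max_velocity(robot, joint) for i, joint in enumerate(ik_joints)}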

    # set the robot to a "comfortable" start configuration, optional
    robot_start_conf = [0,-np.pi/2,np.pi/2,0,0,0]
    set_joint_positions(robot, ik_joints, robot_start_conf)

    tool_link = link_from_name(robot, EE_LINK_NAME)
    robot_base_link = link_from_name(robot, ROBOT_BASE_LINK_NAME)
    # tool_from_root = get_relative_pose(robot, root_link, tool_link)

    # * draw EE pose
    if has_gui() :
        tcp_pose = get_link_pose(robot, tool_link)
        draw_pose(tcp_pose)

    circle_center = np.array([0.6, 0, 0.2])
    circle_r = 0.1
    # * generate multiple circles
    ee_poses = []
    full_angle = 2*2*np.pi
    # total number of path points, one path point per 5 degrees
    n_pt = int(full_angle / (np.pi/180 * 5))
    # full_angle = np.pi
    for a in np.linspace(0.0, full_angle, num=n_pt):
        pt = circle_center + circle_r*np.array([np.cos(a), np.sin(a), 0])
        circ_pose = multiply(Pose(point=pt, euler=Euler(yaw=a+np.pi/2)), Pose(euler=Euler(roll=np.pi*3/4)))
        draw_pose(circ_pose, length=0.01)
        ee_poses.append(circ_pose)

    # def get_ee_sample_fn(roll_gen, pitch_gen, yaw_gen):
    #     def ee_sample_fn(ee_pose):
    #         # a finite generator
    #         for roll, pitch, yaw in product(roll_gen, pitch_gen, yaw_gen):
    #             yield multiply(ee_pose, Pose(euler=Euler(roll=roll, pitch=pitch, yaw=yaw)))
    #     return ee_sample_fn
    # roll_sample_size = 3
    # pitch_sample_size = 3
    # yaw_sample_size = 1
    # delta_roll = np.pi/12
    # delta_pitch = np.pi/12
    # roll_gen = np.linspace(-delta_roll, delta_roll, num=roll_sample_size)
    # pitch_gen = np.linspace(-delta_pitch, delta_pitch, num=pitch_sample_size)
    # yaw_gen = np.linspace(0.0, 2*np.pi, num=yaw_sample_size)
    # ee_sample_fn = get_ee_sample_fn(roll_gen, pitch_gen, yaw_gen)

    # for ep in ee_sample_fn(ee_poses[0]):
    #     draw_pose(ep, length=0.03)

    # * baseline, keeping the EE z axis rotational dof fixed
    st_time = time.time()
    path = plan_cartesian_motion(robot, robot_base_link, tool_link, ee_poses)
    print('Solving time: {}'.format(elapsed_time(st_time)))
    if path is None:
        cprint('Gradient-based ik cartesian planning cannot find a plan!', 'red')
    else:
        cprint('Gradient-based ik cartesian planning found a plan!', 'green')
        time_step = 0.03
        for conf in path:
            set_joint_positions(robot, ik_joints, conf)
            wait_for_duration(time_step)
        if draw_graph:
            print('drawing graph...')
            plot_joint_val(path, lower_limits, upper_limits, method='GradIK')
    print('='*20)

    # * Now, let's see whether a ladder graph search, without releasing the EE dof, can give us a good trajectory
    # (you should be doing something similar in your setup, right?)

    # First, we need ikfast to obtain all ik solution variants for a given pose, as Descartes does
    ik_fn = ikfast_kuka_kr6_r900.get_ik

    # we have to wrap the ik function before feeding it into pychoreo
    def get_sample_ik_fn(robot, ik_fn, robot_base_link, ik_joints, tool_from_root=None):
        def sample_ik_fn(world_from_tcp):
            if tool_from_root:
                world_from_tcp = multiply(world_from_tcp, tool_from_root)
            return sample_tool_ik(ik_fn, robot, ik_joints, world_from_tcp, robot_base_link, get_all=True)
        return sample_ik_fn
    # ik generation function stays the same for all cartesian processes
    sample_ik_fn = get_sample_ik_fn(robot, ik_fn, robot_base_link, ik_joints)
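    # note: sample_ik_fn(world_from_tcp) is expected to return a list of joint
    # configurations (all ikfast solutions for that pose), or an empty result if none exists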

    # we ignore self-collisions in this tutorial, so the collision_fn below only checks joint limits
    # See https://github.com/yijiangh/pybullet_planning/blob/dev/tests/test_collisions.py
    # for more examples of using the collision function
    collision_fn = get_collision_fn(robot, ik_joints, obstacles=[],
                                       attachments=[], self_collisions=False,
                                    #    disabled_collisions=disabled_collisions,
                                    #    extra_disabled_collisions=extra_disabled_collisions,
                                       custom_limits={})
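    # a hedged sketch of a stricter collision function: to also check self-collisions,
    # one could pass self_collisions=True plus a robot-specific set of allowed
    # (touching) link pairs, as hinted by the commented kwargs above:
    # collision_fn = get_collision_fn(robot, ik_joints, obstacles=[], attachments=[],
    #                                 self_collisions=True,
    #                                 disabled_collisions=set(),  # robot-specific link pairs allowed to touch
    #                                 custom_limits={})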

    # Let's check if our ik sampler is working properly
    # uncomment the following if you want to play with this
    # for p in ee_poses:
    #     print('-'*5)
    #     pb_q = inverse_kinematics(robot, tool_link, p)
    #     if pb_q is None:
    #         cprint('pb ik can\'t find an ik solution', 'red')
    #     qs = sample_ik_fn(p)
    #     if qs is not None:
    #         cprint('But Ikfast does find one! {}'.format(qs[0]), 'green')
    #         # set_joint_positions(robot, ik_joints, qs[0])
    #         # wait_if_gui()
    #     else:
    #         cprint('ikfast can\'t find an ik solution', 'red')

    # * Ok, now we have the ik sampler and collision function ready, let's see if we can find a valid cartesian trajectory!
    ee_vel = 0.005 # m/s

    # st_time = time.time()
    # path, cost = plan_cartesian_motion_lg(robot, ik_joints, ee_poses, sample_ik_fn, collision_fn, \
    #     custom_vel_limits=vel_limits, ee_vel=ee_vel)
    # print('Solving time: {}'.format(elapsed_time(st_time)))
    # if path is None:
    #     cprint('ladder graph (w/o releasing dof) cartesian planning cannot find a plan!', 'red')
    # else:
    #     cprint('ladder graph (w/o releasing dof) cartesian planning find a plan!', 'green')
    #     cprint('Cost: {}'.format(cost), 'yellow')
    #     time_step = 0.03
    #     for conf in path:
    #         # cprint('conf: {}'.format(conf))
    #         set_joint_positions(robot, ik_joints, conf)
    #         wait_for_duration(time_step)
    #     if draw_graph:
    #         print('drawing graph...')
    #         plot_joint_val(path, lower_limits, upper_limits, method='IKFast+LadderGraph')
    # print('='*20)

    # * Now, let's see if releasing EE z axis dof can bring down the cost
    # First, let's build an end effector pose sampler to release the EE z axis dof!
    ## * EE z axis dof generator
    def get_ee_sample_fn(roll_gen, pitch_gen, yaw_gen):
        def ee_sample_fn(ee_pose):
            # a finite generator
            for roll, pitch, yaw in product(roll_gen, pitch_gen, yaw_gen):
                yield multiply(ee_pose, Pose(euler=Euler(roll=roll, pitch=pitch, yaw=yaw)))
        return ee_sample_fn

    # by increasing these sample sizes we can see the cost go down
    roll_sample_size = 10
    pitch_sample_size = 10
    yaw_sample_size = 10
    delta_roll = np.pi/6
    delta_pitch = np.pi/6
    delta_yaw = np.pi/6
    roll_gen = np.linspace(-delta_roll, delta_roll, num=roll_sample_size)
    pitch_gen = np.linspace(-delta_pitch, delta_pitch, num=pitch_sample_size)
    yaw_gen = np.linspace(-delta_yaw, delta_yaw, num=yaw_sample_size)
    ee_sample_fn = get_ee_sample_fn(roll_gen, pitch_gen, yaw_gen)
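    # note: this sampler yields roll_sample_size * pitch_sample_size * yaw_sample_size
    # (10*10*10 = 1000 here) pose variants per path point, so the ladder graph gets
    # larger and the solve time grows accordingly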

    st_time = time.time()
    path, cost = plan_cartesian_motion_lg(robot, ik_joints, ee_poses, sample_ik_fn, collision_fn, sample_ee_fn=ee_sample_fn, \
        custom_vel_limits=vel_limits, ee_vel=ee_vel)
    print('Solving time: {}'.format(elapsed_time(st_time)))

    # TODO: plot ee z axis deviation plot

    # the ladder graph cost is just the summation of all adjacent joint differences,
    # so the following assertion should hold
    # conf_array = np.array(path)
    # conf_diff = np.abs(conf_array[:-1,:] - conf_array[1:,:])
    # np_cost = np.sum(conf_diff)
    # assert np.allclose(np_cost, cost), '{} - {}'.format(np_cost, cost)

    if path is None:
        cprint('ladder graph (releasing EE z dof) cartesian planning cannot find a plan!', 'red')
    else:
        cprint('ladder graph (releasing EE z dof) cartesian planning found a plan!', 'cyan')
        cprint('Cost: {}'.format(cost), 'yellow')
        time_step = 0.03
        for conf in path:
            # cprint('conf: {}'.format(conf))
            set_joint_positions(robot, ik_joints, conf)
            wait_for_duration(time_step)
        if draw_graph:
            print('drawing graph...')
            plot_joint_val(path, lower_limits, upper_limits, method='IKFast+LadderGraph+release z dof-res{}'.format(yaw_sample_size))
    print('='*20)

    # Now, let's do a scaling test:
    # if scaling_test:
        # res_sc = list(range(4,30))
        # cost_sc = []
        # for res in res_sc:
        #     yaw_sample_size = res
        #     yaw_gen = np.linspace(0.0, 2*np.pi, num=yaw_sample_size)
        #     ee_sample_fn = get_ee_sample_fn(yaw_gen)
        #     path, cost = plan_cartesian_motion_lg(robot, ik_joints, ee_poses, sample_ik_fn, collision_fn, sample_ee_fn=ee_sample_fn)
        #     assert path is not None
        #     cost_sc.append(cost)
        #     if res % 5 == 0:
        #         if draw_graph:
        #             plot_joint_val(path, lower_limits, upper_limits, method='IKFast+LadderGraph+release z dof-res{}'.format(yaw_sample_size))

        # fig, ax = plt.subplots()
        # ax.plot(res_sc, cost_sc)
        # ax.set(xlabel='yaw angle discr resolution', ylabel='total joint difference cost',
        #        title='Total joint diff scaling test')
        # ax.grid()
        # fig.savefig(os.path.join(HERE, 'images', 'tota_diff-scaling.png'))
        # # plt.show()

    wait_if_gui('Press enter to exit')
def assembly_demo(viewer=True, debug=False):
    connect(use_gui=viewer)

    # * zoom in so we can see it, this is optional
    camera_base_pt = (0, 0, 0)
    camera_pt = np.array(camera_base_pt) + np.array([0.1, -0.1, 0.1])
    set_camera_pose(tuple(camera_pt), camera_base_pt)

    # * a simple example showing two artificial beams colliding at the interface
    # compared with the mesh-based approach below, this "brep-like" approach
    # has less numerical error at the interface
    cprint('=' * 10)
    cprint('Checking collisions between two created boxes.', 'cyan')
    w = 0.05
    l = 0.5
    h = 0.01
    eps_list = [-1e-14, 0, 1e-3, 1e-6, 1e-14]
    b0_body = create_box(w, l, h, color=apply_alpha(RED, 0.5))
    b1_body = create_box(w, l, h, color=apply_alpha(BLUE, 0.5))
    for eps in eps_list:
        print('=' * 5)
        cprint('Perturbation distance: {}'.format(eps), 'yellow')
        # the beam on the right is perturbed towards the left by eps
        set_pose(b0_body, Pose(point=[0, l / 2 - eps, 0]))
        set_pose(b1_body, Pose(point=[0, -l / 2, 0]))

        is_collided = pairwise_collision(b0_body, b1_body)
        if is_collided:
            cprint(
                'Collision detected! The penetration depth shown below should be close to eps={}'
                .format(eps), 'red')
            cr = pairwise_collision_info(b0_body, b1_body)
            draw_collision_diagnosis(cr, focus_camera=True)
        else:
            cprint('No collision detected between b0 and b1.', 'green')
        assert eps <= 0 or is_collided

    # * now let's load Kathrin's beams and check pairwise collisions among them
    # notice that in pybullet everything is in METERS, and each beam is already posed
    # in the world inside its obj file (directly exported from Rhino).
    cprint('=' * 10)
    cprint('Checking collisions among loaded obj elements.', 'cyan')
    beam_from_names = {}
    for i in range(150):
        beam_path = os.path.join(ASSEMBLY_OBJ_DIR, 'element_{}.obj'.format(i))
        beam_from_names[i] = create_obj(beam_path,
                                        scale=1,
                                        color=apply_alpha(BLUE, 0.2))

    collided_pairs = set()
    for i in range(150):
        for j in range(i + 1, 150):
            if (i, j) not in collided_pairs and (j, i) not in collided_pairs:
                if pairwise_collision(beam_from_names[i], beam_from_names[j]):
                    # using meshes (compared to the `create_box` approach above) induces numerical errors,
                    # so in our case the touching faces are reported as collisions.
                    # Thus, we query the closest-point info and use the penetration depth to filter these "touching" cases out
                    # reference: https://docs.google.com/document/d/10sXEhzFRSnvFcl3XxNGhnD4N2SedqwdAvK3dsihxVUA/edit#heading=h.cb0co8y2vuvc
                    cr = body_collision_info(beam_from_names[i],
                                             beam_from_names[j])
                    penetration_depth = get_distance(cr[0][5], cr[0][6])
                    # the number `3e-3` below is based on some manual experiments,
                    # and might need to be adjusted for specific scales and input models
                    if penetration_depth > 3e-3:
                        cprint(
                            '({}-{}) colliding : penetrating depth {:.4E}'.
                            format(i, j, penetration_depth), 'red')
                        collided_pairs.add((i, j))
                        if debug:
                            draw_collision_diagnosis(cr, focus_camera=False)

    # all the colliding information is in `collided_pairs`
    cprint(
        'In total we\'ve found {} colliding element pairs!'.format(
            len(collided_pairs)), 'green')
    wait_if_gui('End.')
def test_collision_checker(abb_irb4600_40_255_setup, itj_TC_g1_cms,
                           itj_beam_cm, column_obstacle_cm, base_plate_cm,
                           itj_tool_changer_grasp_transf,
                           itj_gripper_grasp_transf, itj_beam_grasp_transf,
                           tool_type, itj_tool_changer_urdf_path,
                           itj_g1_urdf_path, viewer, diagnosis):
    # modified from https://github.com/yijiangh/pybullet_planning/blob/dev/tests/test_collisions.py
    urdf_filename, semantics = abb_irb4600_40_255_setup

    move_group = 'bare_arm'
    ee_touched_link_names = ['link_6']

    with PyChoreoClient(viewer=viewer) as client:
        with LockRenderer():
            robot = client.load_robot(urdf_filename)
            robot.semantics = semantics
            client.disabled_collisions = robot.semantics.disabled_collisions

            if tool_type == 'static':
                for _, ee_cm in itj_TC_g1_cms.items():
                    client.add_collision_mesh(ee_cm)
            else:
                client.add_tool_from_urdf('TC', itj_tool_changer_urdf_path)
                client.add_tool_from_urdf('g1', itj_g1_urdf_path)

            # * add static obstacles
            client.add_collision_mesh(base_plate_cm)
            client.add_collision_mesh(column_obstacle_cm)

        ik_joint_names = robot.get_configurable_joint_names(group=move_group)
        ik_joint_types = robot.get_joint_types_by_names(ik_joint_names)
        flange_link_name = robot.get_end_effector_link_name(group=move_group)

        tool0_tf = Transformation.from_frame(
            client.get_link_frame_from_name(robot, flange_link_name))
        tool0_from_tool_changer_base = itj_tool_changer_grasp_transf
        tool0_from_gripper_base = itj_gripper_grasp_transf
        client.set_object_frame(
            '^{}'.format('TC'),
            Frame.from_transformation(tool0_tf * tool0_from_tool_changer_base))
        client.set_object_frame(
            '^{}'.format('g1'),
            Frame.from_transformation(tool0_tf * tool0_from_gripper_base))

        names = client._get_collision_object_names('^{}'.format('g1')) + \
            client._get_collision_object_names('^{}'.format('TC'))
        for ee_name in names:
            attach_options = {'robot': robot}
            if tool_type == 'actuated':
                attached_child_link_name = 'toolchanger_base' if 'TC' in ee_name else 'gripper_base'
                attach_options.update(
                    {'attached_child_link_name': attached_child_link_name})
            client.add_attached_collision_mesh(AttachedCollisionMesh(
                CollisionMesh(None, ee_name),
                flange_link_name,
                touch_links=ee_touched_link_names),
                                               options=attach_options)
        # client._print_object_summary()
        # wait_if_gui('EE attached.')

        if tool_type == 'actuated':
            # gripper jaw joint limits: lower 0.0008, upper 0.01
            tool_bodies = client._get_bodies('^{}'.format('itj_PG500'))
            tool_conf = Configuration(
                values=[0.01, 0.01],
                types=[Joint.PRISMATIC, Joint.PRISMATIC],
                joint_names=['joint_gripper_jaw_l', 'joint_gripper_jaw_r'])
            for b in tool_bodies:
                client._set_body_configuration(b, tool_conf)
            wait_if_gui('Open')

            tool_conf = Configuration(
                values=[0.0008, 0.0008],
                types=[Joint.PRISMATIC, Joint.PRISMATIC],
                joint_names=['joint_gripper_jaw_l', 'joint_gripper_jaw_r'])
            for b in tool_bodies:
                client._set_body_configuration(b, tool_conf)
            wait_if_gui('Close')

        cprint('safe start conf', 'green')
        conf = Configuration(values=[0.] * 6,
                             types=ik_joint_types,
                             joint_names=ik_joint_names)
        assert not client.check_collisions(
            robot, conf, options={'diagnosis': diagnosis})

        cprint('joint over limit', 'cyan')
        conf = Configuration(values=[0., 0., 1.5, 0, 0, 0],
                             types=ik_joint_types,
                             joint_names=ik_joint_names)
        assert client.check_collisions(robot,
                                       conf,
                                       options={'diagnosis': diagnosis})

        cprint('attached gripper-obstacle collision - column', 'cyan')
        vals = [
            -0.33161255787892263, -0.43633231299858238, 0.43633231299858238,
            -1.0471975511965976, 0.087266462599716474, 0.0
        ]
        conf = Configuration(values=vals,
                             types=ik_joint_types,
                             joint_names=ik_joint_names)
        # client.set_robot_configuration(robot, conf)
        # wait_if_gui()
        assert client.check_collisions(robot,
                                       conf,
                                       options={'diagnosis': diagnosis})

        # * attach beam
        client.add_collision_mesh(itj_beam_cm)
        tool0_tf = Transformation.from_frame(
            client.get_link_frame_from_name(robot, flange_link_name))
        tool0_from_beam_base = itj_beam_grasp_transf
        client.set_object_frame(
            '^{}$'.format('itj_beam_b2'),
            Frame.from_transformation(tool0_tf * tool0_from_beam_base))
        client.add_attached_collision_mesh(AttachedCollisionMesh(
            CollisionMesh(None, 'itj_beam_b2'),
            flange_link_name,
            touch_links=[]),
                                           options={'robot': robot})
        # wait_if_gui('beam attached.')

        cprint('attached beam-robot body self collision', 'cyan')
        vals = [
            0.73303828583761843, -0.59341194567807209, 0.54105206811824214,
            -0.17453292519943295, 1.064650843716541, 1.7278759594743862
        ]
        conf = Configuration(values=vals,
                             types=ik_joint_types,
                             joint_names=ik_joint_names)
        assert client.check_collisions(robot,
                                       conf,
                                       options={'diagnosis': diagnosis})

        cprint('attached beam-obstacle collision - column', 'cyan')
        vals = [
            0.087266462599716474, -0.19198621771937624, 0.20943951023931956,
            0.069813170079773182, 1.2740903539558606, 0.069813170079773182
        ]
        conf = Configuration(values=vals,
                             types=ik_joint_types,
                             joint_names=ik_joint_names)
        assert client.check_collisions(robot,
                                       conf,
                                       options={'diagnosis': diagnosis})

        cprint('attached beam-obstacle collision - ground', 'cyan')
        vals = [
            -0.017453292519943295, 0.6108652381980153, 0.20943951023931956,
            1.7627825445142729, 1.2740903539558606, 0.069813170079773182
        ]
        conf = Configuration(values=vals,
                             types=ik_joint_types,
                             joint_names=ik_joint_names)
        assert client.check_collisions(robot,
                                       conf,
                                       options={'diagnosis': diagnosis})

        cprint('robot link-obstacle collision - column', 'cyan')
        vals = [
            -0.41887902047863912, 0.20943951023931956, 0.20943951023931956,
            1.7627825445142729, 1.2740903539558606, 0.069813170079773182
        ]
        conf = Configuration(values=vals,
                             types=ik_joint_types,
                             joint_names=ik_joint_names)
        assert client.check_collisions(robot,
                                       conf,
                                       options={'diagnosis': diagnosis})
        cprint('robot link-obstacle collision - ground', 'cyan')
        vals = [
            0.33161255787892263, 1.4660765716752369, 0.27925268031909273,
            0.17453292519943295, 0.22689280275926285, 0.54105206811824214
        ]
        conf = Configuration(values=vals,
                             types=ik_joint_types,
                             joint_names=ik_joint_names)
        assert client.check_collisions(robot,
                                       conf,
                                       options={'diagnosis': diagnosis})

        cprint('Sweeping collision', 'cyan')
        vals = [
            -0.12217304763960307, -0.73303828583761843, 0.83775804095727824,
            -2.4609142453120048, 1.2391837689159739, -0.85521133347722145
        ]
        conf1 = Configuration(values=vals,
                              types=ik_joint_types,
                              joint_names=ik_joint_names)
        assert not client.check_collisions(
            robot, conf1, options={'diagnosis': diagnosis})
        # wait_if_gui()

        vals = [
            -0.12217304763960307, -0.73303828583761843, 0.83775804095727824,
            -2.4958208303518914, -1.5533430342749532, -0.85521133347722145
        ]
        conf2 = Configuration(values=vals,
                              types=ik_joint_types,
                              joint_names=ik_joint_names)
        assert not client.check_collisions(
            robot, conf2, options={'diagnosis': diagnosis})
        # wait_if_gui()

        assert client.check_sweeping_collisions(robot,
                                                conf1,
                                                conf2,
                                                options={
                                                    'diagnosis': diagnosis,
                                                    'line_width': 3.0
                                                })

        wait_if_gui("Finished.")
def test_ik(fixed_waam_setup, viewer, ik_engine):
    urdf_filename, semantics, robotA_tool = fixed_waam_setup
    move_group = 'robotA'

    with PyChoreoClient(viewer=viewer) as client:
        robot = client.load_robot(urdf_filename)
        robot.semantics = semantics
        robot_uid = client.get_robot_pybullet_uid(robot)

        base_link_name = robot.get_base_link_name(group=move_group)
        ik_joint_names = robot.get_configurable_joint_names(group=move_group)
        tool_link_name = robot.get_end_effector_link_name(group=move_group)
        base_frame = robot.get_base_frame(group=move_group)

        if ik_engine == 'default-single':
            ik_solver = None
        elif ik_engine == 'lobster-analytical':
            ik_solver = InverseKinematicsSolver(robot, move_group,
                                                ik_abb_irb4600_40_255,
                                                base_frame, robotA_tool.frame)
        elif ik_engine == 'ikfast-analytical':
            ikfast_fn = get_ik_fn_from_ikfast(ikfast_abb_irb4600_40_255.get_ik)
            ik_solver = InverseKinematicsSolver(robot, move_group, ikfast_fn,
                                                base_frame, robotA_tool.frame)
        else:
            raise ValueError('invalid ik engine name.')

        ik_joints = joints_from_names(robot_uid, ik_joint_names)
        tool_link = link_from_name(robot_uid, tool_link_name)
        ee_poses = compute_circle_path()

        if ik_solver is not None:
            # replace default ik function with a customized one
            client.planner.inverse_kinematics = ik_solver.inverse_kinematics_function(
            )

        ik_time = 0
        failure_cnt = 0
        # ik function sanity check
        for p in ee_poses:
            frame_WCF = frame_from_pose(p)
            st_time = time.time()
            qs = client.inverse_kinematics(robot,
                                           frame_WCF,
                                           group=move_group,
                                           options={})
            ik_time += elapsed_time(st_time)

            if qs is None:
                cprint('no ik solution found!', 'red')
                # assert False, 'no ik solution found!'
                failure_cnt += 1
            elif isinstance(qs, list):
                if not (len(qs) > 0 and any([qv is not None for qv in qs])):
                    cprint('no ik solution found', 'red')
                    failure_cnt += 1
                if len(qs) > 0:
                    # cprint('{} solutions found!'.format(len(qs)), 'green')
                    for q in randomize(qs):
                        if q is not None:
                            assert isinstance(q, Configuration)
                            client.set_robot_configuration(robot, q)
                            # set_joint_positions(robot_uid, ik_joints, q.values)
                            tcp_pose = get_link_pose(robot_uid, tool_link)
                            assert_almost_equal(tcp_pose[0], p[0], decimal=3)
                            assert_almost_equal(quat_angle_between(
                                tcp_pose[1], p[1]),
                                                0,
                                                decimal=3)
            elif isinstance(qs, Configuration):
                # cprint('Single solutions found!', 'green')
                q = qs
                # set_joint_positions(robot_uid, ik_joints, q.values)
                client.set_robot_configuration(robot, q)
                tcp_pose = get_link_pose(robot_uid, tool_link)
                assert_almost_equal(tcp_pose[0], p[0], decimal=3)
                assert_almost_equal(quat_angle_between(tcp_pose[1], p[1]),
                                    0,
                                    decimal=3)
            else:
                raise ValueError('invalid ik return.')
            wait_if_gui('FK - IK agrees.')
        cprint(
            '{} | Success {}/{} | Average ik time: {} | avg over {} calls.'.
            format(ik_engine,
                   len(ee_poses) - failure_cnt, len(ee_poses),
                   ik_time / len(ee_poses), len(ee_poses)), 'cyan')
def test_plan_motion(abb_irb4600_40_255_setup, itj_TC_g1_cms, itj_beam_cm,
                     column_obstacle_cm, base_plate_cm,
                     itj_tool_changer_grasp_transf, itj_gripper_grasp_transf,
                     itj_beam_grasp_transf, tool_type,
                     itj_tool_changer_urdf_path, itj_g1_urdf_path, viewer,
                     diagnosis):
    # modified from https://github.com/yijiangh/pybullet_planning/blob/dev/tests/test_collisions.py
    urdf_filename, semantics = abb_irb4600_40_255_setup

    move_group = 'bare_arm'
    ee_touched_link_names = ['link_6']

    with PyChoreoClient(viewer=viewer) as client:
        with LockRenderer():
            robot = client.load_robot(urdf_filename)
            robot.semantics = semantics
            client.disabled_collisions = robot.semantics.disabled_collisions

            if tool_type == 'static':
                for _, ee_cm in itj_TC_g1_cms.items():
                    client.add_collision_mesh(ee_cm)
            else:
                client.add_tool_from_urdf('TC', itj_tool_changer_urdf_path)
                client.add_tool_from_urdf('g1', itj_g1_urdf_path)

            # * add static obstacles
            client.add_collision_mesh(base_plate_cm)
            client.add_collision_mesh(column_obstacle_cm)

        ik_joint_names = robot.get_configurable_joint_names(group=move_group)
        ik_joint_types = robot.get_joint_types_by_names(ik_joint_names)
        flange_link_name = robot.get_end_effector_link_name(group=move_group)

        tool0_tf = Transformation.from_frame(
            client.get_link_frame_from_name(robot, flange_link_name))
        tool0_from_tool_changer_base = itj_tool_changer_grasp_transf
        tool0_from_gripper_base = itj_gripper_grasp_transf
        client.set_object_frame(
            '^{}'.format('TC'),
            Frame.from_transformation(tool0_tf * tool0_from_tool_changer_base))
        client.set_object_frame(
            '^{}'.format('g1'),
            Frame.from_transformation(tool0_tf * tool0_from_gripper_base))

        names = client._get_collision_object_names('^{}'.format('g1')) + \
            client._get_collision_object_names('^{}'.format('TC'))
        for ee_name in names:
            attach_options = {'robot': robot}
            if tool_type == 'actuated':
                attached_child_link_name = 'toolchanger_base' if 'TC' in ee_name else 'gripper_base'
                attach_options.update(
                    {'attached_child_link_name': attached_child_link_name})
            client.add_attached_collision_mesh(AttachedCollisionMesh(
                CollisionMesh(None, ee_name),
                flange_link_name,
                touch_links=ee_touched_link_names),
                                               options=attach_options)

        # * attach beam
        client.add_collision_mesh(itj_beam_cm)
        tool0_tf = Transformation.from_frame(
            client.get_link_frame_from_name(robot, flange_link_name))
        tool0_from_beam_base = itj_beam_grasp_transf
        client.set_object_frame(
            '^{}$'.format('itj_beam_b2'),
            Frame.from_transformation(tool0_tf * tool0_from_beam_base))
        client.add_attached_collision_mesh(AttachedCollisionMesh(
            CollisionMesh(None, 'itj_beam_b2'),
            flange_link_name,
            touch_links=[]),
                                           options={'robot': robot})
        wait_if_gui('beam attached.')

        vals = [
            -1.4660765716752369, -0.22689280275926285, 0.27925268031909273,
            0.17453292519943295, 0.22689280275926285, -0.22689280275926285
        ]
        start_conf = Configuration(values=vals,
                                   types=ik_joint_types,
                                   joint_names=ik_joint_names)
        # client.set_robot_configuration(robot, start_conf)
        # wait_if_gui()

        # vals = [0.05235987755982989, -0.087266462599716474, -0.05235987755982989, 1.7104226669544429, 0.13962634015954636, -0.43633231299858238]
        vals = [
            0.034906585039886591, 0.68067840827778847, 0.15707963267948966,
            -0.89011791851710809, -0.034906585039886591, -2.2514747350726849
        ]
        end_conf = Configuration(values=vals,
                                 types=ik_joint_types,
                                 joint_names=ik_joint_names)
        # client.set_robot_configuration(robot, end_conf)
        # wait_if_gui()

        plan_options = {'diagnosis': True, 'resolutions': 0.05}

        goal_constraints = robot.constraints_from_configuration(
            end_conf, [0.01], [0.01], group=move_group)

        st_time = time.time()
        trajectory = client.plan_motion(robot,
                                        goal_constraints,
                                        start_configuration=start_conf,
                                        group=move_group,
                                        options=plan_options)
        print('Solving time: {}'.format(elapsed_time(st_time)))

        if trajectory is None:
            cprint('Client motion planner CANNOT find a plan!', 'red')
            # assert False, 'Client motion planner CANNOT find a plan!'
            # TODO warning
        else:
            cprint('Client motion planning found a plan!', 'green')
            wait_if_gui('Start sim.')
            time_step = 0.03
            for traj_pt in trajectory.points:
                client.set_robot_configuration(robot, traj_pt)
                wait_for_duration(time_step)

        wait_if_gui("Finished.")
def test_circle_cartesian(fixed_waam_setup, viewer, planner_ik_conf):
    urdf_filename, semantics, robotA_tool = fixed_waam_setup
    planner_id, ik_engine = planner_ik_conf

    move_group = 'robotA'
    print('\n')
    print('=' * 10)
    cprint(
        'Cartesian planner {} with IK engine {}'.format(planner_id, ik_engine),
        'yellow')

    with PyChoreoClient(viewer=viewer) as client:
        robot = client.load_robot(urdf_filename)
        robot.semantics = semantics
        robot_uid = client.get_robot_pybullet_uid(robot)

        base_link_name = robot.get_base_link_name(group=move_group)
        ik_joint_names = robot.get_configurable_joint_names(group=move_group)
        tool_link_name = robot.get_end_effector_link_name(group=move_group)
        base_frame = robot.get_base_frame(group=move_group)

        if ik_engine == 'default-single':
            ik_solver = None
        elif ik_engine == 'lobster-analytical':
            ik_solver = InverseKinematicsSolver(robot, move_group,
                                                ik_abb_irb4600_40_255,
                                                base_frame, robotA_tool.frame)
        elif ik_engine == 'ikfast-analytical':
            ikfast_fn = get_ik_fn_from_ikfast(ikfast_abb_irb4600_40_255.get_ik)
            ik_solver = InverseKinematicsSolver(robot, move_group, ikfast_fn,
                                                base_frame, robotA_tool.frame)
        else:
            raise ValueError('invalid ik engine name.')

        init_conf = Configuration.from_revolute_values(np.zeros(6),
                                                       ik_joint_names)

        # replace default ik function with a customized one
        if ik_solver is not None:
            client.planner.inverse_kinematics = ik_solver.inverse_kinematics_function(
            )

        tool_link = link_from_name(robot_uid, tool_link_name)
        robot_base_link = link_from_name(robot_uid, base_link_name)
        ik_joints = joints_from_names(robot_uid, ik_joint_names)

        # * draw EE pose
        tcp_pose = get_link_pose(robot_uid, tool_link)
        draw_pose(tcp_pose)

        # * generate multiple circles
        circle_center = np.array([2, 0, 0.2])
        circle_r = 0.2
        # full_angle = np.pi
        # full_angle = 2*2*np.pi
        angle_range = (-0.5 * np.pi, 0.5 * np.pi)
        # total num of path pts, one path point per 5 degree
        ee_poses = compute_circle_path(circle_center, circle_r, angle_range)
        ee_frames_WCF = [frame_from_pose(ee_pose) for ee_pose in ee_poses]

        options = {'planner_id': planner_id}
        if planner_id == 'LadderGraph':
            client.set_robot_configuration(robot, init_conf)
            st_time = time.time()

            # options.update({'ik_function' : lambda pose: compute_inverse_kinematics(ikfast_abb_irb4600_40_255.get_ik, pose, sampled=[])})

            trajectory = client.plan_cartesian_motion(robot,
                                                      ee_frames_WCF,
                                                      group=move_group,
                                                      options=options)
            cprint(
                'W/o frame variant solving time: {}'.format(
                    elapsed_time(st_time)), 'blue')
            cprint(
                'Cost: {}'.format(
                    compute_trajectory_cost(trajectory,
                                            init_conf_val=init_conf.values)),
                'blue')
            print('-' * 5)

            f_variant_options = {'delta_yaw': np.pi / 3, 'yaw_sample_size': 5}
            options.update({
                'frame_variant_generator':
                PyChoreoFiniteEulerAngleVariantGenerator(
                    options=f_variant_options)
            })
            print('With frame variant config: {}'.format(f_variant_options))

        client.set_robot_configuration(robot, init_conf)
        st_time = time.time()
        trajectory = client.plan_cartesian_motion(robot,
                                                  ee_frames_WCF,
                                                  group=move_group,
                                                  options=options)
        cprint(
            '{} solving time: {}'.format(
                'With frame variant '
                if planner_id == 'LadderGraph' else 'Direct',
                elapsed_time(st_time)), 'cyan')
        cprint(
            'Cost: {}'.format(
                compute_trajectory_cost(trajectory,
                                        init_conf_val=init_conf.values)),
            'cyan')

        if trajectory is None:
            cprint(
                'Client Cartesian planner {} CANNOT find a plan!'.format(
                    planner_id), 'red')
        else:
            cprint(
                'Client Cartesian planner {} found a plan!'.format(planner_id),
                'green')
            wait_if_gui('Start sim.')
            time_step = 0.03
            for traj_pt in trajectory.points:
                client.set_robot_configuration(robot, traj_pt)
                wait_for_duration(time_step)

        wait_if_gui()
def test_collision_checker(rfl_setup, itj_beam_cm, viewer, diagnosis):
    # modified from https://github.com/yijiangh/pybullet_planning/blob/dev/tests/test_collisions.py
    urdf_filename, semantics = rfl_setup
    move_group = 'robot11_eaXYZ'  # 'robot12_eaYZ'

    with PyChoreoClient(viewer=viewer) as client:
        cprint(urdf_filename, 'green')
        with LockRenderer():
            robot = client.load_robot(urdf_filename)
        robot.semantics = semantics
        robot_uid = client.get_robot_pybullet_uid(robot)

        ik_joint_names = robot.get_configurable_joint_names(group=move_group)
        ik_joint_types = robot.get_joint_types_by_names(ik_joint_names)
        flange_link_name = robot.get_end_effector_link_name(group=move_group)

        ee_touched_link_names = ['robot12_tool0', 'robot12_link_6']
        # ee_acms = [AttachedCollisionMesh(ee_cm, flange_link_name, ee_touched_link_names) for ee_cm in itj_TC_PG500_cms]
        # beam_acm = AttachedCollisionMesh(itj_beam_cm, flange_link_name, ee_touched_link_names)

        draw_pose(unit_pose(), length=1.)

        cam = rfl_camera()
        set_camera_pose(cam['target'], cam['location'])

        ik_joints = joints_from_names(robot_uid, ik_joint_names)
        flange_link = link_from_name(robot_uid, flange_link_name)
        # robot_base_link = link_from_name(robot_uid, base_link_name)

        r11_start_conf_vals = np.array([22700.0, 0.0, -4900.0, \
            0.0, -80.0, 65.0, 65.0, 20.0, -20.0])
        r12_start_conf_vals = np.array([-4056.0883789999998, -4000.8486330000001, \
            0.0, -22.834741999999999, -30.711554, 0.0, 57.335655000000003, 0.0])
        full_start_conf = to_rlf_robot_full_conf(r11_start_conf_vals,
                                                 r12_start_conf_vals)
        # full_joints = joints_from_names(robot_uid, full_start_conf.joint_names)

        client.set_robot_configuration(robot, full_start_conf)

        # # * add attachment
        # for ee_acm in ee_acms:
        #     client.add_attached_collision_mesh(ee_acm, options={'robot' : robot, 'mass': 1})
        # client.add_attached_collision_mesh(beam_acm, options={'robot' : robot, 'mass': 1})

        # safe start conf
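        # r11_start_conf_vals mixes prismatic gantry values in millimeters (first three)
        # and revolute arm values in degrees; convert to meters / radians below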
        start_confval = np.hstack([
            r11_start_conf_vals[:3] * 1e-3,
            np.radians(r11_start_conf_vals[3:])
        ])
        conf = Configuration(values=start_confval.tolist(),
                             types=ik_joint_types,
                             joint_names=ik_joint_names)
        assert not client.check_collisions(
            robot, conf, options={'diagnosis': diagnosis})

        # TODO add more tests
        from pybullet_planning import get_link_subtree, prune_fixed_joints, clone_body, get_movable_joints, get_all_links, \
            set_color, child_link_from_joint
        first_joint = ik_joints[0]
        target_link = flange_link

        # selected_links = get_link_subtree(robot_uid, first_joint) # TODO: child_link_from_joint?
        selected_links = [child_link_from_joint(joint) for joint in ik_joints]

        selected_movable_joints = prune_fixed_joints(robot_uid, selected_links)
        assert (target_link in selected_links)
        selected_target_link = selected_links.index(target_link)
        sub_robot = clone_body(robot_uid,
                               links=selected_links,
                               visual=True,
                               collision=True)  # TODO: joint limits
        sub_movable_joints = get_movable_joints(sub_robot)

        # conf = Configuration(values=[0.]*6, types=ik_joint_types, joint_names=ik_joint_names)
        # assert not client.configuration_in_collision(conf, group=move_group, options={'diagnosis':True})
        wait_if_gui()