def pybullet():
    import pybullet as pb

    from tqdm import tqdm
    from kineverse.utils import res_pkg_path

    pb.connect(pb.DIRECT)
    pb.setTimeStep(0.01)
    pb.setGravity(0, 0, -9.81)

    kitchen = pb.loadURDF(
        res_pkg_path('package://iai_kitchen/urdf_obj/IAI_kitchen.urdf'),
        [0, 0, 0], [0, 0, 0, 1],
        useMaximalCoordinates=0,
        useFixedBase=True,
        flags=pb.URDF_USE_SELF_COLLISION_EXCLUDE_PARENT)
    pr2 = pb.loadURDF(
        res_pkg_path('package://iai_pr2_description/robots/pr2_calibrated_with_ft2.xml'),
        [0, 0, 0], [0, 0, 0, 1],
        useMaximalCoordinates=0,
        useFixedBase=True,
        flags=pb.URDF_USE_SELF_COLLISION_EXCLUDE_PARENT)

    print(kitchen, pr2)

    for x in tqdm(range(100)):
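        # Query closest points within 40m between link 1 of the PR2 and link 1 of the kitchen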
        cps = pb.getClosestPoints(pr2, kitchen, 40.0, 1, 1)
        print('\nClosest points:\n{}'.format(cps))
Example #2
    def test_model_eq(self):
        urdf_model = URDF.from_xml_file(res_pkg_path('package://kineverse/urdf/testbot.urdf'))
        ks1 = ArticulationModel()
        ks2 = ArticulationModel()
        load_urdf(ks1, Path(urdf_model.name), urdf_model)
        load_urdf(ks2, Path(urdf_model.name), urdf_model)
        self.assertEqual(ks1, ks2)
Example #3
    def test_double_reload(self):
        km = ArticulationModel()
        urdf_model = URDF.from_xml_file(res_pkg_path('package://kineverse/urdf/testbot.urdf'))
        load_urdf(km, Path(urdf_model.name), urdf_model)
        km.clean_structure()
        eef_frame_1 = km.get_data(Path(urdf_model.name) + Path('links/gripper_link/pose'))

        load_urdf(km, Path(urdf_model.name), urdf_model)
        km.clean_structure()
        eef_frame_2 = km.get_data(Path(urdf_model.name) + Path('links/gripper_link/pose'))

        self.assertEqual(eef_frame_1, eef_frame_2)
Example #4
import os

import rospy

from kineverse.model.articulation_model import Path
from kineverse.model.geometry_model import GeometryModel
from kineverse.operations.urdf_operations import load_urdf
from kineverse.utils import res_pkg_path
from kineverse.visualization.graph_generator import generate_dependency_graph, plot_graph
from kineverse.visualization.plotting import draw_recorders, split_recorders
from kineverse.urdf_fix import urdf_filler

from sensor_msgs.msg import JointState as JointStateMsg
from urdf_parser_py.urdf import URDF

if __name__ == '__main__':
    rospy.init_node('kineverse_kitchen_sandbox')

    plot_dir = res_pkg_path('package://kineverse/test/plots')
    pub_path = '/opt/ros/{}/lib/robot_state_publisher/robot_state_publisher'.format(
        os.environ['ROS_DISTRO'])

    with open(
            res_pkg_path(
                'package://faculty_lounge_kitchen_description/urdfs/kitchen.urdf'
            ), 'r') as urdf_file:
        kitchen_urdf_str = urdf_file.read()

    with open(res_pkg_path('package://fetch_description/robots/fetch.urdf'),
              'r') as urdf_file:
        fetch_urdf_str = urdf_file.read()

    kitchen_urdf_model = urdf_filler(URDF.from_xml_string(kitchen_urdf_str))
    rospy.set_param('/{}/robot_description'.format(kitchen_urdf_model.name),
Example #5
    out.set_collision_shape(shape)
    out.collision_flags = pb.CollisionObject.KinematicObject
    out.transform = transform
    return out


if __name__ == '__main__':
    rospy.init_node('kineverse_bullet_test')

    vis = ROSBPBVisualizer('bullet_test', 'world')

    kw = pb.KineverseWorld()
    kw.force_update_all_aabbs = True

    suzanne_obj = pb.load_convex_shape(
        res_pkg_path("package://kineverse/meshes/suzanne.obj"))
    suzanne_stl = pb.load_convex_shape(
        res_pkg_path("package://kineverse/meshes/suzanne.stl"))

    cube_shape = pb.BoxShape(pb.Vector3(0.5, 0.5, 0.5))
    cube_body = create_object(cube_shape, pb.Transform(1, 0, 0))
    cube2_body = create_object(cube_shape, pb.Transform(-1, 0, 0))
    #obj_body   = create_object(suzanne_obj, pb.Transform(0, 1, 0))
    #stl_body   = create_object(suzanne_stl, pb.Transform(0,-1, 0))

    kw.add_collision_object(cube_body)
    kw.add_collision_object(cube2_body)
    #kw.add_collision_object(obj_body)
    #kw.add_collision_object(stl_body)

    cube_body.activate(True)
Example #6
def main(create_figure=False,
         vis_mode='none',
         log_csv=True,
         min_n_dof=1,
         samples=300,
         n_observations=25,
         noise_lin=0.2,
         noise_ang=30,
         noise_steps=5):

    wait_duration = rospy.Duration(0.1)

    vis = ROSBPBVisualizer('ekf_vis', 'world') if vis_mode != 'none' else None
    km = GeometryModel()

    with open(res_pkg_path('package://iai_kitchen/urdf_obj/IAI_kitchen.urdf'),
              'r') as urdf_file:
        urdf_kitchen_str = urdf_file.read()
        kitchen_model = urdf_filler(
            URDF.from_xml_string(hacky_urdf_parser_fix(urdf_kitchen_str)))
        load_urdf(km, Path('kitchen'), kitchen_model)

    km.clean_structure()
    km.dispatch_events()

    kitchen = km.get_data('kitchen')

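    # Group links whose poses share free symbols into pools; each pool is tracked by its own EKF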
    tracking_pools = []
    for name, link in kitchen.links.items():
        symbols = gm.free_symbols(link.pose)
        if len(symbols) == 0:
            continue

        for x in range(len(tracking_pools)):
            syms, l = tracking_pools[x]
            if len(symbols.intersection(syms)) != 0:  # BAD ALGORITHM, DOES NOT CORRECTLY IDENTIFY SETS
                tracking_pools[x] = (syms.union(symbols),
                                     l + [(name, link.pose)])
                break
        else:
            tracking_pools.append((symbols, [(name, link.pose)]))

    # tracking_pools = [tracking_pools[7]]
    # print('Identified {} tracking pools:\n{}'.format(len(tracking_pools), tracking_pools))

    all_ekfs = [
        EKFModel(dict(poses), km.get_constraints_by_symbols(symbols))
        for symbols, poses in tracking_pools
    ]  # np.eye(len(symbols)) * 0.001
    print('Created {} EKF models'.format(len(all_ekfs)))
    print('\n'.join(str(e) for e in all_ekfs))

    # Sanity constraint
    min_n_dof = min(min_n_dof, len(all_ekfs))

    iteration_times = []

    for u in range(min_n_dof, len(all_ekfs) + 1):
        if rospy.is_shutdown():
            break

        ekfs = all_ekfs[:u]

        observed_poses = {}
        for ekf in ekfs:
            for link_name, _ in ekf.takers:
                observed_poses[link_name] = kitchen.links[link_name].pose
        names, poses = zip(*sorted(observed_poses.items()))

        state_symbols = union([gm.free_symbols(p) for p in poses])
        ordered_state_vars = [
            s for _, s in sorted((str(s), s) for s in state_symbols)
        ]

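        # Collect numeric lower/upper bounds for each state symbol from the model's constraints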
        state_constraints = {}
        for n, c in km.get_constraints_by_symbols(state_symbols).items():
            if gm.is_symbol(c.expr):
                s = gm.free_symbols(c.expr).pop()
                fs = gm.free_symbols(c.lower).union(gm.free_symbols(c.upper))
                if len(fs.difference({s})) == 0:
                    state_constraints[s] = (float(gm.subs(c.lower, {s: 0})),
                                            float(gm.subs(c.upper, {s: 0})))

        state_bounds = np.array([
            state_constraints[s]
            if s in state_constraints else [-np.pi * 0.5, np.pi * 0.5]
            for s in ordered_state_vars
        ])

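        # Compiled forward kinematics: maps a state vector to the vertically stacked 4x4 link poses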
        state_fn = gm.speed_up(gm.vstack(*poses), ordered_state_vars)
        subworld = km.get_active_geometry(state_symbols)

        # Generate observation noise
        print('Generating R matrices...')
        n_cov_obs = 400
        full_log = []

        dof_iters = []

        # EXPERIMENT
        for lin_std, ang_std in [(noise_lin, noise_ang * (np.pi / 180.0))]:
            # zip(np.linspace(0, noise_lin, noise_steps),
            #     np.linspace(0, noise_ang * (np.pi / 180.0), noise_steps)):
            if rospy.is_shutdown():
                break
            # INITIALIZE SENSOR MODEL
            training_obs = []
            state = np.random.uniform(state_bounds.T[0], state_bounds.T[1])
            observations = state_fn.call2(state)

            for _ in range(n_cov_obs):
                noisy_obs = {}
                for x, noise in enumerate([
                        t.dot(r) for t, r in zip(
                            random_normal_translation(len(poses), 0, lin_std),
                            random_rot_normal(len(poses), 0, ang_std))
                ]):
                    noisy_obs[names[x]] = observations[x * 4:x * 4 +
                                                       4, :4].dot(noise)
                training_obs.append(noisy_obs)

            for ekf in ekfs:
                ekf.generate_R(training_obs)
                # ekf.set_R(np.eye(len(ekf.ordered_vars)) * 0.1)

            # Generate figure
            gridsize = (4, samples)
            plot_size = (4, 4)
            fig = plt.figure(figsize=(gridsize[1] * plot_size[0], gridsize[0] *
                                      plot_size[1])) if create_figure else None

            gt_states = []
            states = [[] for x in range(samples)]
            variances = [[] for x in range(samples)]
            e_obs = [[] for x in range(samples)]

            print('Starting iterations')
            for k in tqdm(range(samples)):
                if rospy.is_shutdown():
                    break

                state = np.random.uniform(state_bounds.T[0], state_bounds.T[1])
                gt_states.append(state)
                observations = state_fn.call2(state).copy()
                gt_obs_d = {
                    n: observations[x * 4:x * 4 + 4, :4]
                    for x, n in enumerate(names)
                }
                subworld.update_world(dict(zip(ordered_state_vars, state)))

                if vis_mode == 'iter' or vis_mode == 'io':
                    vis.begin_draw_cycle('gt', 'noise', 'estimate', 't_n',
                                         't0')
                    vis.draw_world('gt', subworld, g=0, b=0)
                    vis.render('gt')

                estimates = []
                for ekf in ekfs:
                    particle = ekf.spawn_particle()
                    estimates.append(particle)

                initial_state = dict(
                    sum([[(s, v) for s, v in zip(ekf.ordered_vars, e.state)]
                         for e, ekf in zip(estimates, ekfs)], []))
                initial_state = np.array(
                    [initial_state[s] for s in ordered_state_vars])
                if (initial_state.min() < state_bounds.T[0].min()
                        or initial_state.max() > state_bounds.T[1].max()):
                    raise Exception(
                        'Estimate initialization is out of bounds: {}'.format(
                            np.vstack([initial_state, state_bounds.T]).T))
                initial_delta = state - initial_state

                for y in range(n_observations):
                    # Add noise to observations
                    noisy_obs = {}
                    for x, noise in enumerate([
                            t.dot(r) for t, r in zip(
                                random_normal_translation(
                                    len(poses), 0, lin_std),
                                random_rot_normal(len(poses), 0, ang_std))
                    ]):
                        noisy_obs[names[x]] = observations[x * 4:x * 4 +
                                                           4, :4].dot(noise)

                    if vis_mode in {'iter', 'iter-trail'} or (vis_mode == 'io'
                                                              and y == 0):
                        for n, t in noisy_obs.items():
                            subworld.named_objects[Path(
                                ('kitchen', 'links', n))].np_transform = t
                        if vis_mode != 'iter-trail':
                            vis.begin_draw_cycle('noise')
                        vis.draw_world('noise', subworld, r=0, g=0, a=0.1)
                        vis.render('noise')

                    start_time = time()
                    for estimate, ekf in zip(estimates, ekfs):
                        if y > 0:
                            control = np.zeros(len(ekf.ordered_controls))
                            estimate.state, estimate.cov = ekf.predict(
                                estimate.state.flatten(), estimate.cov,
                                control)
                            obs_vector = ekf.gen_obs_vector(noisy_obs)
                            estimate.state, estimate.cov = ekf.update(
                                estimate.state, estimate.cov,
                                ekf.gen_obs_vector(noisy_obs))

                            if vis_mode in {'iter', 'iter-trail'}:
                                subworld.update_world({
                                    s: v
                                    for s, v in zip(ekf.ordered_vars,
                                                    estimate.state)
                                })
                        else:
                            obs_vector = ekf.gen_obs_vector(noisy_obs)

                            for _ in range(1):
                                h_prime = ekf.h_prime_fn.call2(estimate.state)
                                obs_delta = obs_vector.reshape(
                                    (len(obs_vector), 1)) - ekf.h_fn.call2(
                                        estimate.state)
                                estimate.state += (h_prime.T.dot(obs_delta) *
                                                   0.1).reshape(
                                                       estimate.state.shape)

                            if vis_mode in {'iter', 'io'}:
                                subworld.update_world({
                                    s: v
                                    for s, v in zip(ekf.ordered_vars,
                                                    estimate.state)
                                })

                    if vis_mode != 'none' and y == 0:
                        vis.draw_world('t0', subworld, b=0, a=1)
                        vis.render('t0')
                    elif vis_mode in {'iter', 'iter-trail'}:
                        if vis_mode != 'iter-trail':
                            vis.begin_draw_cycle('t_n')
                        vis.draw_world('t_n', subworld, b=0, a=1)
                        vis.render('t_n')

                    if log_csv or fig is not None:
                        e_state_d = dict(
                            sum([[(s, v)
                                  for s, v in zip(ekf.ordered_vars, e.state)]
                                 for e, ekf in zip(estimates, ekfs)], []))
                        # Per-variable standard deviation from the covariance diagonal
                        covs = dict(
                            sum([[(s, v) for s, v in zip(
                                ekf.ordered_vars, np.sqrt(np.diag(e.cov)))]
                                 for e, ekf in zip(estimates, ekfs)], []))
                        e_state = np.hstack([
                            e_state_d[s] for s in ordered_state_vars
                        ]).reshape((len(e_state_d), ))

                        if log_csv:
                            full_log.append(
                                np.hstack(
                                    ([lin_std,
                                      ang_std], state, e_state.flatten(),
                                     np.array([
                                         covs[s] for s in ordered_state_vars
                                     ]))))

                        if fig is not None:
                            e_obs[k].append(
                                np.array([
                                    np.abs(
                                        ekf.gen_obs_vector(gt_obs_d) -
                                        ekf.h_fn.call2(e.state)).max()
                                    for e, ekf in zip(estimates, ekfs)
                                ]))
                            states[k].append(e_state)
                            variances[k].append(
                                np.array([covs[s]
                                          for s in ordered_state_vars]))
                else:  # for-else: runs once after the observation loop completes
                    if vis_mode == 'io':
                        for estimate, ekf in zip(estimates, ekfs):
                            subworld.update_world({
                                s: v
                                for s, v in zip(ekf.ordered_vars,
                                                estimate.state)
                            })

                        vis.draw_world('t_n', subworld, r=0, b=0, a=1)
                        vis.render('t_n')

                    dof_iters.append(time() - start_time)

            if fig is not None:
                axes = [
                    plt.subplot2grid(gridsize, (y, 0), colspan=1, rowspan=1)
                    for y in range(gridsize[0])
                ]
                axes = np.array(
                    sum([[
                        plt.subplot2grid(gridsize, (y, x),
                                         colspan=1,
                                         rowspan=1,
                                         sharey=axes[y])
                        for y in range(gridsize[0])
                    ] for x in range(1, gridsize[1])], axes)).reshape(
                        (gridsize[1], gridsize[0]))

                for x, (gt_s, state, variance, obs_delta,
                        (ax_s, ax_d, ax_o, ax_v)) in enumerate(
                            zip(gt_states, states, variances, e_obs, axes)):

                    for y in gt_s:
                        ax_s.axhline(y, xmin=0.97, xmax=1.02)

                    ax_s.set_title('State; Sample: {}'.format(x))
                    ax_d.set_title('Delta from GT; Sample: {}'.format(x))
                    ax_o.set_title('Max Delta in Obs; Sample: {}'.format(x))
                    ax_v.set_title('Standard Deviation; Sample: {}'.format(x))
                    ax_s.plot(state)
                    ax_d.plot(gt_s - np.vstack(state))
                    ax_o.plot(obs_delta)
                    ax_v.plot(variance)
                    ax_s.grid(True)
                    ax_d.grid(True)
                    ax_o.grid(True)
                    ax_v.grid(True)

                fig.tight_layout()
                plt.savefig(
                    res_pkg_path(
                        'package://kineverse_experiment_world/test/ekf_object_tracker_{}_{}.png'
                        .format(lin_std, ang_std)))

        iteration_times.append(dof_iters)

        if log_csv:
            df = pd.DataFrame(
                columns=['lin_std', 'ang_std'] +
                ['gt_{}'.format(x) for x in range(len(state_symbols))] +
                ['ft_{}'.format(x) for x in range(len(state_symbols))] +
                ['var_{}'.format(x) for x in range(len(state_symbols))],
                data=full_log)
            df.to_csv(res_pkg_path(
                'package://kineverse_experiment_world/test/ekf_object_tracker.csv'
            ),
                      index=False)

    df = pd.DataFrame(
        columns=[str(x) for x in range(1,
                                       len(iteration_times) + 1)],
        data=np.vstack(iteration_times).T)
    df.to_csv(res_pkg_path(
        'package://kineverse_experiment_world/test/ekf_object_tracker_performance.csv'
    ),
              index=False)
Example #7
    def test_apply_operation(self):
        self.ops_msg = None
        self.model_msg = None

        def cb_update(ops, model):
            self.ops_msg = ops
            self.model_msg = model

        server = ModelServerForTesting(cb_update)

        def cb_srv_get_model(paths, csymbs):
            return server.srv_get_model(
                bb(paths=paths, constrained_symbols=csymbs))

        def cb_srv_get_constraints(csymbs):
            return server.srv_get_constraints(bb(symbols=csymbs))

        op_client = OperationsClientForTesting(None, True)
        m_client = ModelClientForTesting(cb_srv_get_model,
                                         cb_srv_get_constraints, None)
        m_client.has_data('testbot')

        urdf_model = URDF.from_xml_file(
            res_pkg_path('package://kineverse/urdf/testbot.urdf'))

        load_urdf(op_client, urdf_model.name, urdf_model)

        op_list = op_client.apply_changes()

        req = bb(operations=op_list)

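        # The same operation list is applied twice to exercise re-application of existing operations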
        server.srv_apply_operations(req)
        server.srv_apply_operations(req)

        op_client.cb_operations_update(self.ops_msg)

        with open('update.json', 'w') as f:
            json.dump(dict(
                zip(self.model_msg.update.paths, self.model_msg.update.data)),
                      f,
                      indent=2)

        m_client.cb_model_update(self.model_msg)
        o_model = server.km.get_data('testbot')
        t_model = m_client.get_data('testbot')

        with open('original.json', 'w') as f:
            json.dump(o_model, f, indent=True)

        with open('transmitted.json', 'w') as f:
            json.dump(t_model, f, indent=True)

        self.assertEqual(o_model.name, t_model.name)

        for n, l in o_model.links.items():
            if n not in t_model.links:
                print('Link {} missing from transmitted model'.format(n))
                continue
            lt = t_model.links[n]
            if l == lt:
                print('Equality check passed for link {}'.format(n))
                print('Types: {} {}'.format(type(l), type(lt)))
                print('parent: {}'.format(lt.parent == l.parent))
                print('pose: {}'.format(lt.pose == l.pose))
                print('to_parent: {}'.format(lt.to_parent == l.to_parent))
                print('geometry: {}'.format(lt.geometry == l.geometry))
                print('collision: {}'.format(lt.collision == l.collision))
                print('inertial: {}\n---'.format(lt.inertial == l.inertial))
            else:
                print('Equality check failed for link {}'.format(n))
                print('Types: {} {}'.format(type(l), type(lt)))
                print('parent: {}'.format(lt.parent == l.parent))
                print('pose: {}\n{}'.format(lt.pose == l.pose, l.pose - lt.pose))
                print('to_parent: {}\n{}'.format(lt.to_parent == l.to_parent,
                                                 l.to_parent - lt.to_parent))
                print('geometry: {}'.format(lt.geometry == l.geometry))
                print('collision: {}'.format(lt.collision == l.collision))
                print('inertial: {}'.format(lt.inertial == l.inertial))

        self.assertEqual(o_model.joints, t_model.joints)
        self.assertEqual(o_model, t_model)
Example #8
if __name__ == '__main__':
    rospy.init_node('kineverse_roomba_uploader')

    if len(sys.argv) < 3:
        print(
            'Usage: [urdf file] wheel_radius wheel_distance [fill_collision_tags]'
        )
        exit()
    elif len(sys.argv) < 4:
        urdf_model = URDF.from_xml_string(
            rospy.get_param('/robot_description'))
        radius = float(sys.argv[1])
        distance = float(sys.argv[2])
    else:
        urdf_model = URDF.from_xml_file(res_pkg_path(sys.argv[1]))
        radius = float(sys.argv[2])
        distance = float(sys.argv[3])

    # Fill collision geometry if demanded
    if len(sys.argv) >= 5 and (sys.argv[4].lower() == 'true'
                               or sys.argv[4] == '1'):
        urdf_model = urdf_filler(urdf_model)

    op_client = OperationsClient(EventModel, True)

    load_urdf(op_client, urdf_model.name, urdf_model)
    op_client.apply_operation_before(
        'create world', 'create {}'.format(urdf_model.name),
        CreateComplexObject(Path('world'), Frame('')))
Example #9
import os

import rospy
import tf

from kineverse.utils import res_pkg_path
from kineverse.visualization.graph_generator import generate_modifications_graph,\
                                                    generate_dependency_graph,   \
                                                    plot_graph
from kineverse.visualization.plotting import draw_recorders, split_recorders

from sensor_msgs.msg import JointState as JointStateMsg
from trajectory_msgs.msg import JointTrajectory as JointTrajectoryMsg
from trajectory_msgs.msg import JointTrajectoryPoint as JointTrajectoryPointMsg

from urdf_parser_py.urdf import URDF

if __name__ == '__main__':
    rospy.init_node('kineverse_sandbox')

    plot_dir = res_pkg_path('package://kineverse/test/plots')
    pub_path = '/opt/ros/{}/lib/robot_state_publisher/robot_state_publisher'.format(
        os.environ['ROS_DISTRO'])

    with open(res_pkg_path('package://fetch_description/robots/fetch.urdf'),
              'r') as urdf_file:
        urdf_str = urdf_file.read()

    rospy.set_param('/robot_description', urdf_str)

    js_pub = rospy.Publisher('/joint_states', JointStateMsg, queue_size=1)
    traj_pup = rospy.Publisher('/fetch/commands/joint_trajectory',
                               JointTrajectoryMsg,
                               queue_size=1)
    tf_broadcaster = tf.TransformBroadcaster()
Example #10
            'l_forearm_roll_joint': 0.0,
            'l_wrist_flex_joint': -1.05,
            'r_elbow_flex_joint': -2.1213,
            'r_shoulder_lift_joint': 1.2963,
            'r_shoulder_pan_joint': 0.0,
            'r_upper_arm_roll_joint': 0.0,
            'r_forearm_roll_joint': 0.0,
            'r_wrist_flex_joint': -1.05,
            'torso_lift_joint': 0.16825
        }
        closer = ObsessiveObjectCloser(
            Path('iai_oven_area'),
            Path('pr2/links/r_gripper_r_finger_tip_link'),
            Path('pr2/links/head_mount_link'),
            '/tracked/state',
            '/pr2',
            resting_pose=resting_pose)

    while not rospy.is_shutdown():
        rospy.sleep(1000)

    if closer.pushing_controller is not None and PANDA_LOGGING:
        recW, recB, recC, recs = convert_qp_builder_log(
            closer.pushing_controller)

        draw_recorders([recW, recB, recC] + [
            r for _, r in sorted(recs.items())
        ], 1, 12, 6).savefig(
            res_pkg_path(
                'package://kineverse_experiment_world/test/keep_contact.png'))
Example #11
                    ) in symbolic_types and u_const_name not in generated_constraints:
                        new_goals[u_const_name] = SoftConstraint(
                            less_than(c.upper, 0), 1000, goal.weight, c.upper)
                        generated_constraints.add(u_const_name)
                    elif diff_sign[s] < 0 and type(
                            c.lower
                    ) in symbolic_types and l_const_name not in generated_constraints:
                        new_goals[l_const_name] = SoftConstraint(
                            -1000, -greater_than(c.lower, 0), goal.weight,
                            c.lower)
                        generated_constraints.add(l_const_name)

        final_goals.update(
            lock_explorer(km, state, new_goals, generated_constraints))

    return final_goals


if __name__ == '__main__':
    scenario = LockboxOpeningGenerator()

    scenario.run(0.19)

    draw_recorders([
        scenario.value_recorder, scenario.vel_recorder,
        scenario.symbol_recorder
    ], 4.0 / 9.0, 10, 3).savefig(
        res_pkg_path(
            'package://kineverse_experiment_world/test/plots/lockbox_opening.png'
        ))
Example #12
                                  Path(f'{prefix}/links/handle')))
    km.apply_operation(
        f'add lock {door_position}',
        ConditionalDoorHandleConstraints(door_position, handle_position,
                                         math.pi * 0.01, math.pi * 0.15))


if __name__ == '__main__':
    rospy.init_node('kineverse_door_opening')

    visualizer = ROSBPBVisualizer('~visualization', base_frame='world')

    km = GeometryModel()

    with open(
            res_pkg_path(
                'package://kineverse_experiment_world/urdf/iiwa_wsg_50.urdf'),
            'r') as f:
        iiwa_urdf = urdf_filler(
            URDF.from_xml_string(hacky_urdf_parser_fix(f.read())))
        load_urdf(km, Path('iiwa'), iiwa_urdf)

    km.clean_structure()
    km.dispatch_events()

    door_x, door_y = [gm.Position(f'localization_{x}') for x in 'xy']

    create_door(km,
                Path('door'),
                0.5,
                0.35,
                to_world_tf=gm.translation3(door_x, door_y, 0.1))
Example #13
    if args.link is not None:
        robot_link = args.link
    elif robot == 'pr2':
        robot_link = 'r_gripper_r_finger_tip_link'
    else:
        robot_link = 'gripper_link'  # 'r_gripper_finger_link'

    camera_link = args.camera
    if camera_link is None:
        if args.robot == 'fetch':
            camera_link = 'head_camera_rgb_optical_frame'
        elif args.robot == 'pr2':
            camera_link = 'head_mount_kinect_rgb_optical_frame'

    plot_dir = res_pkg_path('package://kineverse/test/plots')

    # Loading of models
    if robot != 'pr2':
        with open(
                res_pkg_path(
                    'package://{r}_description/robots/{r}.urdf'.format(
                        r=robot)), 'r') as urdf_file:
            urdf_str = hacky_urdf_parser_fix(urdf_file.read())
    else:
        with open(
                res_pkg_path(
                    'package://iai_pr2_description/robots/pr2_calibrated_with_ft2.xml'
                ), 'r') as urdf_file:
            urdf_str = hacky_urdf_parser_fix(urdf_file.read())
Example #14
import rospy

from kineverse.model.articulation_model import Path
from kineverse.operations.urdf_operations import load_urdf
from kineverse.utils import res_pkg_path

from kineverse.motion.min_qp_builder               import TypedQPBuilder  as TQPB, \
                                                          SoftConstraint  as SC, \
                                                          ControlledValue as CV, \
                                                          generate_controlled_values
from kineverse.motion.integrator import CommandIntegrator
from kineverse.visualization.bpb_visualizer import ROSBPBVisualizer
from kineverse.visualization.plotting import draw_recorders, split_recorders
from kineverse.visualization.trajectory_visualizer import TrajectoryVisualizer

from urdf_parser_py.urdf import URDF

if __name__ == '__main__':
    rospy.init_node('microwave_sandbox')

    micro_urdf = URDF.from_xml_file(
        res_pkg_path('package://kineverse/urdf/microwave.urdf'))

    km = EventModel()
    #km = GeometryModel()
    load_urdf(km, Path('microwave'), micro_urdf)

    traj_vis = TrajectoryVisualizer(ROSBPBVisualizer('/bullet_test', '/map'))
    traj_vis.add_articulated_object(micro_urdf, km.get_data('microwave'))

    door_position = km.get_data('microwave/joints/door_joint').position
    button_position = km.get_data('microwave/joints/button_joint').position
    button_min = -0.02
    door_velocity = get_diff_symbol(door_position)
    door_accel = get_diff_symbol(door_velocity)
    point_of_release = 0.8 * button_min
    button_pushed = less_than(button_position, point_of_release)
Example #15
    #node.init(mode='direct')

    req = AddRigidObjectRequest()
    req.geom_type = 'box'
    req.extents.x = req.extents.y = 10
    req.extents.z = 1
    req.pose.position.z = -0.5
    req.pose.orientation.w = 1
    req.name = 'floor'

    node.srv_add_rigid_body(req)  # This is dumb!

    tmp_urdf = open('/tmp/temp_urdf.urdf', 'w')  #tempfile.NamedTemporaryFile()
    filled_model = urdf_filler(
        URDF.from_xml_file(
            res_pkg_path('package://iai_kitchen/urdf_obj/IAI_kitchen.urdf')))
    tmp_urdf.write(filled_model.to_xml_string())
    tmp_urdf.close()
    #tmp_urdf.write(urdf_filler(URDF.from_xml_file(res_pkg_path('package://faculty_lounge_kitchen_description/urdfs/kitchen.urdf'))).to_xml_string())
    #snd_urdf = URDF.from_xml_file('/tmp/temp_urdf.urdf')

    req = AddURDFRequest()
    req.urdf_path = '/tmp/temp_urdf.urdf'  #'package://iai_kitchen/urdf_obj/IAI_kitchen.urdf'
    req.name = 'iai_kitchen'
    req.fixed_base = True
    req.pose.orientation.w = 1

    node.srv_add_urdf(req)  # This is reeeeeeally stupid!

    angle = math.pi * 0
Example #16
                        help='Number of steps from lowest to highest noise.')
    parser.add_argument('--out',
                        '-o',
                        type=str,
                        default='tracker_results_n_dof.csv',
                        help='Name of the resulting csv file.')
    args = parser.parse_args()

    rospy.init_node('kineverse_tracking_node')
    tracker = TrackerNode('/tracked/state',
                          '/pose_obs',
                          args.step,
                          args.max_iter,
                          use_timer=False)

    with open(res_pkg_path('package://iai_kitchen/urdf_obj/IAI_kitchen.urdf'),
              'r') as urdf_file:
        urdf_kitchen_str = urdf_file.read()

    kitchen_model = urdf_filler(URDF.from_xml_string(urdf_kitchen_str))
    load_urdf(tracker.km_client, Path('iai_oven_area'), kitchen_model)

    tracker.km_client.clean_structure()
    tracker.km_client.dispatch_events()

    groups = [
        [('iai_oven_area/links/room_link', 'iai_oven_area/room_link'),
         ('iai_oven_area/links/fridge_area', 'iai_oven_area/fridge_area'),
         ('iai_oven_area/links/fridge_area_footprint',
          'iai_oven_area/fridge_area_footprint'),
         ('iai_oven_area/links/fridge_area_lower_drawer_handle',
Example #17
def load_convex_mesh_shape(pkg_filename):
    return pb.load_convex_shape(res_pkg_path(pkg_filename))
Example #18
def bpb():
    import numpy as np

    import kineverse.bpb_wrapper as pb
    import kineverse.model.geometry_model as gm
    import kineverse.operations.urdf_operations as urdf

    from kineverse.urdf_fix import urdf_filler
    from kineverse.utils import res_pkg_path
    from rospy import Time
    from tqdm import tqdm
    from urdf_parser_py.urdf import URDF

    km = gm.GeometryModel()

    with open(
            res_pkg_path(
                'package://iai_pr2_description/robots/pr2_calibrated_with_ft2.xml'
            ), 'r') as urdf_file:
        urdf.load_urdf(km, 'pr2',
                       urdf_filler(URDF.from_xml_string(urdf_file.read())))

    with open(res_pkg_path('package://iai_kitchen/urdf_obj/IAI_kitchen.urdf'),
              'r') as urdf_file:
        urdf.load_urdf(km, 'kitchen',
                       urdf_filler(URDF.from_xml_string(urdf_file.read())))

    km.clean_structure()
    km.dispatch_events()

    kitchen = km.get_data('kitchen')
    pr2 = km.get_data('pr2')

    joint_symbols = {
        j.position
        for j in kitchen.joints.values() if hasattr(j, 'position')
    }
    joint_symbols |= {
        j.position
        for j in pr2.joints.values() if hasattr(j, 'position')
    }

    coll_world = km.get_active_geometry(joint_symbols)

    robot_parts = {
        n: o
        for n, o in coll_world.named_objects.items() if n[:4] == 'pr2/'
    }

    batch = {o: 2.0 for o in robot_parts.values()}

    print(
        'Benchmarking by querying distances for {} objects. Total object count: {}.\n'
        .format(len(batch), len(coll_world.names)))

    dur_update = []
    dur_distances = []

    # print('Mesh files:\n{}'.format('\n'.join(sorted({pb.pb.get_shape_filename(s) for s in sum([o.collision_shape.child_shapes for o in coll_world.collision_objects], [])}))))

    # print('Objects:\n{}'.format('\n'.join(['{}:\n  {}\n  {} {}'.format(n,
    #                                                                    o.transform.rotation,
    #                                                                    o.transform.origin,
    #                                                                    pb.pb.get_shape_filename(o.collision_shape.get_child(0)))
    #                                                                    for n, o in coll_world.named_objects.items()])))

    for x in tqdm(range(100)):
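        # Randomize all joint positions, then separately time the world update and the batched distance query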
        start = Time.now()
        coll_world.update_world({
            s: v
            for s, v in zip(joint_symbols, np.random.rand(len(joint_symbols)))
        })
        dur_update.append(Time.now() - start)

        start = Time.now()
        distances = coll_world.closest_distances(batch)
        dur_distances.append(Time.now() - start)

    dur_update_mean = sum([d.to_sec() for d in dur_update]) / len(dur_update)
    dur_distances_mean = sum([d.to_sec()
                              for d in dur_distances]) / len(dur_distances)

    print('Update mean: {}\nUpdate max: {}\n'
          'Distances mean: {}\nDistances max: {}\n'.format(
              dur_update_mean,
              max(dur_update).to_sec(),
              dur_distances_mean,
              max(dur_distances).to_sec(),
          ))
Example #19
from kineverse.motion.min_qp_builder import TypedQPBuilder as TQPB, \
                                            SoftConstraint as SC, \
                                            ControlledValue as CV
from kineverse.motion.integrator import CommandIntegrator
from kineverse.visualization.plotting import draw_recorders
from kineverse.utils                   import res_pkg_path

if __name__ == '__main__':

    eef  = point3(1,0,0)
    goal = rotation3_rpy(0, -0.8, 2.56) * point3(1, 0, 0)

    rpy_model = rotation3_rpy(Position('roll'), Position('pitch'), Position('yaw')) * eef

    axis = vector3(Position('x'), Position('y'), 0) #Position('z'))
    ax_model  = rotation3_axis_angle(axis / (norm(axis) + 1e-4), norm(axis)) * eef

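    # Soft constraints that drive the distance between each rotated end-effector point and the goal to zero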
    goal_rpy = SC(-norm(goal - rpy_model), -norm(goal - rpy_model), 1, norm(goal - rpy_model))
    goal_ax  = SC(-norm(goal -  ax_model), -norm(goal -  ax_model), 1, norm(goal -  ax_model))

    rpy_integrator = CommandIntegrator(TQPB({}, {'rpy': goal_rpy}, 
                                            {str(s): CV(-0.6, 0.6, get_diff_symbol(s), 0.001) for s in rpy_model.free_symbols}),
                                            recorded_terms={'dist rpy': norm(goal - rpy_model)})

    ax_integrator  = CommandIntegrator(TQPB({}, {'ax': goal_ax}, 
                                            {str(s): CV(-0.6, 0.6, get_diff_symbol(s), 0.001) for s in ax_model.free_symbols}),
                                            recorded_terms={'dist ax': norm(goal - ax_model)})
    rpy_integrator.restart('Convergence Test: RPY vs AxAngle')
    rpy_integrator.run()
    ax_integrator.restart('Convergence Test: RPY vs AxAngle')
    ax_integrator.run()

    draw_recorders([rpy_integrator.recorder, ax_integrator.recorder,
                    rpy_integrator.sym_recorder, ax_integrator.sym_recorder],
                   1, 4, 4).savefig('{}/rpy_vs_axis_angle.png'.format(
                       res_pkg_path('package://kineverse_experiment_world/test')))
Example #20
    def test_load(self):
        urdf_model = URDF.from_xml_file(res_pkg_path('package://kineverse/urdf/testbot.urdf'))
        ks = ArticulationModel()
        load_urdf(ks, Path(urdf_model.name), urdf_model)
Example #21
def srv_image_to_object(req):
    res = ImageToObjectResponseMsg()
    res.objects = fake_observation()
    return res


if __name__ == '__main__':
    if len(sys.argv) < 2:
        print('SDF file needed.')
        exit(0)

    kin_links = None
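    # Use the first argument that is not a ROS-style name:=value remapping as the SDF world file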
    for a in sys.argv[1:]:
        if ':=' not in a:
            world_path = res_pkg_path(a)
            print('Loading world file {}'.format(world_path))
            sdf = SDF(load_xml(world_path))
            kin_links = world_to_links(list(sdf.worlds.values())[0],
                                       Path('bla'), False)
            break
    if kin_links is None:
        print('No SDF file was given.')
        exit(0)

    rospy.init_node('fake_observation')

    robot_name = rospy.get_param('~robot_name', 'fetch')
    width = rospy.get_param('~width', 640)
    height = rospy.get_param('~height', 480)
    near = rospy.get_param('~near', 0.2)
Example #22
    'wrist_roll_joint': 0.0,
    'shoulder_pan_joint': 1.32,
    'elbow_flex_joint': 1.72,
    'forearm_roll_joint': 0.0,
    'upperarm_roll_joint': -0.2,
    'wrist_flex_joint': 1.66,
    'shoulder_lift_joint': 1.4,
    'torso_lift_joint': 0.2
}

use_omni = False

if __name__ == '__main__':
    rospy.init_node('kineverse_sandbox')

    plot_dir = res_pkg_path('package://kineverse/test/plots')

    with open(res_pkg_path('package://fetch_description/robots/fetch.urdf'),
              'r') as urdf_file:
        urdf_str = urdf_file.read()

    with open(res_pkg_path('package://iai_kitchen/urdf_obj/IAI_kitchen.urdf'),
              'r') as urdf_file:
        urdf_kitchen_str = urdf_file.read()

    urdf_model = URDF.from_xml_string(urdf_str)
    kitchen_model = urdf_filler(URDF.from_xml_string(urdf_kitchen_str))

    traj_pup = rospy.Publisher('/{}/commands/joint_trajectory'.format(
        urdf_model.name),
                               JointTrajectoryMsg,