def from_dict(cls, init_dict):
    km = GeometryModel()
    urdf = urdf_filler(URDF.from_xml_string(rospy.get_param('/robot_description')))
    load_urdf(km, Path(urdf.name), urdf)
    km.clean_structure()
    km.dispatch_events()

    base_frame = init_dict['reference_frame']
    eefs = [Endeffector.from_dict(km.get_data(Path(urdf.name)), base_frame, d)
            for d in init_dict['links']]
    return cls(km, Path(urdf.name), eefs)
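# A minimal sketch of the init_dict this constructor reads. Only the keys
# 'reference_frame' and 'links' come from the code above; the per-link fields
# and the class name are hypothetical and depend on Endeffector.from_dict.
example_init_dict = {
    'reference_frame': 'base_link',
    'links': [
        {'link': 'r_gripper_tool_frame'},   # assumed field name
        {'link': 'l_gripper_tool_frame'},
    ],
}
# instance = SomeTrackerSetup.from_dict(example_init_dict)  # hypothetical class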
                  res_pkg_path('package://{r}_description/robots/{r}.urdf'.format(r=robot)), 'r') as urdf_file:
        urdf_str = hacky_urdf_parser_fix(urdf_file.read())
else:
    with open(res_pkg_path('package://iai_pr2_description/robots/pr2_calibrated_with_ft2.xml'), 'r') as urdf_file:
        urdf_str = hacky_urdf_parser_fix(urdf_file.read())

with open(res_pkg_path('package://iai_kitchen/urdf_obj/IAI_kitchen.urdf'), 'r') as urdf_file:
    urdf_kitchen_str = hacky_urdf_parser_fix(urdf_file.read())

urdf_model = urdf_filler(URDF.from_xml_string(hacky_urdf_parser_fix(urdf_str)))
kitchen_model = urdf_filler(URDF.from_xml_string(hacky_urdf_parser_fix(urdf_kitchen_str)))

#
traj_pup = rospy.Publisher('/{}/commands/joint_trajectory'.format(urdf_model.name),
                           JointTrajectoryMsg, queue_size=1)
kitchen_traj_pup = rospy.Publisher('/{}/commands/joint_trajectory'.format(kitchen_model.name),
                                   JointTrajectoryMsg, queue_size=1)

# KINEMATIC MODEL
km = GeometryModel()
def main(create_figure=False, vis_mode=False, log_csv=True, min_n_dof=1,
         samples=300, n_observations=25, noise_lin=0.2, noise_ang=30, noise_steps=5):
    wait_duration = rospy.Duration(0.1)

    vis = ROSBPBVisualizer('ekf_vis', 'world') if vis_mode != 'none' else None
    km = GeometryModel()

    with open(res_pkg_path('package://iai_kitchen/urdf_obj/IAI_kitchen.urdf'), 'r') as urdf_file:
        urdf_kitchen_str = urdf_file.read()
    kitchen_model = urdf_filler(URDF.from_xml_string(hacky_urdf_parser_fix(urdf_kitchen_str)))
    load_urdf(km, Path('kitchen'), kitchen_model)

    km.clean_structure()
    km.dispatch_events()

    kitchen = km.get_data('kitchen')

    tracking_pools = []
    for name, link in kitchen.links.items():
        symbols = gm.free_symbols(link.pose)
        if len(symbols) == 0:
            continue

        for x in range(len(tracking_pools)):
            syms, l = tracking_pools[x]
            if len(symbols.intersection(syms)) != 0:  # BAD ALGORITHM, DOES NOT CORRECTLY IDENTIFY SETS
                tracking_pools[x] = (syms.union(symbols), l + [(name, link.pose)])
                break
        else:
            tracking_pools.append((symbols, [(name, link.pose)]))

    # tracking_pools = [tracking_pools[7]]
    # print('Identified {} tracking pools:\n{}'.format(len(tracking_pools), tracking_pools))

    all_ekfs = [EKFModel(dict(poses), km.get_constraints_by_symbols(symbols))
                for symbols, poses in tracking_pools]  # np.eye(len(symbols)) * 0.001
    print('Created {} EKF models'.format(len(all_ekfs)))
    print('\n'.join(str(e) for e in all_ekfs))

    # Sanity constraint
    min_n_dof = min(min_n_dof, len(all_ekfs))

    iteration_times = []

    for u in range(min_n_dof, len(all_ekfs) + 1):
        if rospy.is_shutdown():
            break

        ekfs = all_ekfs[:u]

        observed_poses = {}
        for ekf in ekfs:
            for link_name, _ in ekf.takers:
                observed_poses[link_name] = kitchen.links[link_name].pose
        names, poses = zip(*sorted(observed_poses.items()))

        state_symbols = union([gm.free_symbols(p) for p in poses])
        ordered_state_vars = [s for _, s in sorted((str(s), s) for s in state_symbols)]

        state_constraints = {}
        for n, c in km.get_constraints_by_symbols(state_symbols).items():
            if gm.is_symbol(c.expr):
                s = gm.free_symbols(c.expr).pop()
                fs = gm.free_symbols(c.lower).union(gm.free_symbols(c.upper))
                if len(fs.difference({s})) == 0:
                    state_constraints[s] = (float(gm.subs(c.lower, {s: 0})),
                                            float(gm.subs(c.upper, {s: 0})))

        state_bounds = np.array([state_constraints[s] if s in state_constraints
                                 else [-np.pi * 0.5, np.pi * 0.5]
                                 for s in ordered_state_vars])

        state_fn = gm.speed_up(gm.vstack(*poses), ordered_state_vars)
        subworld = km.get_active_geometry(state_symbols)

        # Generate observation noise
        print('Generating R matrices...')
        n_cov_obs = 400

        full_log = []
        dof_iters = []

        # EXPERIMENT
        for lin_std, ang_std in [(noise_lin, noise_ang * (np.pi / 180.0))]:
            # zip(np.linspace(0, noise_lin, noise_steps),
            #     np.linspace(0, noise_ang * (np.pi / 180.0), noise_steps)):
            if rospy.is_shutdown():
                break

            # INITIALIZE SENSOR MODEL
            training_obs = []
            state = np.random.uniform(state_bounds.T[0], state_bounds.T[1])
            observations = state_fn.call2(state)

            for _ in range(n_cov_obs):
                noisy_obs = {}
                for x, noise in enumerate([t.dot(r) for t, r
                                           in zip(random_normal_translation(len(poses), 0, lin_std),
                                                  random_rot_normal(len(poses), 0, ang_std))]):
                    noisy_obs[names[x]] = observations[x * 4:x * 4 + 4, :4].dot(noise)
                training_obs.append(noisy_obs)

            for ekf in ekfs:
                ekf.generate_R(training_obs)
                # ekf.set_R(np.eye(len(ekf.ordered_vars)) * 0.1)

            # Generate figure
            gridsize = (4, samples)
            plot_size = (4, 4)
            fig = plt.figure(figsize=(gridsize[1] * plot_size[0],
                                      gridsize[0] * plot_size[1])) if create_figure else None

            gt_states = []
            states = [[] for x in range(samples)]
            variances = [[] for x in range(samples)]
            e_obs = [[] for x in range(samples)]

            print('Starting iterations')
            for k in tqdm(range(samples)):
                if rospy.is_shutdown():
                    break

                state = np.random.uniform(state_bounds.T[0], state_bounds.T[1])
                gt_states.append(state)
                observations = state_fn.call2(state).copy()
                gt_obs_d = {n: observations[x * 4:x * 4 + 4, :4] for x, n in enumerate(names)}
                subworld.update_world(dict(zip(ordered_state_vars, state)))

                if vis_mode == 'iter' or vis_mode == 'io':
                    vis.begin_draw_cycle('gt', 'noise', 'estimate', 't_n', 't0')
                    vis.draw_world('gt', subworld, g=0, b=0)
                    vis.render('gt')

                estimates = []
                for ekf in ekfs:
                    particle = ekf.spawn_particle()
                    estimates.append(particle)

                initial_state = dict(sum([[(s, v) for s, v in zip(ekf.ordered_vars, e.state)]
                                          for e, ekf in zip(estimates, ekfs)], []))
                initial_state = np.array([initial_state[s] for s in ordered_state_vars])
                if initial_state.min() < state_bounds.T[0].min() or initial_state.max() > state_bounds.T[1].max():
                    raise Exception('Estimate initialization is out of bounds: {}'.format(
                        np.vstack([initial_state, state_bounds.T]).T))

                initial_delta = state - initial_state

                for y in range(n_observations):
                    # Add noise to observations
                    noisy_obs = {}
                    for x, noise in enumerate([t.dot(r) for t, r
                                               in zip(random_normal_translation(len(poses), 0, lin_std),
                                                      random_rot_normal(len(poses), 0, ang_std))]):
                        noisy_obs[names[x]] = observations[x * 4:x * 4 + 4, :4].dot(noise)

                    if vis_mode in {'iter', 'iter-trail'} or (vis_mode == 'io' and y == 0):
                        for n, t in noisy_obs.items():
                            subworld.named_objects[Path(('kitchen', 'links', n))].np_transform = t
                        if vis_mode != 'iter-trail':
                            vis.begin_draw_cycle('noise')
                        vis.draw_world('noise', subworld, r=0, g=0, a=0.1)
                        vis.render('noise')

                    start_time = time()
                    for estimate, ekf in zip(estimates, ekfs):
                        if y > 0:
                            control = np.zeros(len(ekf.ordered_controls))
                            estimate.state, estimate.cov = ekf.predict(estimate.state.flatten(),
                                                                       estimate.cov, control)
                            obs_vector = ekf.gen_obs_vector(noisy_obs)
                            estimate.state, estimate.cov = ekf.update(estimate.state, estimate.cov,
                                                                      ekf.gen_obs_vector(noisy_obs))

                            if vis_mode in {'iter', 'iter-trail'}:
                                subworld.update_world({s: v for s, v in zip(ekf.ordered_vars, estimate.state)})
                        else:
                            obs_vector = ekf.gen_obs_vector(noisy_obs)
                            for _ in range(1):
                                h_prime = ekf.h_prime_fn.call2(estimate.state)
                                obs_delta = obs_vector.reshape((len(obs_vector), 1)) - ekf.h_fn.call2(estimate.state)
                                estimate.state += (h_prime.T.dot(obs_delta) * 0.1).reshape(estimate.state.shape)

                            if vis_mode in {'iter', 'io'}:
                                subworld.update_world({s: v for s, v in zip(ekf.ordered_vars, estimate.state)})

                        if vis_mode != 'none' and y == 0:
                            vis.draw_world('t0', subworld, b=0, a=1)
                            vis.render('t0')
                        elif vis_mode in {'iter', 'iter-trail'}:
                            if vis_mode != 'iter-trail':
                                vis.begin_draw_cycle('t_n')
                            vis.draw_world('t_n', subworld, b=0, a=1)
                            vis.render('t_n')

                    if log_csv or fig is not None:
                        e_state_d = dict(sum([[(s, v) for s, v in zip(ekf.ordered_vars, e.state)]
                                              for e, ekf in zip(estimates, ekfs)], []))
                        covs = dict(sum([[(s, v) for s, v in zip(ekf.ordered_vars, np.sqrt(np.trace(e.cov)))]
                                         for e, ekf in zip(estimates, ekfs)], []))
                        e_state = np.hstack([e_state_d[s] for s in ordered_state_vars]).reshape((len(e_state_d), ))

                        if log_csv:
                            full_log.append(np.hstack(([lin_std, ang_std], state, e_state.flatten(),
                                                       np.array([covs[s] for s in ordered_state_vars]))))

                        if fig is not None:
                            e_obs[k].append(np.array([np.abs(ekf.gen_obs_vector(gt_obs_d)
                                                             - ekf.h_fn.call2(e.state)).max()
                                                      for e, ekf in zip(estimates, ekfs)]))
                            states[k].append(e_state)
                            variances[k].append(np.array([covs[s] for s in ordered_state_vars]))
                else:
                    if vis_mode == 'io':
                        for estimate, ekf in zip(estimates, ekfs):
                            subworld.update_world({s: v for s, v in zip(ekf.ordered_vars, estimate.state)})
                        vis.draw_world('t_n', subworld, r=0, b=0, a=1)
                        vis.render('t_n')

                dof_iters.append(time() - start_time)

            if fig is not None:
                axes = [plt.subplot2grid(gridsize, (y, 0), colspan=1, rowspan=1)
                        for y in range(gridsize[0])]
                axes = np.array(sum([[plt.subplot2grid(gridsize, (y, x), colspan=1, rowspan=1, sharey=axes[y])
                                      for y in range(gridsize[0])]
                                     for x in range(1, gridsize[1])], axes)).reshape((gridsize[1], gridsize[0]))

                for x, (gt_s, state, variance, obs_delta,
                        (ax_s, ax_d, ax_o, ax_v)) in enumerate(zip(gt_states, states, variances, e_obs, axes)):
                    for y in gt_s:
                        ax_s.axhline(y, xmin=0.97, xmax=1.02)
                    ax_s.set_title('State; Sample: {}'.format(x))
                    ax_d.set_title('Delta from GT; Sample: {}'.format(x))
                    ax_o.set_title('Max Delta in Obs; Sample: {}'.format(x))
                    ax_v.set_title('Standard Deviation; Sample: {}'.format(x))
                    ax_s.plot(state)
                    ax_d.plot(gt_s - np.vstack(state))
                    ax_o.plot(obs_delta)
                    ax_v.plot(variance)
                    ax_s.grid(True)
                    ax_d.grid(True)
                    ax_o.grid(True)
                    ax_v.grid(True)

                fig.tight_layout()
                plt.savefig(res_pkg_path('package://kineverse_experiment_world/test/ekf_object_tracker_{}_{}.png'
                                         .format(lin_std, ang_std)))

        iteration_times.append(dof_iters)

        if log_csv:
            df = pd.DataFrame(columns=['lin_std', 'ang_std']
                                      + ['gt_{}'.format(x) for x in range(len(state_symbols))]
                                      + ['ft_{}'.format(x) for x in range(len(state_symbols))]
                                      + ['var_{}'.format(x) for x in range(len(state_symbols))],
                              data=full_log)
            df.to_csv(res_pkg_path('package://kineverse_experiment_world/test/ekf_object_tracker.csv'),
                      index=False)

    df = pd.DataFrame(columns=[str(x) for x in range(1, len(iteration_times) + 1)],
                      data=np.vstack(iteration_times).T)
    df.to_csv(res_pkg_path('package://kineverse_experiment_world/test/ekf_object_tracker_performance.csv'),
              index=False)
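# The pool-building loop in main() above is flagged by its own comment as not
# correctly identifying the symbol sets: a link whose symbols overlap several
# existing pools is only appended to the first one, so pools that should merge
# stay separate. This is a minimal standalone sketch of a merge-based
# alternative, not code from the original script; build_tracking_pools and its
# parameters are hypothetical names.
def build_tracking_pools(named_poses, free_symbols_fn):
    """Group (name, pose) pairs into pools whose poses share free symbols."""
    pools = []  # list of (symbol_set, [(name, pose), ...])
    for name, pose in named_poses:
        symbols = set(free_symbols_fn(pose))
        if not symbols:
            continue
        # Merge every existing pool that shares at least one symbol with this pose.
        merged_syms, merged_members = symbols, [(name, pose)]
        remaining = []
        for syms, members in pools:
            if syms & merged_syms:
                merged_syms = merged_syms | syms
                merged_members = merged_members + members
            else:
                remaining.append((syms, members))
        pools = remaining + [(merged_syms, merged_members)]
    return pools

# Usage sketch (mirrors the loop in main()):
#   tracking_pools = build_tracking_pools(((n, l.pose) for n, l in kitchen.links.items()),
#                                         gm.free_symbols)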
plot_dir = res_pkg_path('package://kineverse/test/plots')
pub_path = '/opt/ros/{}/lib/robot_state_publisher/robot_state_publisher'.format(os.environ['ROS_DISTRO'])

with open(res_pkg_path('package://faculty_lounge_kitchen_description/urdfs/kitchen.urdf'), 'r') as urdf_file:
    kitchen_urdf_str = urdf_file.read()

with open(res_pkg_path('package://fetch_description/robots/fetch.urdf'), 'r') as urdf_file:
    fetch_urdf_str = urdf_file.read()

kitchen_urdf_model = urdf_filler(URDF.from_xml_string(kitchen_urdf_str))
rospy.set_param('/{}/robot_description'.format(kitchen_urdf_model.name), kitchen_urdf_str)

kitchen_js_pub = rospy.Publisher('/{}/joint_states'.format(kitchen_urdf_model.name),
                                 JointStateMsg, queue_size=1)
tf_broadcaster = tf.TransformBroadcaster()

# KINEMATIC MODEL
km = GeometryModel()

kitchen_prefix = Path(kitchen_urdf_model.name)
load_urdf(km, kitchen_prefix, kitchen_urdf_model)

plot_graph(generate_dependency_graph(km, {'connect': 'blue'}),
    )
    exit()
elif len(sys.argv) < 4:
    urdf_model = URDF.from_xml_string(rospy.get_param('/robot_description'))
    radius = float(sys.argv[1])
    distance = float(sys.argv[2])
else:
    urdf_model = URDF.from_xml_file(res_pkg_path(sys.argv[1]))
    radius = float(sys.argv[2])
    distance = float(sys.argv[3])

# Fill collision geometry if demanded
if len(sys.argv) >= 5 and (sys.argv[4].lower() == 'true' or sys.argv[4] == '1'):
    urdf_model = urdf_filler(urdf_model)

op_client = OperationsClient(EventModel, True)

load_urdf(op_client, urdf_model.name, urdf_model)
op_client.apply_operation_before('create world', 'create {}'.format(urdf_model.name),
                                 CreateComplexObject(Path('world'), Frame('')))

r_limit = 1.0 / (pi * radius * 2)

drive_op = create_diff_drive_joint_with_symbols(
    Path('world/pose'),
    Path('{}/links/{}/pose'.format(urdf_model.name, urdf_model.get_root())),
    Path('{}/joints/to_world'.format(urdf_model.name)),
    radius, distance,
        ConditionalDoorHandleConstraints(door_position, handle_position,
                                         math.pi * 0.01, math.pi * 0.15))


if __name__ == '__main__':
    rospy.init_node('kineverse_door_opening')

    visualizer = ROSBPBVisualizer('~visualization', base_frame='world')

    km = GeometryModel()

    with open(res_pkg_path('package://kineverse_experiment_world/urdf/iiwa_wsg_50.urdf'), 'r') as f:
        iiwa_urdf = urdf_filler(URDF.from_xml_string(hacky_urdf_parser_fix(f.read())))

    load_urdf(km, Path('iiwa'), iiwa_urdf)

    km.clean_structure()
    km.dispatch_events()

    door_x, door_y = [gm.Position(f'localization_{x}') for x in 'xy']
    create_door(km, Path('door'), 0.5, 0.35,
                to_world_tf=gm.translation3(door_x, door_y, 0.1))

    km.clean_structure()
    km.dispatch_events()
node.init(config_dict={'tf_world_frame': 'world'}, mode='gui')
# node.init(mode='direct')

req = AddRigidObjectRequest()
req.geom_type = 'box'
req.extents.x = req.extents.y = 10
req.extents.z = 1
req.pose.position.z = -0.5
req.pose.orientation.w = 1
req.name = 'floor'

node.srv_add_rigid_body(req)

# This is dumb!
tmp_urdf = open('/tmp/temp_urdf.urdf', 'w')  # tempfile.NamedTemporaryFile()
filled_model = urdf_filler(URDF.from_xml_file(res_pkg_path('package://iai_kitchen/urdf_obj/IAI_kitchen.urdf')))
tmp_urdf.write(filled_model.to_xml_string())
tmp_urdf.close()
# tmp_urdf.write(urdf_filler(URDF.from_xml_file(res_pkg_path('package://faculty_lounge_kitchen_description/urdfs/kitchen.urdf'))).to_xml_string())
# snd_urdf = URDF.from_xml_file('/tmp/temp_urdf.urdf')

req = AddURDFRequest()
req.urdf_path = '/tmp/temp_urdf.urdf'  # 'package://iai_kitchen/urdf_obj/IAI_kitchen.urdf'
req.name = 'iai_kitchen'
req.fixed_base = True
req.pose.orientation.w = 1

node.srv_add_urdf(req)

# This is reeeeeeally stupid!
angle = math.pi * 0
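# The inline comment above hints at using tempfile.NamedTemporaryFile instead of
# the hard-coded /tmp/temp_urdf.urdf path. A minimal sketch of that variant,
# assuming node.srv_add_urdf only needs a readable path on disk:
import tempfile

with tempfile.NamedTemporaryFile(mode='w', suffix='.urdf', delete=False) as tmp:
    tmp.write(filled_model.to_xml_string())

req.urdf_path = tmp.name  # instead of '/tmp/temp_urdf.urdf'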
                    default='tracker_results_n_dof.csv',
                    help='Name of the resulting csv file.')
args = parser.parse_args()

rospy.init_node('kineverse_tracking_node')

tracker = TrackerNode('/tracked/state', '/pose_obs', args.step, args.max_iter, use_timer=False)

with open(res_pkg_path('package://iai_kitchen/urdf_obj/IAI_kitchen.urdf'), 'r') as urdf_file:
    urdf_kitchen_str = urdf_file.read()

kitchen_model = urdf_filler(URDF.from_xml_string(urdf_kitchen_str))
load_urdf(tracker.km_client, Path('iai_oven_area'), kitchen_model)

tracker.km_client.clean_structure()
tracker.km_client.dispatch_events()

groups = [[('iai_oven_area/links/room_link', 'iai_oven_area/room_link'),
           ('iai_oven_area/links/fridge_area', 'iai_oven_area/fridge_area'),
           ('iai_oven_area/links/fridge_area_footprint', 'iai_oven_area/fridge_area_footprint'),
           ('iai_oven_area/links/fridge_area_lower_drawer_handle', 'iai_oven_area/fridge_area_lower_drawer_handle'),
           ('iai_oven_area/links/fridge_area_lower_drawer_main', 'iai_oven_area/fridge_area_lower_drawer_main')],
          [('iai_oven_area/links/iai_fridge_door',
def bpb():
    import numpy as np

    import kineverse.bpb_wrapper as pb
    import kineverse.model.geometry_model as gm
    import kineverse.operations.urdf_operations as urdf

    from kineverse.urdf_fix import urdf_filler
    from urdf_parser_py.urdf import URDF

    km = gm.GeometryModel()

    with open(res_pkg_path('package://iai_pr2_description/robots/pr2_calibrated_with_ft2.xml'), 'r') as urdf_file:
        urdf.load_urdf(km, 'pr2', urdf_filler(URDF.from_xml_string(urdf_file.read())))

    with open(res_pkg_path('package://iai_kitchen/urdf_obj/IAI_kitchen.urdf'), 'r') as urdf_file:
        urdf.load_urdf(km, 'kitchen', urdf_filler(URDF.from_xml_string(urdf_file.read())))

    km.clean_structure()
    km.dispatch_events()

    kitchen = km.get_data('kitchen')
    pr2 = km.get_data('pr2')

    joint_symbols = {j.position for j in kitchen.joints.values() if hasattr(j, 'position')}
    joint_symbols |= {j.position for j in pr2.joints.values() if hasattr(j, 'position')}

    coll_world = km.get_active_geometry(joint_symbols)
    robot_parts = {n: o for n, o in coll_world.named_objects.items() if n[:4] == 'pr2/'}
    batch = {o: 2.0 for o in robot_parts.values()}

    print('Benchmarking by querying distances for {} objects. Total object count: {}.\n'
          .format(len(batch), len(coll_world.names)))

    dur_update = []
    dur_distances = []

    # print('Mesh files:\n{}'.format('\n'.join(sorted({pb.pb.get_shape_filename(s) for s in sum([o.collision_shape.child_shapes for o in coll_world.collision_objects], [])}))))
    # print('Objects:\n{}'.format('\n'.join(['{}:\n {}\n {} {}'.format(n,
    #                                                                  o.transform.rotation,
    #                                                                  o.transform.origin,
    #                                                                  pb.pb.get_shape_filename(o.collision_shape.get_child(0)))
    #                                        for n, o in coll_world.named_objects.items()])))

    for x in tqdm(range(100)):
        start = Time.now()
        coll_world.update_world({s: v for s, v in zip(joint_symbols, np.random.rand(len(joint_symbols)))})
        dur_update.append(Time.now() - start)

        start = Time.now()
        distances = coll_world.closest_distances(batch)
        dur_distances.append(Time.now() - start)

    dur_update_mean = sum([d.to_sec() for d in dur_update]) / len(dur_update)
    dur_distances_mean = sum([d.to_sec() for d in dur_distances]) / len(dur_distances)

    print('Update mean: {}\nUpdate max: {}\n'
          'Distances mean: {}\nDistances max: {}\n'.format(dur_update_mean,
                                                           max(dur_update).to_sec(),
                                                           dur_distances_mean,
                                                           max(dur_distances).to_sec()))