def __init__(self, js_topic, obs_topic, integration_factor=0.05, iterations=30, visualize=False, use_timer=True): self.km_client = GeometryModel() # ModelClient(GeometryModel) self._js_msg = ValueMapMsg() self._integration_factor = integration_factor self._iterations = iterations self._use_timer = use_timer self._unintialized_poses = set() self.aliases = {} self.tracked_poses = {} self.integrator = None self.lock = RLock() self.soft_constraints = {} self.joints = set() self.joint_aliases = {} self.visualizer = ROSVisualizer('/tracker_vis', 'world') if visualize else None self.pub_js = rospy.Publisher(js_topic, ValueMapMsg, queue_size=1) self.sub_obs = rospy.Subscriber(obs_topic, PSAMsg, self.cb_process_obs, queue_size=5)
def __init__(self, name):
    """Initialize an empty scenario container called *name*.

    All constraint/value books start out empty; the recorders are attached
    later, once something is actually run.
    """
    self.name = name
    self.km = GeometryModel()
    self.qp_type = TQPB
    # Every bookkeeping mapping starts out as its own empty dict.
    for attr in ('int_rules', 'recorded_terms', 'hard_constraints',
                 'soft_constraints', 'controlled_values', 'start_state'):
        setattr(self, attr, {})
    # Recorders are not created here.
    self.value_recorder = None
    self.symbol_recorder = None
def from_dict(cls, init_dict):
    """Alternate constructor building the object from a configuration dict.

    The robot URDF is pulled from the ROS parameter server, loaded into a
    fresh geometry model, and one endeffector is instantiated per entry of
    ``init_dict['links']``.
    """
    model = GeometryModel()
    robot_urdf = urdf_filler(URDF.from_xml_string(rospy.get_param('/robot_description')))
    robot_path = Path(robot_urdf.name)
    load_urdf(model, robot_path, robot_urdf)
    model.clean_structure()
    model.dispatch_events()
    ref_frame = init_dict['reference_frame']
    endeffectors = [Endeffector.from_dict(model.get_data(robot_path), ref_frame, d)
                    for d in init_dict['links']]
    return cls(model, robot_path, endeffectors)
from sensor_msgs.msg import JointState as JointStateMsg if __name__ == '__main__': rospy.init_node('iiwa_kinematic_sim') rosparam_description = rospy.get_param('/robot_description', None) if rosparam_description is None: with open( res_pkg_path( 'package://kineverse_experiment_world/urdf/iiwa_wsg_50.urdf' ), 'r') as f: rosparam_description = f.read() urdf = load_urdf_str(rosparam_description) km = GeometryModel() load_urdf(km, Path('iiwa'), urdf) km.clean_structure() km.dispatch_events() sim = KineverseKinematicSim(km, Path('iiwa'), state_topic='/iiwa/joint_states') sorted_names = sorted(sim.state_info.keys()) js_msg = JointStateMsg() js_msg.name = sorted_names def cb_pos_array_command(msg):
self.pub_state.publish(state_msg) # print('Performed update of estimate and published it.') # self.last_control = {} self.last_observation = {} if self.broadcaster is not None: self.broadcaster.update_state(self.tracker.get_estimated_state()) self.broadcaster.publish_state() if __name__ == '__main__': rospy.init_node('kineverse_ar_tracker') km = GeometryModel() try: location_x = rospy.get_param('/nobilia_x') location_y = rospy.get_param('/nobilia_y') location_z = rospy.get_param('/nobilia_z') location_yaw = rospy.get_param('/nobilia_yaw') except KeyError as e: print( f'Failed to get nobilia localization params. Original error:\n{e}') exit() reference_frame = rospy.get_param('~reference_frame', 'world') grab_from_tf = rospy.get_param('~grab_from_tf', False) grab_rate = rospy.get_param('~grab_rate', 20.0)
if hasattr(j, 'position') and gm.is_symbol(j.position)] robot_controlled_symbols = {gm.DiffSymbol(j) for j in joint_symbols} # if 'torso' not in str(j)} blacklist = {gm.Velocity(Path(f'{pr2_path}/torso_lift_joint'))} return joint_symbols, \ robot_controlled_symbols, \ start_pose, \ km.get_data(pr2_path).links['r_gripper_tool_frame'], \ blacklist if __name__ == '__main__': rospy.init_node('synchronized_motion') vis = ROSBPBVisualizer('~vis', base_frame='world') km = GeometryModel() robot_type = rospy.get_param('~robot', 'pr2') if robot_type.lower() == 'pr2': robot_urdf = load_urdf_file('package://iai_pr2_description/robots/pr2_calibrated_with_ft2.xml') elif robot_type.lower() == 'hsrb': robot_urdf = load_urdf_file('package://hsr_description/robots/hsrb4s.obj.urdf') elif robot_type.lower() == 'fetch': robot_urdf = load_urdf_file('package://fetch_description/robots/fetch.urdf') else: print(f'Unknown robot {robot_type}') exit(1) robot_name = robot_urdf.name robot_path = Path(robot_name) load_urdf(km, robot_path, robot_urdf, Path('world'))
# Load the Fetch URDF from disk; the string is kept for later use.
with open(res_pkg_path('package://fetch_description/robots/fetch.urdf'), 'r') as urdf_file:
    fetch_urdf_str = urdf_file.read()

# NOTE(review): the kitchen model is parsed from 'kitchen_urdf_str', which is
# defined earlier in the script (not visible in this excerpt) — verify.
kitchen_urdf_model = urdf_filler(URDF.from_xml_string(kitchen_urdf_str))

# Expose the kitchen description and a joint-state topic under its own namespace.
rospy.set_param('/{}/robot_description'.format(kitchen_urdf_model.name), kitchen_urdf_str)
kitchen_js_pub = rospy.Publisher('/{}/joint_states'.format(kitchen_urdf_model.name), JointStateMsg, queue_size=1)
tf_broadcaster = tf.TransformBroadcaster()

# KINEMATIC MODEL
km = GeometryModel()
kitchen_prefix = Path(kitchen_urdf_model.name)
load_urdf(km, kitchen_prefix, kitchen_urdf_model)

# Dump the model's operation dependency graph for inspection.
plot_graph(generate_dependency_graph(km, {'connect': 'blue'}),
           '{}/kitchen_sandbox_dep_graph.pdf'.format(plot_dir))

# ROBOT STATE PUBLISHER: spawn a robot_state_publisher for the kitchen namespace.
sp_p = subprocess.Popen([pub_path,
                         '__name:={}_state_publisher'.format(kitchen_urdf_model.name),
                         'robot_description:=/{}/robot_description'.format(kitchen_urdf_model.name),
                         '_tf_prefix:={}'.format(kitchen_urdf_model.name),
                         'joint_states:=/{}/joint_states'.format(kitchen_urdf_model.name)])
URDF.from_xml_string(hacky_urdf_parser_fix(urdf_str))) kitchen_model = urdf_filler( URDF.from_xml_string(hacky_urdf_parser_fix(urdf_kitchen_str))) # traj_pup = rospy.Publisher('/{}/commands/joint_trajectory'.format( urdf_model.name), JointTrajectoryMsg, queue_size=1) kitchen_traj_pup = rospy.Publisher('/{}/commands/joint_trajectory'.format( kitchen_model.name), JointTrajectoryMsg, queue_size=1) # KINEMATIC MODEL km = GeometryModel() load_urdf(km, Path(robot), urdf_model) load_urdf(km, Path('kitchen'), kitchen_model) # create_nobilia_shelf(km, Path('nobilia'), gm.frame3_rpy(0, 0, 0.4, # gm.point3(1.2, 0, 0.8))) km.clean_structure() km.apply_operation_before('create world', 'create {}'.format(robot), ExecFunction(Path('world'), Frame, '')) base_joint_path = Path(f'{robot}/joints/to_world') # Insert base to world kinematic if use_omni: insert_omni_base(km, Path(robot), urdf_model.get_root()) else:
def main(create_figure=False, vis_mode=False, log_csv=True, min_n_dof=1, samples=300, n_observations=25, noise_lin=0.2, noise_ang=30, noise_steps=5):
    """Benchmark EKF-based articulated-object tracking on the IAI kitchen model.

    Loads the kitchen URDF, groups its movable links into tracking pools,
    builds one EKFModel per pool and then — for a growing number of tracked
    pools — estimates randomly sampled ground-truth states from noised pose
    observations.  Optionally renders intermediate states and writes CSV
    logs plus PNG figures.

    :param create_figure:  If True, renders per-sample matplotlib figures.
    :param vis_mode:       Visualization mode; compared against the strings
                           'none', 'iter', 'iter-trail' and 'io' below.
    :param log_csv:        If True, every estimate is appended to a CSV log.
    :param min_n_dof:      Smallest number of EKF pools to start with.
    :param samples:        Number of random ground-truth states per setting.
    :param n_observations: Observation/update steps per sample.
    :param noise_lin:      Std-dev of translational observation noise.
    :param noise_ang:      Std-dev of rotational observation noise, in degrees
                           (converted to radians below).
    :param noise_steps:    Only referenced by the commented-out noise sweep.
    """
    wait_duration = rospy.Duration(0.1)  # NOTE(review): never used below
    vis = ROSBPBVisualizer('ekf_vis', 'world') if vis_mode != 'none' else None
    km = GeometryModel()
    with open(res_pkg_path('package://iai_kitchen/urdf_obj/IAI_kitchen.urdf'), 'r') as urdf_file:
        urdf_kitchen_str = urdf_file.read()
    kitchen_model = urdf_filler(URDF.from_xml_string(hacky_urdf_parser_fix(urdf_kitchen_str)))
    load_urdf(km, Path('kitchen'), kitchen_model)
    km.clean_structure()
    km.dispatch_events()

    kitchen = km.get_data('kitchen')

    # Group links whose pose expressions share symbols into tracking pools.
    tracking_pools = []
    for name, link in kitchen.links.items():
        symbols = gm.free_symbols(link.pose)
        if len(symbols) == 0:
            continue  # static link — nothing to track

        for x in range(len(tracking_pools)):
            syms, l = tracking_pools[x]
            if len(symbols.intersection(syms)) != 0:  # BAD ALGORITHM, DOES NOT CORRECTLY IDENTIFY SETS
                tracking_pools[x] = (syms.union(symbols), l + [(name, link.pose)])
                break
        else:
            # for-else: no overlapping pool found — open a new one.
            tracking_pools.append((symbols, [(name, link.pose)]))

    # tracking_pools = [tracking_pools[7]]
    # print('Identified {} tracking pools:\n{}'.format(len(tracking_pools), tracking_pools))

    all_ekfs = [EKFModel(dict(poses), km.get_constraints_by_symbols(symbols))
                for symbols, poses in tracking_pools]  # np.eye(len(symbols)) * 0.001
    print('Created {} EKF models'.format(len(all_ekfs)))
    print('\n'.join(str(e) for e in all_ekfs))

    # Sanity constraint
    min_n_dof = min(min_n_dof, len(all_ekfs))

    iteration_times = []

    for u in range(min_n_dof, len(all_ekfs) + 1):
        if rospy.is_shutdown():
            break

        # Track only the first u pools in this round.
        ekfs = all_ekfs[:u]
        observed_poses = {}
        for ekf in ekfs:
            for link_name, _ in ekf.takers:
                observed_poses[link_name] = kitchen.links[link_name].pose
        names, poses = zip(*sorted(observed_poses.items()))

        state_symbols = union([gm.free_symbols(p) for p in poses])
        # Deterministic symbol order (sorted by string representation).
        ordered_state_vars = [s for _, s in sorted((str(s), s) for s in state_symbols)]

        # Extract constant lower/upper bounds for symbols where both bounds
        # depend on no other symbol.
        state_constraints = {}
        for n, c in km.get_constraints_by_symbols(state_symbols).items():
            if gm.is_symbol(c.expr):
                s = gm.free_symbols(c.expr).pop()
                fs = gm.free_symbols(c.lower).union(gm.free_symbols(c.upper))
                if len(fs.difference({s})) == 0:
                    state_constraints[s] = (float(gm.subs(c.lower, {s: 0})), float(gm.subs(c.upper, {s: 0})))

        # Unconstrained symbols default to +/- pi/2.
        state_bounds = np.array([state_constraints[s] if s in state_constraints else [-np.pi * 0.5, np.pi * 0.5]
                                 for s in ordered_state_vars])

        state_fn = gm.speed_up(gm.vstack(*poses), ordered_state_vars)
        subworld = km.get_active_geometry(state_symbols)

        # Generate observation noise
        print('Generating R matrices...')
        n_cov_obs = 400

        full_log = []
        dof_iters = []

        # EXPERIMENT
        for lin_std, ang_std in [(noise_lin, noise_ang * (np.pi / 180.0))]:
            # zip(np.linspace(0, noise_lin, noise_steps),
            #     np.linspace(0, noise_ang * (np.pi / 180.0), noise_steps)):
            if rospy.is_shutdown():
                break

            # INITIALIZE SENSOR MODEL
            training_obs = []
            state = np.random.uniform(state_bounds.T[0], state_bounds.T[1])
            observations = state_fn.call2(state)
            for _ in range(n_cov_obs):
                noisy_obs = {}
                # Stacked poses: rows [4x, 4x+4) hold the 4x4 pose of link x.
                for x, noise in enumerate([t.dot(r) for t, r in zip(random_normal_translation(len(poses), 0, lin_std),
                                                                    random_rot_normal(len(poses), 0, ang_std))]):
                    noisy_obs[names[x]] = observations[x * 4:x * 4 + 4, :4].dot(noise)
                training_obs.append(noisy_obs)

            for ekf in ekfs:
                ekf.generate_R(training_obs)
                # ekf.set_R(np.eye(len(ekf.ordered_vars)) * 0.1)

            # Generate figure
            gridsize = (4, samples)
            plot_size = (4, 4)
            fig = plt.figure(figsize=(gridsize[1] * plot_size[0], gridsize[0] * plot_size[1])) if create_figure else None

            gt_states = []
            states = [[] for x in range(samples)]
            variances = [[] for x in range(samples)]
            e_obs = [[] for x in range(samples)]

            print('Starting iterations')
            for k in tqdm(range(samples)):
                if rospy.is_shutdown():
                    break

                state = np.random.uniform(state_bounds.T[0], state_bounds.T[1])
                gt_states.append(state)
                observations = state_fn.call2(state).copy()
                gt_obs_d = {n: observations[x * 4:x * 4 + 4, :4] for x, n in enumerate(names)}
                subworld.update_world(dict(zip(ordered_state_vars, state)))

                if vis_mode == 'iter' or vis_mode == 'io':
                    vis.begin_draw_cycle('gt', 'noise', 'estimate', 't_n', 't0')
                    vis.draw_world('gt', subworld, g=0, b=0)
                    vis.render('gt')

                estimates = []
                for ekf in ekfs:
                    particle = ekf.spawn_particle()
                    estimates.append(particle)

                # Collect the combined initial state of all particles and
                # sanity-check it against the symbol bounds.
                initial_state = dict(sum([[(s, v) for s, v in zip(ekf.ordered_vars, e.state)]
                                          for e, ekf in zip(estimates, ekfs)], []))
                initial_state = np.array([initial_state[s] for s in ordered_state_vars])
                if initial_state.min() < state_bounds.T[0].min() or initial_state.max() > state_bounds.T[1].max():
                    raise Exception('Estimate initialization is out of bounds: {}'.format(np.vstack([initial_state, state_bounds.T]).T))
                initial_delta = state - initial_state  # NOTE(review): not read afterwards

                for y in range(n_observations):
                    # Add noise to observations
                    noisy_obs = {}
                    for x, noise in enumerate([t.dot(r) for t, r in zip(random_normal_translation(len(poses), 0, lin_std),
                                                                        random_rot_normal(len(poses), 0, ang_std))]):
                        noisy_obs[names[x]] = observations[x * 4:x * 4 + 4, :4].dot(noise)

                    if vis_mode in {'iter', 'iter-trail'} or (vis_mode == 'io' and y == 0):
                        for n, t in noisy_obs.items():
                            subworld.named_objects[Path(('kitchen', 'links', n))].np_transform = t
                        if vis_mode != 'iter-trail':
                            vis.begin_draw_cycle('noise')
                        vis.draw_world('noise', subworld, r=0, g=0, a=0.1)
                        vis.render('noise')

                    start_time = time()
                    for estimate, ekf in zip(estimates, ekfs):
                        if y > 0:
                            # Regular EKF predict/update cycle with zero control.
                            control = np.zeros(len(ekf.ordered_controls))
                            estimate.state, estimate.cov = ekf.predict(estimate.state.flatten(), estimate.cov, control)
                            obs_vector = ekf.gen_obs_vector(noisy_obs)
                            estimate.state, estimate.cov = ekf.update(estimate.state, estimate.cov, ekf.gen_obs_vector(noisy_obs))
                            if vis_mode in {'iter', 'iter-trail'}:
                                subworld.update_world({s: v for s, v in zip(ekf.ordered_vars, estimate.state)})
                        else:
                            # First observation: a single damped gradient step
                            # instead of an EKF update.
                            obs_vector = ekf.gen_obs_vector(noisy_obs)
                            for _ in range(1):
                                h_prime = ekf.h_prime_fn.call2(estimate.state)
                                obs_delta = obs_vector.reshape((len(obs_vector), 1)) - ekf.h_fn.call2(estimate.state)
                                estimate.state += (h_prime.T.dot(obs_delta) * 0.1).reshape(estimate.state.shape)
                            if vis_mode in {'iter', 'io'}:
                                subworld.update_world({s: v for s, v in zip(ekf.ordered_vars, estimate.state)})

                    if vis_mode != 'none' and y == 0:
                        vis.draw_world('t0', subworld, b=0, a=1)
                        vis.render('t0')
                    elif vis_mode in {'iter', 'iter-trail'}:
                        if vis_mode != 'iter-trail':
                            vis.begin_draw_cycle('t_n')
                        vis.draw_world('t_n', subworld, b=0, a=1)
                        vis.render('t_n')

                    if log_csv or fig is not None:
                        e_state_d = dict(sum([[(s, v) for s, v in zip(ekf.ordered_vars, e.state)]
                                              for e, ekf in zip(estimates, ekfs)], []))
                        covs = dict(sum([[(s, v) for s, v in zip(ekf.ordered_vars, np.sqrt(np.trace(e.cov)))]
                                         for e, ekf in zip(estimates, ekfs)], []))
                        # NOTE(review): np.trace of a 2-D covariance is a scalar;
                        # zipping it with ekf.ordered_vars looks suspicious — verify
                        # the type of e.cov upstream.
                        e_state = np.hstack([e_state_d[s] for s in ordered_state_vars]).reshape((len(e_state_d), ))
                        if log_csv:
                            full_log.append(np.hstack(([lin_std, ang_std], state, e_state.flatten(),
                                                       np.array([covs[s] for s in ordered_state_vars]))))
                        if fig is not None:
                            e_obs[k].append(np.array([np.abs(ekf.gen_obs_vector(gt_obs_d) - ekf.h_fn.call2(e.state)).max()
                                                      for e, ekf in zip(estimates, ekfs)]))
                            states[k].append(e_state)
                            variances[k].append(np.array([covs[s] for s in ordered_state_vars]))
                else:
                    # for-else: runs once the observation loop completes
                    # (the loop body contains no break).
                    if vis_mode == 'io':
                        for estimate, ekf in zip(estimates, ekfs):
                            subworld.update_world({s: v for s, v in zip(ekf.ordered_vars, estimate.state)})
                        vis.draw_world('t_n', subworld, r=0, b=0, a=1)
                        vis.render('t_n')
                    # start_time is reset every observation step, so this records
                    # only the duration of the final step of each sample.
                    dof_iters.append(time() - start_time)

            if fig is not None:
                # One column of four stacked plots per sample; columns > 0 share
                # their y-axes with column 0.
                axes = [plt.subplot2grid(gridsize, (y, 0), colspan=1, rowspan=1) for y in range(gridsize[0])]
                axes = np.array(sum([[plt.subplot2grid(gridsize, (y, x), colspan=1, rowspan=1, sharey=axes[y])
                                      for y in range(gridsize[0])]
                                     for x in range(1, gridsize[1])], axes)).reshape((gridsize[1], gridsize[0]))
                for x, (gt_s, state, variance, obs_delta, (ax_s, ax_d, ax_o, ax_v)) in enumerate(zip(gt_states, states, variances, e_obs, axes)):
                    for y in gt_s:
                        ax_s.axhline(y, xmin=0.97, xmax=1.02)
                    ax_s.set_title('State; Sample: {}'.format(x))
                    ax_d.set_title('Delta from GT; Sample: {}'.format(x))
                    ax_o.set_title('Max Delta in Obs; Sample: {}'.format(x))
                    ax_v.set_title('Standard Deviation; Sample: {}'.format(x))
                    ax_s.plot(state)
                    ax_d.plot(gt_s - np.vstack(state))
                    ax_o.plot(obs_delta)
                    ax_v.plot(variance)
                    ax_s.grid(True)
                    ax_d.grid(True)
                    ax_o.grid(True)
                    ax_v.grid(True)
                fig.tight_layout()
                plt.savefig(res_pkg_path('package://kineverse_experiment_world/test/ekf_object_tracker_{}_{}.png'.format(lin_std, ang_std)))

        iteration_times.append(dof_iters)

        if log_csv:
            df = pd.DataFrame(columns=['lin_std', 'ang_std'] +
                                      ['gt_{}'.format(x) for x in range(len(state_symbols))] +
                                      ['ft_{}'.format(x) for x in range(len(state_symbols))] +
                                      ['var_{}'.format(x) for x in range(len(state_symbols))],
                              data=full_log)
            df.to_csv(res_pkg_path('package://kineverse_experiment_world/test/ekf_object_tracker.csv'), index=False)

    # Per-DoF-count timing summary.
    df = pd.DataFrame(columns=[str(x) for x in range(1, len(iteration_times) + 1)],
                      data=np.vstack(iteration_times).T)
    df.to_csv(res_pkg_path('package://kineverse_experiment_world/test/ekf_object_tracker_performance.csv'), index=False)
) exit(1) reference_frame = rospy.get_param('~reference_frame', 'world') model_path = rospy.get_param('~model') observations = rospy.get_param('~features', None) num_samples = rospy.get_param('~samples', 5) if type(observations) is not dict: print(type(observations)) print( 'Required parameter ~features needs to be a dict mapping tracked model paths to observation names' ) exit(1) km = GeometryModel() model_name = load_localized_model(km, model_path, reference_frame) km.clean_structure() km.dispatch_events() model = km.get_data(model_name) tracker = Kineverse6DQPTracker(km, observations.keys(), num_samples=num_samples) node = ROSQPEManager(tracker, model, Path(model_name), reference_frame,
# Abort early when no robot description is available on the parameter server.
if not rospy.has_param('/robot_description'):
    print('PR2 will be loaded from parameter server. It is currently not there.')
    exit(1)

urdf_model = load_urdf_file('package://iai_pr2_description/robots/pr2_calibrated_with_ft2.xml')
# urdf_model = load_urdf_str(rospy.get_param('/robot_description'))

if urdf_model.name.lower() != 'pr2':
    print(f'The loaded robot is not the PR2. Its name is "{urdf_model.name}"')
    exit(1)

km = GeometryModel()
load_urdf(km, Path('pr2'), urdf_model)
km.clean_structure()

# When the reference frame differs from the robot root, attach the robot to
# that frame via a virtual omni-base joint.
reference_frame = rospy.get_param('~reference_frame', urdf_model.get_root())
use_base = reference_frame != urdf_model.get_root()
if use_base:
    insert_omni_base(km, Path('pr2'), urdf_model.get_root(), reference_frame)
    # NOTE(review): indentation of this assignment is reconstructed — confirm
    # it is only needed in the omni-base case.
    base_joint_path = Path(f'pr2/joints/to_{reference_frame}')

visualizer = ROSBPBVisualizer('~vis', base_frame=reference_frame)
urdf_kitchen_str = urdf_file.read() urdf_model = URDF.from_xml_string(urdf_str) kitchen_model = urdf_filler(URDF.from_xml_string(urdf_kitchen_str)) traj_pup = rospy.Publisher('/{}/commands/joint_trajectory'.format( urdf_model.name), JointTrajectoryMsg, queue_size=1) kitchen_traj_pup = rospy.Publisher('/{}/commands/joint_trajectory'.format( kitchen_model.name), JointTrajectoryMsg, queue_size=1) # KINEMATIC MODEL km = GeometryModel() load_urdf(km, Path('fetch'), urdf_model) load_urdf(km, Path('kitchen'), kitchen_model) km.clean_structure() km.apply_operation_before('create world', 'create fetch', CreateComplexObject(Path('world'), Frame(''))) if use_omni: base_op = create_omnibase_joint_with_symbols( Path('world/pose'), Path('fetch/links/base_link/pose'), Path('fetch/joints/to_world'), vector3(0, 0, 1), 1.0, 0.6, Path('fetch')) else: base_op = create_roomba_joint_with_symbols( Path('world/pose'), Path('fetch/links/base_link/pose'),
class TrackerNode(object):
    """ROS node that tracks the configuration of articulated models from 6D
    pose observations.

    Observed poses (PSAMsg on *obs_topic*) are tied to the model's forward
    kinematics through soft alignment constraints; a CommandIntegrator pulls
    the joint state towards the observations and the result is published as a
    ValueMapMsg on *js_topic*.
    """

    def __init__(self, js_topic, obs_topic, integration_factor=0.05, iterations=30, visualize=False, use_timer=True):
        """Wire up the kinematic model, publisher and subscriber.

        :param js_topic:            Topic the estimated joint state is published on.
        :param obs_topic:           Topic pose observations arrive on.
        :param integration_factor:  Step factor handed to the integrator per tick.
        :param iterations:          Iteration count handed to the integrator per tick.
        :param visualize:           If True, debug markers are drawn on '/tracker_vis'.
        :param use_timer:           If True, a 50 Hz timer drives the integration loop.
        """
        self.km_client = GeometryModel()  # ModelClient(GeometryModel)
        self._js_msg = ValueMapMsg()
        self._integration_factor = integration_factor
        self._iterations = iterations
        self._use_timer = use_timer
        # NOTE(review): attribute name carries the original 'unintialized' typo.
        self._unintialized_poses = set()
        self.aliases = {}        # observation frame id -> model path
        self.tracked_poses = {}  # model path -> TrackerEntry
        self.integrator = None   # created lazily by generate_opt_problem()
        self.lock = RLock()
        self.soft_constraints = {}
        self.joints = set()
        self.joint_aliases = {}
        self.visualizer = ROSVisualizer('/tracker_vis', 'world') if visualize else None
        self.pub_js = rospy.Publisher(js_topic, ValueMapMsg, queue_size=1)
        self.sub_obs = rospy.Subscriber(obs_topic, PSAMsg, self.cb_process_obs, queue_size=5)

    def track(self, model_path, alias):
        """Start tracking the model at *model_path*; observations carry the
        frame id *alias*.

        :param model_path: Path of the model inside the kinematic model.
        :param alias:      Frame id under which observations refer to the model.
        """
        with self.lock:
            if type(model_path) is not str:
                model_path = str(model_path)

            # The first tracked pose starts the integration timer (if enabled).
            start_tick = len(self.tracked_poses) == 0 and self._use_timer

            if model_path not in self.tracked_poses:
                # One position symbol per pose coordinate: translation + quaternion.
                syms = [Position(Path(model_path) + (x, )) for x in ['x', 'y', 'z', 'qx', 'qy', 'qz', 'qw']]

                def rf(msg, state):
                    # Writes a geometry_msgs Pose into the symbol state dict.
                    state[syms[0]] = msg.position.x
                    state[syms[1]] = msg.position.y
                    state[syms[2]] = msg.position.z
                    state[syms[3]] = msg.orientation.x
                    state[syms[4]] = msg.orientation.y
                    state[syms[5]] = msg.orientation.z
                    state[syms[6]] = msg.orientation.w

                def process_model_update(data):
                    # Regenerate the alignment constraints whenever the model changes.
                    self._generate_pose_constraints(model_path, data)

                axis = vector3(*syms[:3])  # NOTE(review): unused local
                self.aliases[alias] = model_path
                self.tracked_poses[model_path] = TrackerEntry(frame3_quaternion(*syms), rf, process_model_update)
                self._unintialized_poses.add(model_path)

                # A remote model client gets a change subscription; a local
                # GeometryModel is processed immediately.
                if type(self.km_client) == ModelClient:
                    self.km_client.register_on_model_changed(Path(model_path), process_model_update)
                else:
                    process_model_update(self.km_client.get_data(model_path))

            if start_tick:
                self.timer = rospy.Timer(rospy.Duration(1.0 / 50), self.cb_tick)

    def stop_tracking(self, model_path):
        """Stop tracking *model_path* and drop its alignment constraints."""
        with self.lock:
            if type(model_path) is not str:
                model_path = str(model_path)
            if model_path in self.tracked_poses:
                te = self.tracked_poses[model_path]
                self.km_client.deregister_on_model_changed(te.model_cb)
                del self.tracked_poses[model_path]
                # NOTE(review): _generate_pose_constraints stores the rotation
                # constraint under '{} align rotation' (un-numbered); these
                # numbered keys look stale and would raise KeyError — verify.
                del self.soft_constraints['{} align rotation 0'.format(model_path)]
                del self.soft_constraints['{} align rotation 1'.format(model_path)]
                del self.soft_constraints['{} align rotation 2'.format(model_path)]
                del self.soft_constraints['{} align position'.format(model_path)]
                self.generate_opt_problem()

                # Last tracked pose gone -> stop the timer.
                if len(self.tracked_poses) == 0:
                    self.timer.shutdown()

    @profile
    def cb_tick(self, timer_event):
        """Timer callback: run one integration step and publish the joint state."""
        with self.lock:
            if self.integrator is not None:
                self.integrator.run(self._integration_factor, self._iterations, logging=False)
                self._js_msg.header.stamp = Time.now()
                self._js_msg.symbol, self._js_msg.value = zip(*[(n, self.integrator.state[s]) for s, n in self.joint_aliases.items()])
                self.pub_js.publish(self._js_msg)

                if self.visualizer is not None:
                    poses = [t.pose.subs(self.integrator.state) for t in self.tracked_poses.values()]
                    self.visualizer.begin_draw_cycle('obs_points')
                    # Only fully substituted (symbol-free) poses are drawn.
                    self.visualizer.draw_points('obs_points', cm.eye(4), 0.1, [pos_of(p) for p in poses if len(p.free_symbols) == 0])
                    # self.visualizer.draw_poses('obs_points', se.eye(4), 0.1, 0.02, [p for p in poses if len(p.free_symbols) == 0])
                    self.visualizer.render('obs_points')
            else:
                print('Integrator does not exist')

    @profile
    def cb_process_obs(self, poses_msg):
        """Observation callback: feed every recognized pose into its tracker entry."""
        # print('Got new observation')
        with self.lock:
            for pose_msg in poses_msg.poses:
                if pose_msg.header.frame_id in self.aliases and self.integrator is not None:
                    self.tracked_poses[self.aliases[pose_msg.header.frame_id]].update_state(pose_msg.pose, self.integrator.state)
                    # First observation of a pose marks it as initialized.
                    if self.aliases[pose_msg.header.frame_id] in self._unintialized_poses:
                        self._unintialized_poses.remove(self.aliases[pose_msg.header.frame_id])

    def _generate_pose_constraints(self, str_path, model):
        """Create or remove the soft constraints aligning the model's pose
        expression with the observed pose of *str_path*.

        :param str_path: Model path as string.
        :param model:    Current model data, or None if the model was removed.
        """
        with self.lock:
            if str_path in self.tracked_poses:
                align_rotation = '{} align rotation'.format(str_path)
                align_position = '{} align position'.format(str_path)
                if model is not None:
                    m_free_symbols = cm.free_symbols(model.pose)
                    if len(m_free_symbols) > 0:
                        te = self.tracked_poses[str_path]
                        self.joints |= m_free_symbols
                        self.joint_aliases = {s: str(s) for s in self.joints}

                        # Soft constraint pulling the model rotation towards the observation.
                        r_dist = norm(cm.rot_of(model.pose) - cm.rot_of(te.pose))
                        self.soft_constraints[align_rotation] = SC(-r_dist, -r_dist, 1, r_dist)

                        # print(align_position)
                        # print(model.pose)
                        # Same for the position.
                        dist = norm(cm.pos_of(model.pose) - cm.pos_of(te.pose))
                        # print('Distance expression:\n{}'.format(dist))
                        # print('Distance expression symbol overlap:\n{}'.format(m_free_symbols.intersection(cm.free_symbols(dist))))
                        # for s in m_free_symbols:
                        #     print('Diff w.r.t {}:\n{}'.format(s, cm.diff(dist, s)))
                        self.soft_constraints[align_position] = SC(-dist, -dist, 1, dist)
                        self.generate_opt_problem()

                        # Avoid crashes due to insufficient perception data. This is not fully correct.
                        if str_path in self._unintialized_poses:
                            # Seed the tracker entry with the model's zero pose.
                            state = self.integrator.state.copy() if self.integrator is not None else {}
                            state.update({s: 0.0 for s in m_free_symbols if s not in state})
                            null_pose = cm.subs(model.pose, state)
                            pos = cm.pos_of(null_pose)
                            quat = real_quat_from_matrix(cm.rot_of(null_pose))
                            msg = PoseMsg()
                            msg.position.x = pos[0]
                            msg.position.y = pos[1]
                            msg.position.z = pos[2]
                            msg.orientation.x = quat[0]
                            msg.orientation.y = quat[1]
                            msg.orientation.z = quat[2]
                            msg.orientation.w = quat[3]
                            te.update_state(msg, self.integrator.state)
                else:
                    # Model was removed -> drop its alignment constraints.
                    regenerate_problem = False
                    if align_position in self.soft_constraints:
                        del self.soft_constraints[align_position]
                        regenerate_problem = True

                    if align_rotation in self.soft_constraints:
                        del self.soft_constraints[align_rotation]
                        regenerate_problem = True

                    if regenerate_problem:
                        self.generate_opt_problem()

    def generate_opt_problem(self):
        """Rebuild the QP problem and the integrator from the current constraint sets."""
        joint_symbols = self.joints
        opt_symbols = {DiffSymbol(j) for j in joint_symbols}
        hard_constraints = self.km_client.get_constraints_by_symbols(joint_symbols | opt_symbols)

        controlled_values, hard_constraints = generate_controlled_values(hard_constraints, opt_symbols)

        for mp in self._unintialized_poses:
            # NOTE(review): 'state' aliases the live integrator state here, so
            # this update mutates it in place — zero-initializes unseen symbols.
            state = self.integrator.state if self.integrator is not None else {}
            te = self.tracked_poses[mp]
            state.update({s: 0.0 for s in cm.free_symbols(te.pose)})

        state = {} if self.integrator is None else self.integrator.state
        self.integrator = CommandIntegrator(TQPB(hard_constraints, self.soft_constraints, controlled_values), start_state=state)
        self.integrator.restart('Pose Tracking')
#!/usr/bin/env python import kineverse.gradients.common_math as cm from kineverse.gradients.gradient_math import point3, vector3, translation3, frame3_axis_angle, Position from kineverse.model.geometry_model import GeometryModel, RigidBody, Geometry, Path from kineverse.operations.basic_operations import CreateComplexObject as OP_CCO, \ CreateSingleValue as OP_CSV from kineverse.operations.special_kinematics import SetBallJoint as OP_SBJ if __name__ == '__main__': km = GeometryModel() geom_head = Geometry( Path('head'), cm.eye(4), 'mesh', mesh='package://kineverse_experiment_world/urdf/faucet_head.obj') rb_head = RigidBody(Path('world'), cm.eye(4), geometry={0: geom_head}, collision={0: geom_head}) geom_base = Geometry( Path('base'), cm.eye(4), 'mesh', mesh='package://kineverse_experiment_world/urdf/faucet_base.obj') rb_base = RigidBody(Path('world'), cm.eye(4), geometry={0: geom_base},
f'connect {prefix}/links/door {prefix}/links/handle', CreateURDFFrameConnection(prefix + ('joints', 'handle'), Path(handle_rb.parent), Path(f'{prefix}/links/handle'))) km.apply_operation( f'add lock {door_position}', ConditionalDoorHandleConstraints(door_position, handle_position, math.pi * 0.01, math.pi * 0.15)) if __name__ == '__main__': rospy.init_node('kineverse_door_opening') visualizer = ROSBPBVisualizer('~visualization', base_frame='world') km = GeometryModel() with open( res_pkg_path( 'package://kineverse_experiment_world/urdf/iiwa_wsg_50.urdf'), 'r') as f: iiwa_urdf = urdf_filler( URDF.from_xml_string(hacky_urdf_parser_fix(f.read()))) load_urdf(km, Path('iiwa'), iiwa_urdf) km.clean_structure() km.dispatch_events() door_x, door_y = [gm.Position(f'localization_{x}') for x in 'xy'] create_door(km,
km.set_data('pb_l', pb_l_obj) km.set_data('pb_r', pb_r_obj) km.set_data('ph_l', ph_l_obj) km.set_data('ph_r', ph_r_obj) km.set_data('bar_obj', bar_obj) km.clean_structure() km.dispatch_events() if __name__ == '__main__': rospy.init_node('kinematics_test') vis = ROSBPBVisualizer('kinematics_test', 'world') km = GeometryModel() simple_kinematics(km, vis) symbols = set(km._symbol_co_map.keys()) coll_world = km.get_active_geometry(symbols) broadcaster = ModelTFBroadcaster_URDF('robot_description', '', km.get_data('')) print('\n '.join([str(x) for x in sorted(broadcaster.frame_info)])) last_update = time() while not rospy.is_shutdown(): now = time() if now - last_update >= (1.0 / 50.0): state = {s: sin(time() + x * 0.1) for x, s in enumerate(symbols)}
def test_urdf_insertion(self):
    """Inserting a hand-built URDFRobot into a GeometryModel exposes exactly
    the links whose poses depend on the queried symbols."""
    gm = GeometryModel()

    sym_a = create_pos('a')
    sym_b = create_pos('b')
    sym_c = create_pos('c')

    box_shape = Geometry('my_box', se.eye(4), 'box')
    mesh_shape = Geometry('my_mesh', se.eye(4), 'mesh', mesh='package://kineverse/meshes/suzanne.obj')
    # Each link's pose depends on exactly one symbol (a or b).
    box_link = KinematicLink('world', translation3(sym_a, 1, 0), geometry={'0': box_shape}, collision={'0': box_shape})
    mesh_link = KinematicLink('world', translation3(1, sym_b, 0), geometry={'0': mesh_shape}, collision={'0': mesh_shape})

    robot = URDFRobot('my_bot')
    robot.links['my_box'] = box_link
    robot.links['my_mesh'] = mesh_link

    gm.set_data('my_bot', robot)
    gm.clean_structure()
    gm.dispatch_events()  # Generates the pose expressions for links

    # Querying a single symbol activates only the link depending on it.
    # Fix: use assertEqual — assertEquals is a deprecated alias removed in
    # Python 3.12.
    sub_world = gm.get_active_geometry({sym_a})
    self.assertEqual(len(sub_world.names), 1)
    self.assertIn('my_bot/links/my_box', sub_world.names)
    self.assertIn('my_bot/links/my_box', sub_world.named_objects)

    # Both symbols -> both links are active.
    sub_world = gm.get_active_geometry({sym_a, sym_b})
    self.assertEqual(len(sub_world.names), 2)
    self.assertIn('my_bot/links/my_box', sub_world.names)
    self.assertIn('my_bot/links/my_mesh', sub_world.names)
    self.assertIn('my_bot/links/my_box', sub_world.named_objects)
    self.assertIn('my_bot/links/my_mesh', sub_world.named_objects)

    # A symbol no pose depends on yields an empty sub-world.
    sub_world = gm.get_active_geometry({sym_c})
    self.assertEqual(len(sub_world.names), 0)
print('Angle at {:>8.2f}: {:> 8.2f}'.format( np.rad2deg(p), np.rad2deg(np.arccos(cos.flatten()[0])))) world.update_world(state) visualizer.draw_world('world', world) visualizer.render('world') def handle_sigint(*args): dpg.stop_dearpygui() if __name__ == '__main__': rospy.init_node('nobilia_shelf_test') km = GeometryModel() vis = ROSBPBVisualizer('kineverse/nobilia/vis', base_frame='world') origin_pose = gm.translation3(x_loc_sym, 0, 0) debug = create_nobilia_shelf(km, Path('nobilia'), origin_pose) km.clean_structure() km.dispatch_events() shelf = km.get_data('nobilia') shelf_pos = shelf.joints['hinge'].position world = km.get_active_geometry({shelf_pos }.union(gm.free_symbols(origin_pose)))
urdf_kitchen_str = urdf_file.read() urdf_model = urdf_filler(URDF.from_xml_string(urdf_str)) kitchen_model = urdf_filler(URDF.from_xml_string(urdf_kitchen_str)) traj_pup = rospy.Publisher('/{}/commands/joint_trajectory'.format( urdf_model.name), JointTrajectoryMsg, queue_size=1) kitchen_traj_pup = rospy.Publisher('/{}/commands/joint_trajectory'.format( kitchen_model.name), JointTrajectoryMsg, queue_size=1) # KINEMATIC MODEL km = GeometryModel() load_urdf(km, Path(robot), urdf_model) load_urdf(km, Path('kitchen'), kitchen_model) km.clean_structure() km.apply_operation_before('create world', 'create {}'.format(robot), CreateComplexObject(Path('world'), Frame(''))) if robot == 'pr2' or use_omni: base_op = create_omnibase_joint_with_symbols( Path('world/pose'), Path('{}/links/{}/pose'.format(robot, urdf_model.get_root())), Path('{}/joints/to_world'.format(robot)), vector3(0, 0, 1), 1.0, 0.6, Path(robot)) else: base_op = create_roomba_joint_with_symbols(