def cb_tick(self, timer_event):
    """Timer callback: advance the integrator one step and publish the result.

    Runs entirely under ``self.lock`` so the integrator state cannot be
    mutated concurrently by other callbacks while it is read here.

    :param timer_event: event object handed in by the timer (unused).
    """
    with self.lock:
        if self.integrator is not None:
            # One integration step; logging disabled for the periodic tick.
            self.integrator.run(self._integration_factor, self._iterations, logging=False)

            # Publish the integrator state as (symbol, value) pairs.
            self._js_msg.header.stamp = Time.now()
            self._js_msg.symbol, self._js_msg.value = zip(
                *[(n, self.integrator.state[s]) for s, n in self.joint_aliases.items()])
            self.pub_js.publish(self._js_msg)

            if self.visualizer is not None:
                # Substitute the current state into every tracked pose.
                poses = [
                    t.pose.subs(self.integrator.state)
                    for t in self.tracked_poses.values()
                ]
                self.visualizer.begin_draw_cycle('obs_points')
                # Only fully resolved poses (no free symbols left) are drawable.
                self.visualizer.draw_points(
                    'obs_points', cm.eye(4), 0.1,
                    [pos_of(p) for p in poses if len(p.free_symbols) == 0])
                # self.visualizer.draw_poses('obs_points', se.eye(4), 0.1, 0.02, [p for p in poses if len(p.free_symbols) == 0])
                self.visualizer.render('obs_points')
        else:
            print('Integrator does not exist')
def get_cmd(self, state, deltaT):
    """Compute a command from the QP controller for the given state.

    A copy of ``state`` is taken first so the caller's mapping is never
    mutated. When a camera is in use, the point-of-interest coordinate is
    driven along a sine of the elapsed time, producing a sweeping motion.
    ``deltaT`` is accepted for interface compatibility but not used here.
    """
    working_state = state.copy()
    if self.use_camera:
        elapsed = (Time.now() - self._start).to_sec()
        working_state[self._poi_pos] = math.sin(elapsed)
    return self.qp.get_cmd(working_state)
def __init__(self, model_type, *args):
    """Wrap an event-model instance and re-expose its public API.

    :param model_type: class to instantiate; must be a subclass of
        ``EventModel``, or ``None`` to fall back to a plain ``EventModel``.
    :param args: positional arguments forwarded to ``model_type``.
    :raises TypeError: if ``model_type`` is neither ``None`` nor an
        ``EventModel`` subclass.
    """
    if model_type is not None and not issubclass(model_type, EventModel):
        # TypeError is the conventional exception for a bad type argument;
        # it is a subclass of Exception, so existing broad handlers still work.
        raise TypeError('Model type must be a subclass of EventModel. Type "{}" is not.'.format(model_type))

    self.lock = RLock()
    self.km = model_type(*args) if model_type is not None else EventModel()
    self._last_update = Time(0)
    self.tracked_paths = PathSet()
    self.tracked_symbols = set()

    # Mirror every public callable of the wrapped model onto this wrapper,
    # unless this object already provides an attribute of the same name.
    for name, attr in [(d, getattr(self.km, d)) for d in dir(self.km) if d[0] != '_']:
        if callable(attr) and not hasattr(self, name):
            setattr(self, name, attr)
def _integrate_state(self):
    """Advance the internal state estimate by one integration step.

    The very first call only records a timestamp (no previous stamp to
    compute a delta-t from); every later call evaluates the transition
    function and propagates the per-variable change into all buffered
    states so they stay consistent with the new estimate.
    """
    t_now = Time.now()
    if self._stamp_last_integration is not None:
        # Seconds elapsed since the previous integration step.
        self._state[QPStateModel.DT_SYM] = (t_now - self._stamp_last_integration).to_sec()
        fn_args = [self._state[a] for a in self.transition_args]
        predicted = self.transition_fn.call2(fn_args).flatten()
        for idx, (sym, value) in enumerate(zip(self.ordered_vars, predicted)):
            shift = value - self._state[sym]
            self._state[sym] = value
            # Apply the same shift at this variable's position in every
            # buffered state vector.
            for buffered in self._state_buffer:
                buffered[idx] += shift
    self._stamp_last_integration = t_now
def __init__(self, km, controlled_symbols, resting_pose, camera_path=None):
    """Build an idling/tucking QP controller.

    :param km: articulation model used for data lookup and constraints.
    :param controlled_symbols: NOTE(review): this parameter is rebound
        below from the derived joint symbols and therefore appears to be
        effectively unused — confirm whether that is intentional.
    :param resting_pose: optional mapping symbol -> resting value; each
        entry becomes a soft constraint pulling the joint to that value.
    :param camera_path: optional model path of a camera; when given, a
        'sweeping gaze' objective makes the camera track a moving point.
    """
    tucking_constraints = {}
    if resting_pose is not None:
        # One soft constraint per joint, pulling it toward its rest value.
        tucking_constraints = {
            f'tuck {s}': SC(p - s, p - s, 1, s)
            for s, p in resting_pose.items()
        }
        # print('Tuck state:\n {}\nTucking constraints:\n {}'.format('\n '.join(['{}: {}'.format(k, v) for k, v in self._resting_pose.items()]), '\n '.join(tucking_constraints.keys())))

    # tucking_constraints.update(self.taxi_constraints)

    self.use_camera = camera_path is not None
    if camera_path is not None:
        # Point of interest oscillates along y, driven by the 'poi' symbol.
        self._poi_pos = gm.Symbol('poi')
        poi = gm.point3(1.5, 0.5, 0.0) + gm.vector3(0, self._poi_pos * 2.0, 0)

        camera = km.get_data(camera_path)
        cam_to_poi = poi - gm.pos_of(camera.pose)
        # 1 - cos(angle between camera x-axis and the POI direction);
        # zero exactly when the camera looks straight at the POI.
        lookat_dot = 1 - gm.dot_product(gm.x_of(camera.pose), cam_to_poi) / gm.norm(cam_to_poi)
        tucking_constraints['sweeping gaze'] = SC(-lookat_dot * 5, -lookat_dot * 5, 1, lookat_dot)

    # Collect all symbols appearing in the objectives.
    symbols = set()
    for c in tucking_constraints.values():
        symbols |= gm.free_symbols(c.expr)

    joint_symbols = {
        s for s in symbols if gm.get_symbol_type(s) != gm.TYPE_UNKNOWN
    }
    # Control the velocity (diff) symbol of every involved joint.
    controlled_symbols = {gm.DiffSymbol(s) for s in joint_symbols}

    hard_constraints = km.get_constraints_by_symbols(
        symbols.union(controlled_symbols))

    controlled_values, hard_constraints = generate_controlled_values(
        hard_constraints, controlled_symbols)
    controlled_values = depth_weight_controlled_values(km, controlled_values)

    self.qp = TQPB(hard_constraints, tucking_constraints, controlled_values)
    self._start = Time.now()
def cb_robot_joint_state(self, state_msg):
    """Joint-state callback: update tracked state, then emit a velocity command.

    The first invocation only records a timestamp and returns, so that a
    meaningful delta-t is available from the second call onward.

    :param state_msg: incoming joint-state message with parallel
        ``name`` and ``position`` lists.
    """
    if self._t_last_update is None:
        self._t_last_update = Time.now()
        return

    now = Time.now()
    dt = (now - self._t_last_update).to_sec()
    self._t_last_update = now
    # Drive the point-of-interest coordinate along a cosine of wall time.
    self.state[self._poi_pos] = cos(now.to_sec())

    if self.robot_js_aliases is None:
        return

    # Copy known joints from the message into the tracked state.
    for name, p in zip(state_msg.name, state_msg.position):
        if name in self.robot_js_aliases:
            self.state[self.robot_js_aliases[name]] = p

    cmd = {}
    if self._current_target is None:
        # No active target: fall back to the idle behavior, if available.
        if self.idle_controller is not None:
            print('Idling around...')
            cmd = self.idle_controller.get_cmd(self.state, deltaT=dt)
    else:
        if self.pushing_controller is not None:
            # Only act once odometry is available (when it is required).
            if not self._needs_odom or self._has_odom:
                try:
                    now = Time.now()
                    cmd = self.pushing_controller.get_cmd(self.state, deltaT=dt)
                    time_taken = Time.now() - now
                    print('Command generated. Time taken: {} Rate: {} hz'.
                          format(time_taken.to_sec(), 1.0 / time_taken.to_sec()))
                except Exception as e:
                    time_taken = Time.now() - now
                    print(
                        'Command generation failed. Time taken: {} Rate: {} hz\nError: {}'
                        .format(time_taken.to_sec(), 1.0 / time_taken.to_sec(), e))
                #print(self.pushing_controller.last_matrix_str())
            else:
                print('Waiting for odom...')

    if len(cmd) > 0:
        #print('commands:\n {}'.format('\n '.join(['{}: {}'.format(s, v) for s, v in cmd.items()])))
        cmd_msg = JointStateMsg()
        cmd_msg.header.stamp = Time.now()
        cmd_msg.name, cmd_msg.velocity = zip(
            *[(self.inverse_js_aliases[erase_type(s)], v) for s, v in cmd.items()])
        cmd_msg.position = [0] * len(cmd_msg.name)
        # NOTE: effort aliases the same list object as position (both all-zero).
        cmd_msg.effort = cmd_msg.position
        self.pub_cmd.publish(cmd_msg)
        # (fragment) tail of a recorded-terms mapping passed to a call whose
        # opening is outside this view — expressions recorded for plotting.
        'gaze_align': pushing_controller.look_goal,
        'in contact': pushing_controller.in_contact,
        'location_x': base_joint.x_pos,
        'location_y': base_joint.y_pos,
        'rotation_a': base_joint.a_pos
    }, printed_exprs={})

# RUN
int_factor = 0.02
integrator.restart(f'{robot} Cartesian Goal Example')
# print('\n'.join('{}: {}'.format(s, r) for s, r in integrator.integration_rules.items()))
try:
    # Time the integration run and record duration and iteration count.
    start = Time.now()
    integrator.run(int_factor, 1200, logging=False, real_time=False, show_progress=True)
    total_dur.append((Time.now() - start).to_sec())
    n_iter.append(integrator.current_iteration + 1)
except Exception as e:
    print(f'Exception during integration:\n{e}')

# DRAW
print('Drawing recorders')
# draw_recorders([filter_contact_symbols(integrator.recorder, integrator.qp_builder), integrator.sym_recorder], 4.0/9.0, 8, 4).savefig('{}/{}_sandbox_{}_plots.png'.format(plot_dir, robot, part))
if PANDA_LOGGING:
    # (fragment) call continues beyond this view.
    rec_w, rec_b, rec_c, recs = convert_qp_builder_log(
#stl_body = create_object(suzanne_stl, pb.Transform(0,-1, 0)) kw.add_collision_object(cube_body) kw.add_collision_object(cube2_body) #kw.add_collision_object(obj_body) #kw.add_collision_object(stl_body) cube_body.activate(True) cube2_body.activate(True) #obj_body.activate(True) #stl_body.activate(True) #print(cube_body.isActive, cube2_body.isActive, obj_body.isActive, stl_body.isActive) #print(cube_body.activation_state, cube2_body.activation_state, obj_body.activation_state, stl_body.activation_state) last_time = Time.now() while not rospy.is_shutdown(): now = Time.now() dt = now - last_time if dt.to_sec() >= 0.02: last_time = now y = math.sin(now.to_sec()) vis.begin_draw_cycle('objects', 'contacts') # pose_array = np.array([[1,0,0,-y - 1], # [0,1,0,0], # [0,0,1,0], # [0,0,0,1], # [1,0,0,y + 1], # [0,1,0,0], # [0,0,1,0], # [0,0,0,1],
km.clean_structure()
# Insert a world frame before the fetch robot is created, then connect the
# robot base to it with a roomba-style (planar driving) joint.
km.apply_operation_before('create world', 'create fetch',
                          CreateComplexObject(Path('world'), Frame('')))

roomba_op = create_roomba_joint_with_symbols(
    Path('world/pose'),
    Path('fetch/links/base_link/pose'),
    Path('fetch/joints/to_world'),
    vector3(0, 0, 1),   # rotation axis
    vector3(1, 0, 0),   # forward axis
    1.0, 0.6,           # limits/gains as defined by the helper — see its docs
    Path('fetch'))
km.apply_operation_after('connect world base_link', 'create fetch/base_link', roomba_op)
km.clean_structure()

# Serialize the fetch model to JSON and report how long it took.
with open(res_pkg_path('package://kineverse/test/fetch.json'), 'w') as fetch_json:
    start = Time.now()
    print('Starting to write JSON')
    json.dump(km.get_data('fetch'), fetch_json)  #, sort_keys=True, indent=' ')
    print('Writing JSON done. Time taken: {} seconds'.format(
        (Time.now() - start).to_sec()))

print('Plotting dependency graph...')
plot_graph(generate_dependency_graph(km, {'connect': 'blue'}),
           '{}/sandbox_dep_graph.pdf'.format(plot_dir))
plot_graph(generate_modifications_graph(km, {'connect': 'blue'}),
           '{}/sandbox_mod_graph.pdf'.format(plot_dir))
print('Done plotting.')

# GOAL DEFINITION
eef_pose = km.get_data('fetch/links/gripper_link/pose')
# Axis-angle representation of the goal rotation; combined into one
# rotation vector (axis scaled by angle) for error measurement.
goal_ax, goal_a = axis_angle_from_matrix(r_goal)
goal_axa = goal_ax * goal_a

t_results = {}
for k, m in methods.items():
    r_ctrl = rs[k]
    constraints = m(r_ctrl, r_goal)
    # One velocity-limited controlled value per free symbol of the constraints.
    integrator = CommandIntegrator(
        TQPB({},
             constraints,
             {str(s): CV(-1, 1, DiffSymbol(s), 1e-3)
              for s in sum([list(free_symbols(c.expr)) for c in constraints.values()], [])}),
        start_state={s_x: 1, s_y: 0, s_z: 0},
        equilibrium=0.001)  #,
        # recorded_terms=recorded_terms)
    integrator.restart('Convergence Test {} {}'.format(x, k))
    # try:
    # Time the run to get a per-iteration cost.
    t_start = Time.now()
    integrator.run(step, 100)
    t_per_it = (Time.now() - t_start).to_sec() / integrator.current_iteration

    # Evaluate the final rotation and compare it to the goal in two ways:
    # raw 9-element matrix difference and rotation-vector distance.
    final_r = rs_eval[k](**{str(s): v for s, v in integrator.state.items()})
    final_ax, final_a = axis_angle_from_matrix(final_r)
    final_axa = final_ax * final_a

    error_9d = sum([np.abs(float(x)) for x in (final_r - r_goal).elements()])
    error_axa_sym = norm(final_axa - goal_axa)
    # print(final_ax, final_a, type(final_a))
    error_axa = float(error_axa_sym)
    # (fragment) call continues beyond this view.
    t_results[k] = OptResult(error_9d, error_axa,
                             integrator.recorder.data[str(s_x)],
                             integrator.recorder.data[str(s_y)],
                             integrator.recorder.data[str(s_z)],
                             integrator.current_iteration,
    # (fragment) tail of a list of tracking groups; each entry maps a model
    # path to a tracking alias.
    [('iai_oven_area/links/sink_area_trash_drawer_handle',
      'iai_oven_area/sink_area_trash_drawer_handle'),
     ('iai_oven_area/links/sink_area_trash_drawer_main',
      'iai_oven_area/sink_area_trash_drawer_main')]
]

columns = [
    'DoF', 'Poses', 'Linear SD', 'Angular SD', 'Mean Error', 'SD Error',
    'Min Error', 'Max Error', 'Mean Iterations', 'Iteration Duration'
]
result_rows = []

visualizer = ROSBPBVisualizer('/tracker_vis', 'world')

last_n_dof = 0
total_start = Time.now()
for group in groups:
    for path, alias in group:
        tracker.track(path, alias)

    constraints = tracker.km_client.get_constraints_by_symbols(tracker.joints)
    # Reduce each single-symbol constraint to numeric [lower, upper] bounds,
    # evaluated at symbol value 0; skip entries whose lower bound stays
    # symbolic after substitution.
    # (fragment) comprehension continues beyond this view.
    constraints = {
        c.expr: [
            float(subs(c.lower, {c.expr: 0})),
            float(subs(c.upper, {c.expr: 0}))
        ]
        for k, c in constraints.items()
        if cm.is_symbol(c.expr) and not is_symbolic(subs(c.lower, {c.expr: 0}))
def bpb(): # Benchmark the bullet-physics-based collision world.
    import numpy as np
    import kineverse.bpb_wrapper as pb
    import kineverse.model.geometry_model as gm
    import kineverse.operations.urdf_operations as urdf
    from kineverse.urdf_fix import urdf_filler
    from urdf_parser_py.urdf import URDF

    # Load PR2 and kitchen URDFs into one geometry model.
    km = gm.GeometryModel()
    with open(
            res_pkg_path(
                'package://iai_pr2_description/robots/pr2_calibrated_with_ft2.xml'
            ), 'r') as urdf_file:
        urdf.load_urdf(km, 'pr2',
                       urdf_filler(URDF.from_xml_string(urdf_file.read())))

    with open(res_pkg_path('package://iai_kitchen/urdf_obj/IAI_kitchen.urdf'),
              'r') as urdf_file:
        urdf.load_urdf(km, 'kitchen',
                       urdf_filler(URDF.from_xml_string(urdf_file.read())))

    km.clean_structure()
    km.dispatch_events()

    kitchen = km.get_data('kitchen')
    pr2 = km.get_data('pr2')

    # All joint-position symbols of both models drive the active geometry.
    joint_symbols = {
        j.position
        for j in kitchen.joints.values() if hasattr(j, 'position')
    }
    joint_symbols |= {
        j.position
        for j in pr2.joints.values() if hasattr(j, 'position')
    }

    coll_world = km.get_active_geometry(joint_symbols)
    # Query distances only for the robot's own parts, up to 2.0 m.
    robot_parts = {
        n: o
        for n, o in coll_world.named_objects.items() if n[:4] == 'pr2/'
    }
    batch = {o: 2.0 for o in robot_parts.values()}
    # NOTE(review): string reconstructed from a line-collapse in the source —
    # confirm wording against the original file.
    print('Benchmarking by querying distances for {} objects. '
          'Total object count: {}.\n'
          .format(len(batch), len(coll_world.names)))

    dur_update = []
    dur_distances = []

    # print('Mesh files:\n{}'.format('\n'.join(sorted({pb.pb.get_shape_filename(s) for s in sum([o.collision_shape.child_shapes for o in coll_world.collision_objects], [])}))))
    # print('Objects:\n{}'.format('\n'.join(['{}:\n  {}\n  {} {}'.format(n,
    #                                                                    o.transform.rotation,
    #                                                                    o.transform.origin,
    #                                                                    pb.pb.get_shape_filename(o.collision_shape.get_child(0)))
    #                                        for n, o in coll_world.named_objects.items()])))

    # 100 rounds: randomize the configuration, then time the world update and
    # the closest-distance query separately.
    for x in tqdm(range(100)):
        start = Time.now()
        coll_world.update_world({
            s: v
            for s, v in zip(joint_symbols, np.random.rand(len(joint_symbols)))
        })
        dur_update.append(Time.now() - start)

        start = Time.now()
        distances = coll_world.closest_distances(batch)
        dur_distances.append(Time.now() - start)

    dur_update_mean = sum([d.to_sec() for d in dur_update]) / len(dur_update)
    dur_distances_mean = sum([d.to_sec()
                              for d in dur_distances]) / len(dur_distances)

    print('Update mean: {}\nUpdate max: {}\n'
          'Distances mean: {}\nDistances max: {}\n'.format(
              dur_update_mean,
              max(dur_update).to_sec(),
              dur_distances_mean,
              max(dur_distances).to_sec(),
          ))
# Visualize the rotation vector of an RPY frame as its angles oscillate.
vis = ROSVisualizer('axis_vis', 'world')

# NOTE(review): the names look swapped — 'ax az'.split(' ') yields
# ['ax', 'az'], so az is bound to Position('ax') and ay to Position('az').
# Confirm whether this is intentional.
az, ay = [Position(x) for x in 'ax az'.split(' ')]
frame_rpy = frame3_rpy(0, ay, az, point3(0,0,0))
state = {ay: 0, az: 0}

# Sample the rotation vector at 51 points as (ay, az) trace a sin/cos sweep.
points = [point3(0,0,0) + get_rot_vector(frame_rpy.subs({ay: sin(v), az: cos(v)}))
          for v in [(3.14512 / 25) * x for x in range(51)]]

vis.begin_draw_cycle('points')
vis.draw_strip('points', se.eye(4), 0.03, points)
vis.render('points')
rospy.sleep(1)

# NOTE(review): timer is initialized once and never updated inside the loop,
# so (now - timer) only grows and the 0.02 s throttle never resets —
# the body runs every loop iteration after start. Confirm intended.
timer = Time()
while not rospy.is_shutdown():
    now = Time.now()
    if (now - timer).to_sec() >= 0.02:
        state[ay] = sin(now.to_sec())
        state[az] = cos(now.to_sec())
        frame = frame_rpy.subs(state)
        axis = get_rot_vector(frame)
        vis.begin_draw_cycle('visuals')
        vis.draw_poses('visuals', se.eye(4), 0.4, 0.02, [frame])
        vis.draw_vector('visuals', pos_of(frame), axis)
        vis.render('visuals')