def command_ik(self, direction):
    """Request IK toward the current end-effector pose shifted by
    *direction*, a 3-element positional offset (orientation unchanged)."""
    pose = self.limb_iface.endpoint_pose()
    # Flatten the pose into a 7-vector: xyz position then xyzw quaternion.
    pose_vec = numpy.array(tuple(pose['position']) + tuple(pose['orientation']))
    # Zero-pad the positional delta so the quaternion part is untouched.
    offset = numpy.concatenate((direction, numpy.zeros(4)))
    ik_command.service_request(self.iksvc, pose_vec + offset, self.limb,
                               blocking=False)
def command_ik_pose(self, direction):
    """Request IK toward the current end-effector pose offset by
    *direction*, a full 7-element pose delta (position + quaternion)."""
    pose = self.limb_iface.endpoint_pose()
    # position + orientation are tuples; '+' concatenates into 7 values.
    current = numpy.array(pose['position'] + pose['orientation'])
    target = current + direction
    ik_command.service_request(self.iksvc, target, self.limb, blocking=False)
def command_ik(self, direction):
    """Use the Rethink IK service to figure out a desired joint position.

    *direction* is a 3-vector positional offset applied to the current
    end-effector pose; orientation is left as-is.
    """
    end_pose = self.limb_iface.endpoint_pose()
    # Build [x, y, z, qx, qy, qz, qw] from the pose dict.
    current = numpy.array(tuple(end_pose['position']) +
                          tuple(end_pose['orientation']))
    # Extend the 3-vector with four zeros so only position changes.
    delta = numpy.concatenate((direction, numpy.zeros(4)))
    ik_command.service_request(self.iksvc, current + delta, self.limb,
                               blocking=False)
def command_ik(side, direction): """Use the Rethink IK service to figure out a desired joint position""" if side == 'left': limb = left iksvc = left_iksvc ns = left_ns current_p = np.array(limb.endpoint_pose()['position']+limb.endpoint_pose()['orientation']) direction = np.array(direction) print direction desired_p = current_p + direction ik_command.service_request(iksvc, desired_p, side)
def go_to_coordinate(side, c):
    """Drive the arm on *side* to pose *c* (dict with 'position' and
    'orientation' objects exposing x/y/z[/w]) via the IK service."""
    iksvc = left_iksvc if side == "left" else right_iksvc
    limb = baxter_interface.Limb(side)
    pos = c['position']
    ori = c['orientation']
    # IK request wants a flat [x, y, z, qx, qy, qz, qw] list.
    target = [pos.x, pos.y, pos.z, ori.x, ori.y, ori.z, ori.w]
    print("moving to", target)
    ik_command.service_request(iksvc, target, side)
def command_ik(side, direction):
    """Offset the current end-effector pose of *side*'s arm by *direction*
    (a 7-element pose delta) and request IK toward the result."""
    if side == 'left':
        limb, iksvc, ns = left, left_iksvc, left_ns
    else:
        limb, iksvc, ns = right, right_iksvc, right_ns
    pose = limb.endpoint_pose()
    # position + orientation tuples concatenate into the 7-vector the
    # service expects.
    desired_p = np.array(pose['position'] + pose['orientation']) + np.array(direction)
    ik_command.service_request(iksvc, desired_p, side)
def depth_callback(self, data): print "Estimating depth" # Get multiple poses. For now, just choose the first one, which corresponds to the biggest object if len(data.poses) <= 0: print "no poses" return pose = data.poses[0] p = [pose.position.x, pose.position.y, pose.position.z]+[pose.orientation.x, pose.orientation.y, pose.orientation.z, pose.orientation.w] ik_command.service_request(self.iksvc, p, self.limb, blocking=True) print p self.done = True print "unregistering" self.depth_handler.unregister()
def update_angles(change_angles):
    """Accumulate *change_angles* (radians, roll/pitch/yaw) into the global
    desired orientation and command the left arm toward it via IK.

    Only the left arm is driven: the scooter only has a left arm.
    """
    global desired_angles
    # np arrays add element-wise; plain python lists would concatenate.
    updated = np.array(desired_angles) + np.array(change_angles)
    # NOTE(review): no wrap-around is applied, so the stored angles may
    # drift outside [0, 2*pi) — matches the original behavior.
    desired_angles = updated.tolist()
    # Convert the accumulated Euler angles to a quaternion.
    quat = tf.transformations.quaternion_from_euler(
        desired_angles[0], desired_angles[1], desired_angles[2])
    pose_vec = np.array(left.endpoint_pose()['position'] +
                        left.endpoint_pose()['orientation'])
    # Keep the current position, replace only the orientation part.
    pose_vec[3:7] = quat
    ik_command.service_request(left_iksvc, pose_vec, "left")
def main():
    # Entry point: enable the robot, locate objects via depth estimation,
    # then pick each object and stack it at a fixed stack pose.
    arg_fmt = argparse.RawDescriptionHelpFormatter
    parser = argparse.ArgumentParser(formatter_class=arg_fmt,
                                     description=main.__doc__)
    required = parser.add_argument_group('required arguments')
    required.add_argument(
        '-l', '--limb', required=True, choices=['left', 'right'],
        help='send joint trajectory to which limb'
    )
    # rospy.myargv strips ROS remapping args before argparse sees them.
    args = parser.parse_args(rospy.myargv()[1:])
    print args
    limb = args.limb

    print("Initializing node... ")
    rospy.init_node("super_stacker_%s" % (limb,))
    print("Getting robot state... ")
    rs = baxter_interface.RobotEnable(CHECK_VERSION)
    print("Enabling robot... ")
    rs.enable()

    #Calibrate gripper
    gripper_if = baxter_interface.Gripper(limb)
    if not gripper_if.calibrated():
        print "Calibrating gripper"
        gripper_if.calibrate()

    limbInterface = baxter_interface.Limb(limb)
    iksvc, ns = ik_command.connect_service(limb)
    rate = rospy.Rate(100)

    # Get goal poses of each object in the scene
    dc = DepthCaller(limb)  # Subscribe to estimate_depth

    # Move to pose published by estimate_depth; spin until the depth
    # callback reports completion.
    while (not dc.done) and (not rospy.is_shutdown()):
        rate.sleep()
        #pass

    # Subscribe to object_finder and start visual servoing/grasping
    # The stack pose is the pose of the smallest object with a z-offset
    # accounting for the height of the object (this assumption makes
    # it really important that the segmentation is accurate)
    # NOTE(review): hard-coded pose below; the commented lines suggest it
    # was once derived from the last detected object — confirm intent.
    stack_pose = [0.594676466827, -0.296644499519, -0.0322744943164,
                  0.971805911045, -0.22637170004, 0.065946440385,
                  0.000437813100735]
    #stack_pose = dc.object_poses[-1]
    #stack_pose = incrementPoseZ(stack_pose, object_height)
    #dc.object_poses.pop(len(dc.object_poses)-1)

    # For each detected object: move above it, visually servo/grasp,
    # carry it to the stack pose, release, and raise the stack pose by
    # one object height for the next iteration.
    for pose in dc.object_poses:
        ik_command.service_request(iksvc, pose, limb, blocking=True)
        vc = VisualCommand(iksvc, limb)
        vc.subscribe()
        while (not vc.done) and (not rospy.is_shutdown()):
            rate.sleep()
        ik_command.service_request(iksvc, stack_pose, limb, blocking=True)
        #limbInterface.move_to_joint_positions(stack_pose)
        # Let go
        gripper_if.open(block=True)
        stack_pose = incrementPoseZ(pose, object_height)
def command_ik_pose(self, direction):
    """Shift the current end-effector pose by the 7-element delta
    *direction* and ask the IK service to reach the result."""
    end_pose = self.limb_iface.endpoint_pose()
    # Concatenate position and orientation tuples into one 7-vector.
    desired_p = numpy.array(end_pose['position'] + end_pose['orientation']) + direction
    ik_command.service_request(self.iksvc, desired_p, self.limb,
                               blocking=False)