def transformFromPose(cls, p):
    t = Transform()
    t.translation.x = p.position.x
    t.translation.y = p.position.y
    t.translation.z = p.position.z
    t.rotation = deepcopy(p.orientation)
    return t

def update_kalman(ar_tags):
    print("started update")
    rospy.wait_for_service('innovation')
    update = rospy.ServiceProxy('innovation', NuSrv)
    listener = tf.TransformListener()
    while True:
        try:
            try:
                (trans, rot) = listener.lookupTransform(
                    ar_tags['arZ'], ar_tags['ar1'], rospy.Time(0))
            except (tf.LookupException, tf.ConnectivityException,
                    tf.ExtrapolationException):
                print("Couldn't look up transform")
                continue
            lin = Vector3()
            quat = Quaternion()
            lin.x = trans[0]
            lin.y = trans[1]
            lin.z = trans[2]
            quat.x = rot[0]
            quat.y = rot[1]
            quat.z = rot[2]
            quat.w = rot[3]
            transform = Transform()
            transform.translation = lin
            transform.rotation = quat
            test = update(transform, ar_tags['ar1'])
            print("Service call succeeded")
        except rospy.ServiceException as e:
            print("Service call failed: %s" % e)

def matrix_to_transform(H):
    '''Convert a 4x4 homogeneous transform matrix to a ROS Transform message.'''
    scale, shear, angles, trans, persp = transformations.decompose_matrix(H)
    q = transformations.quaternion_from_euler(*angles)
    transform_msg = Transform()
    transform_msg.translation = Vector3(*trans)
    transform_msg.rotation = Quaternion(*q)
    return transform_msg

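# A minimal usage sketch for matrix_to_transform above (not from the original
# source); it assumes numpy is available and that `transformations` refers to
# tf.transformations as in the snippet.
import numpy as np

H = np.eye(4)
H[0:3, 3] = [1.0, 2.0, 3.0]   # pure translation, identity rotation
msg = matrix_to_transform(H)  # -> translation (1, 2, 3), rotation (0, 0, 0, 1)
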
def save_dynamic_tf(bag, kitti, kitti_type, initial_time):
    print("Exporting time dependent transformations")
    if kitti_type.find("raw") != -1:
        for timestamp, oxts in zip(kitti.timestamps, kitti.oxts):
            tf_oxts_msg = TFMessage()
            tf_oxts_transform = TransformStamped()
            tf_oxts_transform.header.stamp = rospy.Time.from_sec(
                float(timestamp.strftime("%s.%f")))
            tf_oxts_transform.header.frame_id = 'world'
            tf_oxts_transform.child_frame_id = 'base_link'

            transform = oxts.T_w_imu
            t = transform[0:3, 3]
            q = tf.transformations.quaternion_from_matrix(transform)
            oxts_tf = Transform()
            oxts_tf.translation.x = t[0]
            oxts_tf.translation.y = t[1]
            oxts_tf.translation.z = t[2]
            oxts_tf.rotation.x = q[0]
            oxts_tf.rotation.y = q[1]
            oxts_tf.rotation.z = q[2]
            oxts_tf.rotation.w = q[3]

            tf_oxts_transform.transform = oxts_tf
            tf_oxts_msg.transforms.append(tf_oxts_transform)
            bag.write('/tf', tf_oxts_msg, tf_oxts_msg.transforms[0].header.stamp)
    elif kitti_type.find("odom") != -1:
        timestamps = map(lambda x: initial_time + x.total_seconds(),
                         kitti.timestamps)
        for timestamp, tf_matrix in zip(timestamps, kitti.T_w_cam0):
            tf_msg = TFMessage()
            tf_stamped = TransformStamped()
            tf_stamped.header.stamp = rospy.Time.from_sec(timestamp)
            tf_stamped.header.frame_id = 'world'
            tf_stamped.child_frame_id = 'camera_left'

            t = tf_matrix[0:3, 3]
            q = tf.transformations.quaternion_from_matrix(tf_matrix)
            transform = Transform()
            transform.translation.x = t[0]
            transform.translation.y = t[1]
            transform.translation.z = t[2]
            transform.rotation.x = q[0]
            transform.rotation.y = q[1]
            transform.rotation.z = q[2]
            transform.rotation.w = q[3]

            tf_stamped.transform = transform
            tf_msg.transforms.append(tf_stamped)
            bag.write('/tf', tf_msg, tf_msg.transforms[0].header.stamp)

def list2Transform(xyzrpy):
    transform = Transform()
    # transform.header.frame_id = "iiwa_link_0"
    transform.translation.x = xyzrpy[0]
    transform.translation.y = xyzrpy[1]
    transform.translation.z = xyzrpy[2]
    # quaternion_from_euler returns a raw (x, y, z, w) array, so wrap it in a
    # Quaternion message rather than assigning the array directly.
    transform.rotation = Quaternion(
        *quaternion_from_euler(xyzrpy[3], xyzrpy[4], xyzrpy[5]))
    return transform

def pose_to_transform(self, pose):
    """
    :param pose: Pose
    :return: Transform
    """
    t = Transform()
    # Note: this assigns a Point to a Vector3 field; rospy serializes it
    # anyway because the two messages have identical x/y/z layouts.
    t.translation = pose.position
    t.rotation = pose.orientation
    return t

def map_transform(values):
    """
    Map the values generated with the hypothesis-ros transform strategy to a
    rospy Transform type.
    """
    if not isinstance(values, _Transform):
        raise TypeError('Wrong type. Use appropriate hypothesis-ros type.')
    ros_transform = Transform()
    ros_transform.translation = map_vector3(values.translation)
    ros_transform.rotation = map_quaternion(values.rotation)
    return ros_transform

def publish_waypoint(x, y, z, yaw):
    """
    Publish a waypoint to the /iris_1/command/trajectory topic.
    """
    command_publisher = rospy.Publisher('/iris_1/command/trajectory',
                                        MultiDOFJointTrajectory, queue_size=10)
    # create trajectory msg
    traj = MultiDOFJointTrajectory()
    traj.header.stamp = rospy.Time.now()
    traj.header.frame_id = 'frame'
    traj.joint_names.append('base_link')

    # create start point for trajectory
    transforms = Transform()
    # transforms.translation.x = 0
    # transforms.translation.y = 0
    # transforms.translation.z = 0
    # quat = tf.transformations.quaternion_from_euler(yaw*np.pi/180.0, 0, 0, axes='rzyx')
    # transforms.rotation.x = quat[0]
    # transforms.rotation.y = quat[1]
    # transforms.rotation.z = quat[2]
    # transforms.rotation.w = quat[3]
    # rospy.loginfo("inn---- ", Twist())
    velocities = Twist()
    rospy.loginfo(velocities)
    accel = Twist()
    point = MultiDOFJointTrajectoryPoint([transforms], [velocities], [accel],
                                         rospy.Time(2))
    traj.points.append(point)

    # create end point for trajectory
    transforms = Transform()
    # transforms.translation.x = x
    # transforms.translation.y = y
    # transforms.translation.z = z
    # quat = tf.transformations.quaternion_from_euler(yaw*np.pi/180.0, 0, 0, axes='rzyx')
    # transforms.rotation.x = quat[0]
    # transforms.rotation.y = quat[1]
    # transforms.rotation.z = quat[2]
    # transforms.rotation.w = quat[3]
    velocities = Twist()
    # velocities.linear.x = 63.0
    rospy.loginfo(velocities)
    accel = Twist()
    point = MultiDOFJointTrajectoryPoint([transforms], [velocities], [accel],
                                         rospy.Time(2))
    traj.points.append(point)

    rospy.sleep(1)
    command_publisher.publish(traj)

def tf_to_msg(transform):
    """
    Convert a transform expressed as a 4x4 numpy array to
    geometry_msgs/Transform.
    """
    translation = transform[0:3, 3]
    rotation = transformations.quaternion_from_matrix(transform)
    msg = Transform()
    msg.translation = Vector3(*translation)
    msg.rotation = Quaternion(*rotation)
    return msg

def movePlayer(tf_broadcaster, player_name, transform_now, vel, angle, max_vel):
    """
    Moves a player given its current pose, a velocity and an angle, and a
    maximum velocity
    :param tf_broadcaster: Used to publish the new pose of the player
    :param player_name: string with the name of the player (must coincide with
           the name of the tf frame_id)
    :param transform_now: a geometry_msgs.msg.Transform() containing the
           current pose. This variable is updated with the new player pose
    :param vel: velocity of displacement to take in x axis
    :param angle: angle to turn, limited by max_angle (pi/30)
    :param max_vel: maximum velocity or displacement based on the selected
           animal
    """
    max_angle = math.pi / 30
    if angle > max_angle:
        angle = max_angle
    elif angle < -max_angle:
        angle = -max_angle

    if vel > max_vel:
        vel = max_vel

    T1 = transform_now
    T2 = Transform()
    # This stores the raw (x, y, z, w) array returned by quaternion_from_euler;
    # it is only indexed numerically below, so no Quaternion message is needed.
    T2.rotation = tf.transformations.quaternion_from_euler(0, 0, angle)
    T2.translation.x = vel

    matrix_trans = tf.transformations.translation_matrix(
        (T2.translation.x, T2.translation.y, T2.translation.z))
    matrix_rot = tf.transformations.quaternion_matrix(
        (T2.rotation[0], T2.rotation[1], T2.rotation[2], T2.rotation[3]))
    matrixT2 = np.matmul(matrix_trans, matrix_rot)

    matrix_trans = tf.transformations.translation_matrix(
        (T1.translation.x, T1.translation.y, T1.translation.z))
    matrix_rot = tf.transformations.quaternion_matrix(
        (T1.rotation.x, T1.rotation.y, T1.rotation.z, T1.rotation.w))
    matrixT1 = np.matmul(matrix_trans, matrix_rot)

    # Compose the incremental motion with the current pose: T_new = T1 * T2
    matrix_new_transform = np.matmul(matrixT1, matrixT2)

    quat = tf.transformations.quaternion_from_matrix(matrix_new_transform)
    trans = tf.transformations.translation_from_matrix(matrix_new_transform)

    T1.rotation = Quaternion(quat[0], quat[1], quat[2], quat[3])
    T1.translation.x = trans[0]
    T1.translation.y = trans[1]
    T1.translation.z = trans[2]

    tf_broadcaster.sendTransform(trans, quat, rospy.Time.now(), player_name,
                                 "world")

def initializeBackgroundSubtractor(robot_list, gazebo_set_pose_client):
    """!
    Create the background subtractor. Then, manipulate the Gazebo environment
    to allow the capture of a background image. This uses the MOG2 algorithm.
    @param robot_list A list of the robots that shouldn't be part of the
    background.
    @param gazebo_set_pose_client A server client used to move every robot
    within Gazebo.
    @return The background subtractor used in the script.
    """
    # Set a large history to maximize the detection of differences from
    # background.
    background_history = ParameterLookup.lookupWithDefault(
        parameter='~background_subtractor/history_length', default=100)
    # Explore how shadow detection impacts the results.
    var_threshold = ParameterLookup.lookupWithDefault(
        parameter='~background_subtractor/var_threshold', default=500)
    detect_shadows = ParameterLookup.lookupWithDefault(
        parameter='~background_subtractor/detect_shadows', default=False)
    background_subtractor = cv2.createBackgroundSubtractorMOG2(
        history=background_history, varThreshold=var_threshold,
        detectShadows=detect_shadows)
    # Create a client to determine where each robot is located within the
    # Gazebo environment. This is only done at initialization to move every
    # robot out of the way. It is assumed the robots start in a stable
    # location, so the system is safe to just move them up in the air but
    # leave their X/Y the same. Given Gazebo's propensity to send colliding
    # objects flying, this seems like the best course of action. The user is
    # unlikely to start the node if the simulation freaks out right away.
    # Using the bag file's first location is possible, but would require
    # passing the tf_buffer into this function. After initialization, the bag
    # file positions are used for the rest of the node.
    gazebo_get_pose_name = '/gazebo/get_model_state'
    rospy.loginfo('Waiting to find robot poses...')
    rospy.wait_for_service(gazebo_get_pose_name)
    gazebo_get_pose_client = rospy.ServiceProxy(
        name=gazebo_get_pose_name, service_class=GetModelState)
    # Move all the robots way up in the sky, presumably outside of the view
    # of the camera.
    rospy.loginfo('Moving robots and capturing background...')
    for robot in robot_list:
        # Create each message via the robot object.
        robot_current_state = gazebo_get_pose_client(robot.getName(), '')
        robot_transform_msg = Transform()
        robot_transform_msg.translation.x = robot_current_state.pose.position.x
        robot_transform_msg.translation.y = robot_current_state.pose.position.y
        robot_transform_msg.translation.z = robot_current_state.pose.position.z
        robot_transform_msg.rotation = robot_current_state.pose.orientation
        robot.recordTransform(robot_transform_msg)
        robot_new_pose_request = robot.createSetModelStateRequest()
        robot_new_pose_request.pose.position.z = 1e6
        moveRobot(gazebo_set_pose_client, robot_new_pose_request)
    # Capture N images and apply them to the subtractor.
    rospy.sleep(2.0)
    for _ in range(background_history):
        # Sleep long enough for any noise to update.
        (image, _) = captureImage(sleep_duration=0.25)
        background_subtractor.apply(image)
    return background_subtractor

def callback(scan):
    try:
        t, r = tf_listener.lookupTransform(topics.WORLD_FRAME,
                                           topics.ODOMETRY_FRAME,
                                           rospy.Time(0))
        transform = Transform(translation=Vector3(*t), rotation=Quaternion(*r))
    except Exception as e:
        rospy.logerr('Failed to lookup transform: %s', e)
        # Fall back to the identity transform.
        transform = Transform(translation=Vector3(0, 0, 0),
                              rotation=Quaternion(0, 0, 0, 1))
    map_pub.update(MapUpdate(scan=process_msg(unpickle(scan)),
                             transform=transform))

def tf_mat2msg(self, transform_mat):
    """
    Convert a 4x4 transform numpy matrix to a Transform message type.
    :param transform_mat: 4x4 numpy matrix transformation
    """
    quat = tf.transformations.quaternion_from_matrix(transform_mat)
    trans = tf.transformations.translation_from_matrix(transform_mat)
    result = Transform()
    result.translation = Vector3(trans[0], trans[1], trans[2])
    result.rotation = Quaternion(quat[0], quat[1], quat[2], quat[3])
    return result

def pose_to_matrix(pose):
    # type: (Union[Pose, PoseStamped]) -> np.ndarray
    """
    Converts a Pose to the corresponding homogeneous transformation matrix.
    """
    if isinstance(pose, PoseStamped):
        pose = pose.pose
    transform = Transform()
    transform.translation.x = pose.position.x
    transform.translation.y = pose.position.y
    transform.translation.z = pose.position.z
    transform.rotation = pose.orientation
    return transform_to_matrix(transform)

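# transform_to_matrix is referenced above but not shown; this is a minimal
# sketch of what it presumably does (compose a 4x4 matrix from the message
# fields), assuming numpy and tf.transformations are available. It is an
# assumption, not the project's actual implementation.
import numpy as np
import tf

def transform_to_matrix(transform):
    # type: (Transform) -> np.ndarray
    trans = tf.transformations.translation_matrix(
        (transform.translation.x, transform.translation.y,
         transform.translation.z))
    rot = tf.transformations.quaternion_matrix(
        (transform.rotation.x, transform.rotation.y,
         transform.rotation.z, transform.rotation.w))
    return np.dot(trans, rot)
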
def carla_transform_to_ros_transform(carla_transform):
    """
    Convert a Carla transform to a ROS transform

    Considers the conversion from the left-handed system (Unreal) to the
    right-handed system (ROS).
    See carla_location_to_ros_vector3() and carla_rotation_to_ros_quaternion()
    for details.

    :param carla_transform: the Carla transform
    :type carla_transform: carla.Transform
    :return: a ROS transform
    :rtype: geometry_msgs.msg.Transform
    """
    ros_transform = Transform()
    ros_transform.translation = carla_location_to_ros_vector3(
        carla_transform.location)
    ros_transform.rotation = carla_rotation_to_ros_quaternion(
        carla_transform.rotation)
    return ros_transform

def test_april_tags_single_interval(self):
    # Set up the publisher and subscribers
    self.pub_april_tags = rospy.Publisher(
        "visual_odometry_april_tags_node/april_tags",
        AprilTags, queue_size=1, latch=True)
    self.pub_wheels_cmd = rospy.Publisher(
        "visual_odometry_april_tags_node/wheels_cmd",
        WheelsCmdStamped, queue_size=1, latch=True)

    # Wait for the forward_kinematics_node to finish starting up
    timeout = time.time() + 5.0
    while (self.pub_wheels_cmd.get_num_connections() < 1 or
           self.pub_april_tags.get_num_connections() < 1) and \
            not rospy.is_shutdown() and not time.time() > timeout:
        rospy.sleep(0.1)

    # Publish a single wheels cmd, and two simple april tag messages
    msg_wheels_cmd = WheelsCmdStamped()
    msg_wheels_cmd.vel_left = 1
    msg_wheels_cmd.vel_right = 1
    self.pub_wheels_cmd.publish(msg_wheels_cmd)
    rospy.sleep(0.2)  # Wait so the tags come in the right order

    T1 = Transform()
    T2 = Transform()
    T1.translation.y = 2
    T2.translation.y = 2
    T2.rotation.x, T2.rotation.y, T2.rotation.z, T2.rotation.w = \
        tr.quaternion_about_axis(-np.pi / 2, (0, 0, 1))

    msg_tag1 = AprilTags()
    tag = TagDetection()
    tag.transform = T1
    msg_tag1.detections.append(tag)
    msg_tag1.header.stamp = rospy.Duration(0)
    self.pub_april_tags.publish(msg_tag1)
    rospy.sleep(0.2)  # Wait so the tags come in the right order

    msg_tag2 = AprilTags()
    msg_tag2.header.stamp = rospy.Duration(1)
    tag.transform = T2
    msg_tag2.detections.append(tag)
    self.pub_april_tags.publish(msg_tag2)

    # Wait for the file to be output
    rospy.sleep(3)
    res = np.genfromtxt(
        rospy.get_param("visual_odometry_april_tags_node/filename", ''))
    assert_almost_equal(res, np.array([1, 1, 1, np.pi / 2, 2, 2]))

def publish_waypoint(self, x, y, z, yaw):
    traj = MultiDOFJointTrajectory()
    traj.header.stamp = rospy.Time.now()
    traj.header.frame_id = 'frame'
    traj.joint_names.append('base_link')

    # Create start point
    transforms = Transform()
    transforms.translation.x = 0
    transforms.translation.y = 0
    transforms.translation.z = 0
    quat = tf.transformations.quaternion_from_euler(yaw * np.pi / 180.0, 0, 0,
                                                    axes='rzyx')
    transforms.rotation.x = quat[0]
    transforms.rotation.y = quat[1]
    transforms.rotation.z = quat[2]
    transforms.rotation.w = quat[3]
    velocities = Twist()
    accel = Twist()
    point = MultiDOFJointTrajectoryPoint([transforms], [velocities], [accel],
                                         rospy.Time(2))
    traj.points.append(point)

    # Create end point for trajectory
    transforms = Transform()
    transforms.translation.x = x
    transforms.translation.y = y
    transforms.translation.z = z
    quat = tf.transformations.quaternion_from_euler(yaw * np.pi / 180.0, 0, 0,
                                                    axes='rzyx')
    transforms.rotation.x = quat[0]
    transforms.rotation.y = quat[1]
    transforms.rotation.z = quat[2]
    transforms.rotation.w = quat[3]
    velocities = Twist()
    accel = Twist()
    point = MultiDOFJointTrajectoryPoint([transforms], [velocities], [accel],
                                         rospy.Time(2))
    traj.points.append(point)

    rospy.sleep(0.01)
    self.command_publisher.publish(traj)

def run(self):
    """
    The run function for this step

    Args:

    Yields:
        chuck_approach_pose (geometry_msgs/PoseStamped) : approach pose for
        impedance control with the chuck

    .. seealso::

        :meth:`task_executor.abstract_step.AbstractStep.run`
    """
    rospy.loginfo("Action {}: Detecting schunk.".format(self.name))
    self._stopped = False

    # Ask for the schunk detector
    stub_tf = Transform()
    stub_pcl = PointCloud2()
    stub = TemplateMatchRequest()
    stub.initial_estimate = stub_tf
    stub.target_cloud = stub_pcl
    chuck_approach_pose = self._detect_schunk_srv(stub).template_pose
    self.notify_service_called(DetectSchunkAction.DETECT_SCHUNK_SERVICE_NAME)
    yield self.set_running()

    # Check the status of the server
    if self._stopped:
        yield self.set_preempted(
            action=self.name,
            srv=self._detect_schunk_srv.resolved_name,
            chuck_approach_pose=chuck_approach_pose)
    else:
        yield self.set_succeeded(chuck_approach_pose=chuck_approach_pose)

def timer_callback(self, event):
    if self.last_recieved_stamp is None:
        return
    if self.last_recieved_stamp == self.last_stamp:
        return  # TODO
    self.last_stamp = self.last_recieved_stamp

    cmd = Odometry()
    cmd.header.stamp = self.last_recieved_stamp
    cmd.header.frame_id = 'odom'  # was 'map'
    cmd.child_frame_id = 'base_footprint'  # was 'odom', then 'base_link'
    cmd.pose.pose = self.last_received_pose
    cmd.twist.twist = self.last_received_twist
    self.pub_odom.publish(cmd)

    tf = TransformStamped(
        header=Header(frame_id=cmd.header.frame_id, stamp=cmd.header.stamp),
        child_frame_id=cmd.child_frame_id,
        transform=Transform(translation=cmd.pose.pose.position,
                            rotation=cmd.pose.pose.orientation))
    self.tf_pub.sendTransform(tf)

def ros_publish_waypoint(self, action):
    """Publish single-point ROS trajectory message with given x, y and
    default z, att."""
    # create trajectory msg
    traj = MultiDOFJointTrajectory()
    traj.header.stamp = rospy.Time.now()
    traj.header.frame_id = 'frame'
    traj.joint_names.append('base_link')

    # create end point for trajectory
    transforms = Transform()
    transforms.translation.x = action[0]
    transforms.translation.y = action[1]
    transforms.translation.z = Z

    quat = tf.transformations.quaternion_from_euler(0, 0, 0, axes='rzyx')
    transforms.rotation.x = quat[0]
    transforms.rotation.y = quat[1]
    transforms.rotation.z = quat[2]
    transforms.rotation.w = quat[3]

    velocities = Twist()
    accel = Twist()
    point = MultiDOFJointTrajectoryPoint([transforms], [velocities], [accel],
                                         rospy.Time())
    traj.points.append(point)
    self.waypoint_publisher.publish(traj)

def __init__(self, calibration_parameters=None, transformation=None):
    """
    Creates a HandeyeCalibration object.

    :param calibration_parameters: the parameters of the calibration
    :param transformation: transformation between optical origin and
        base/tool robot frame as tf tuple
    :type transformation: ((float, float, float), (float, float, float, float))
    :return: a HandeyeCalibration object
    :rtype: easy_handeye.handeye_calibration.HandeyeCalibration
    """
    self.parameters = calibration_parameters
    if transformation is None:
        transformation = ((0, 0, 0), (0, 0, 0, 1))

    self.transformation = TransformStamped(transform=Transform(
        Vector3(*transformation[0]), Quaternion(*transformation[1])))
    """
    transformation between optical origin and base/tool robot frame

    :type: geometry_msgs.msg.TransformStamped
    """

    # tf names
    if self.parameters.eye_on_hand:
        self.transformation.header.frame_id = \
            calibration_parameters.robot_effector_frame
    else:
        self.transformation.header.frame_id = \
            calibration_parameters.robot_base_frame
    self.transformation.child_frame_id = \
        calibration_parameters.tracking_base_frame

def affineToTransform(affine, summed_transform):
    transformation = Transform()
    if affine is not None:
        scalex = np.linalg.norm(affine[:, 0])
        scalez = np.linalg.norm(affine[:, 1])

        # offset camera center -> translation affected by scale
        t_offsetx = 320 * (1 - scalex) / 2
        t_offsetz = 240 * (1 - scalez) / 2

        # calc translation
        transformation.translation.y = 1 / ((scalex + scalez) / 2)
        transformation.translation.x = int(
            affine[0, 2] - t_offsetx)  # * summed_transform.translation.y
        transformation.translation.z = int(
            affine[1, 2] - t_offsetz)  # * summed_transform.translation.y

        # calc rotation
        affine[:, 0] /= scalex
        affine[:, 1] /= scalez
        yaw = math.atan2(affine[1, 0], affine[0, 0])
        rotation = tf.transformations.quaternion_from_euler(0, 0, yaw)
        transformation.rotation.x = rotation[0]
        transformation.rotation.y = rotation[1]
        transformation.rotation.z = rotation[2]
        transformation.rotation.w = rotation[3]
        return transformation
    else:
        return None

def save_dynamic_tf(bag, timestamps, tf_matrices, child_frame_id):
    print("Exporting time dependent transformations")
    for timestamp, tf_matrix in zip(timestamps, tf_matrices):
        tf_msg = TFMessage()
        tf_stamped = TransformStamped()
        tf_stamped.header.stamp = to_rostime(timestamp)
        tf_stamped.header.frame_id = 'world'
        tf_stamped.child_frame_id = child_frame_id

        t = tf_matrix[0:3, 3]
        q = quaternion_from_matrix(tf_matrix)
        transform = Transform()
        transform.translation.x = t[0]
        transform.translation.y = t[1]
        transform.translation.z = t[2]
        transform.rotation.x = q[0]
        transform.rotation.y = q[1]
        transform.rotation.z = q[2]
        transform.rotation.w = q[3]

        tf_stamped.transform = transform
        tf_msg.transforms.append(tf_stamped)
        bag.write('/tf', tf_msg, tf_msg.transforms[0].header.stamp)

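# A hedged usage sketch for the generic save_dynamic_tf above (not from the
# original source); the bag file name is illustrative, and `to_rostime` is
# assumed to accept the timestamps given.
import numpy as np
import rosbag

stamps = [0.0, 0.1]              # whatever to_rostime() accepts
poses = [np.eye(4), np.eye(4)]   # 4x4 homogeneous pose matrices
bag = rosbag.Bag('dynamic_tf.bag', 'w')
try:
    save_dynamic_tf(bag, stamps, poses, child_frame_id='base_link')
finally:
    bag.close()
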
def publish_pose_speed_command(self, x, y, z, rx, ry, rz, speed_vector):
    print('Command pose: ' + self._namespace, x, y, z, rx, ry, rz,
          speed_vector)
    quaternion = tf.transformations.quaternion_from_euler(rx, ry, rz)
    rotation = Quaternion(quaternion[0], quaternion[1], quaternion[2],
                          quaternion[3])
    location = Point(x, y, z)
    transforms = Transform(translation=location, rotation=rotation)

    velocities = Twist()
    velocities.linear = speed_vector
    accelerations = Twist()

    traj = MultiDOFJointTrajectory()
    header = std_msgs.msg.Header()
    header.stamp = rospy.Time()
    header.frame_id = 'frame'
    traj.joint_names.append('base_link')
    traj.header = header

    point = MultiDOFJointTrajectoryPoint([transforms], [velocities],
                                         [accelerations], rospy.Time(0))
    traj.points.append(point)
    self.command_pub.publish(traj)

def fromRT(rt):
    """Convert a 4x4 rotation-translation matrix to a Transform message."""
    # Standard rotation-matrix-to-quaternion extraction: branch on the
    # largest diagonal term for numerical stability.
    if rt[0, 0] + rt[1, 1] + rt[2, 2] > 0:
        s = math.sqrt(1.0 + rt[0, 0] + rt[1, 1] + rt[2, 2]) * 2  # s = qw * 4
        qw = s / 4
        qx = (rt[2, 1] - rt[1, 2]) / s
        qy = (rt[0, 2] - rt[2, 0]) / s
        qz = (rt[1, 0] - rt[0, 1]) / s
    elif (rt[0, 0] > rt[1, 1]) and (rt[0, 0] > rt[2, 2]):
        s = math.sqrt(1.0 + rt[0, 0] - rt[1, 1] - rt[2, 2]) * 2  # s = qx * 4
        qw = (rt[2, 1] - rt[1, 2]) / s
        qx = s / 4
        qy = (rt[0, 1] + rt[1, 0]) / s
        qz = (rt[0, 2] + rt[2, 0]) / s
    elif rt[1, 1] > rt[2, 2]:
        s = math.sqrt(1.0 - rt[0, 0] + rt[1, 1] - rt[2, 2]) * 2  # s = qy * 4
        qw = (rt[0, 2] - rt[2, 0]) / s
        qx = (rt[0, 1] + rt[1, 0]) / s
        qy = s / 4
        qz = (rt[1, 2] + rt[2, 1]) / s
    else:
        s = math.sqrt(1.0 - rt[0, 0] - rt[1, 1] + rt[2, 2]) * 2  # s = qz * 4
        qw = (rt[1, 0] - rt[0, 1]) / s
        qx = (rt[0, 2] + rt[2, 0]) / s
        qy = (rt[1, 2] + rt[2, 1]) / s
        qz = s / 4

    # Avoid naming this `tf`, which would shadow the tf module.
    msg = Transform()
    msg.rotation.w = qw
    msg.rotation.x = qx
    msg.rotation.y = qy
    msg.rotation.z = qz
    msg.translation.x = rt[0, 3]
    msg.translation.y = rt[1, 3]
    msg.translation.z = rt[2, 3]
    return msg

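# The branching above is the standard Shepperd-style extraction and should
# agree (up to overall sign) with tf.transformations.quaternion_from_matrix.
# A quick hedged check, assuming numpy and tf are available (not from the
# original source):
import numpy as np
import tf.transformations as tft

H = tft.rotation_matrix(0.5, (0, 0, 1))  # rotation of 0.5 rad about z
H[0:3, 3] = [1.0, 2.0, 3.0]
msg = fromRT(H)
q = [msg.rotation.x, msg.rotation.y, msg.rotation.z, msg.rotation.w]
q_ref = tft.quaternion_from_matrix(H)    # (x, y, z, w) order
assert np.allclose(q, q_ref) or np.allclose(q, -q_ref)
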
def collisionCallback(self, feedback):
    '''
    This callback runs on every feedback message received
    '''
    # Build message to verify collision
    validity_msg = GetStateValidityRequest()
    validity_msg.group_name = PLANNING_GROUP

    if self.next_pose and (not self.collision):
        self.odometry_me.acquire()
        x = self.odometry.position.x
        y = self.odometry.position.y
        z = self.odometry.position.z

        # Distance between the robot and the next position
        dist = sqrt((self.next_pose.position.x - x)**2 +
                    (self.next_pose.position.y - y)**2 +
                    (self.next_pose.position.z - z)**2)

        # Pose to verify collision
        pose = Transform()
        pose.rotation.x = self.odometry.orientation.x
        pose.rotation.y = self.odometry.orientation.y
        pose.rotation.z = self.odometry.orientation.z
        pose.rotation.w = self.odometry.orientation.w
        self.odometry_me.release()

        # Verify possible collisions at different points between the robot
        # and the goal point
        # rospy.logerr("\n\n\nCOLLISION CALLBACK: ")
        # rospy.logerr(dist)
        for d in arange(RESOLUTION, dist + 0.5, RESOLUTION):
            pose.translation.x = (self.next_pose.position.x - x) * (d / dist) + x
            pose.translation.y = (self.next_pose.position.y - y) * (d / dist) + y
            pose.translation.z = (self.next_pose.position.z - z) * (d / dist) + z

            # Insert the correct odometry value
            self.current_pose.multi_dof_joint_state.transforms[0] = pose
            validity_msg.robot_state = self.current_pose

            # Call service to verify collision
            collision_res = self.validity_srv.call(validity_msg)
            # print("\nCollision response:")
            # print(collision_res)

            # Check if robot is in collision
            if not collision_res.valid:
                # print(validity_msg)
                rospy.logerr('Collision in front [x:{} y:{} z:{}]'.format(
                    pose.translation.x, pose.translation.y,
                    pose.translation.z))
                rospy.logerr('Current pose [x:{} y:{} z:{}]'.format(x, y, z))
                # print(collision_res)
                self.move_client.cancel_goal()
                self.collision = True
                return

def setUp(self):
    self.telecentric_projection_client = actionlib.SimpleActionClient(
        "project_telecentric", TelecentricProjectionAction)
    self.request_data_client = actionlib.SimpleActionClient(
        "request_data", RequestDataAction)
    for client in [self.telecentric_projection_client,
                   self.request_data_client]:
        if not client.wait_for_server(rospy.Duration(TIMEOUT)):
            self.fail(msg="Action servers timed out!")
    self.pc_subscriber = rospy.Subscriber("/projected_point_cloud",
                                          PointCloud2, self.callback)

    # Rotation that is 90 degrees rotated relative to the original camera in
    # the test
    self.trafo = Transform()
    self.trafo.rotation.x = 0
    self.trafo.rotation.y = -0.7071068
    self.trafo.rotation.z = 0
    self.trafo.rotation.w = 0.7071068
    self.trafo.translation.x = 0
    self.trafo.translation.y = 0
    self.trafo.translation.z = 0

    # Test the projected depth image
    self.request_point_cloud()
    self.send_goal_depth_image()
    self.generate_line_estimate()

    # Test the point cloud
    self.send_goal_point_cloud()
    self.get_projected_point_cloud()

    # Test the subscribed cloud
    self.send_goal_with_publishing_point_cloud()

def publish_command(position, velocity):
    rospy.init_node('goto_position', anonymous=True)
    firefly_command_publisher = rospy.Publisher(
        '/firefly/command/trajectory', MultiDOFJointTrajectory, queue_size=10)

    desired_yaw = -10
    desired_x = position[0]
    desired_y = position[1]
    desired_z = position[2]
    quaternion = tf.transformations.quaternion_from_euler(
        0, 0, math.radians(desired_yaw))

    traj = MultiDOFJointTrajectory()
    header = std_msgs.msg.Header()
    header.stamp = rospy.Time()
    header.frame_id = 'frame'
    traj.joint_names.append('base_link')
    traj.header = header

    transform = Transform(
        translation=Point(desired_x, desired_y, desired_z),
        rotation=Quaternion(quaternion[0], quaternion[1], quaternion[2],
                            quaternion[3]))
    velocities = Twist()
    velocities.linear.x = velocity[0]
    velocities.linear.y = velocity[1]
    velocities.linear.z = velocity[2]
    accelerations = Twist()

    point = MultiDOFJointTrajectoryPoint([transform], [velocities],
                                         [accelerations], rospy.Time(2))
    traj.points.append(point)
    time.sleep(3)
    firefly_command_publisher.publish(traj)
    rospy.loginfo("Have published %s into %s!", position + velocity,
                  '/firefly/command/trajectory')

def __init__(self, name):
    """!
    The init function instantiates the class and loads all of the relevant
    information from the parameter server.
    @param name The name of this robot. Also includes how to find information
    about it on the parameter server.
    """
    # Set the name
    self._name = name
    # Look up the class number and associated ID
    self._class_id = ParameterLookup.lookup('~' + self._resolveString('class'))
    self._id = ParameterLookup.lookup('~' + self._resolveString("id"))
    # Also look up the keypoints used for this robot
    self._keypoints = self._initializeKeypoints(
        '~' + self._resolveString("keypoints"))
    # Look up the bounding shape, which is treated mathematically the same
    # as keypoints.
    self._bounding_shape = self._initializeKeypoints(
        '~' + self._resolveString('bounding_shape'))
    # Determine what name to use to find the robot in the TF tree.
    self._frame_id = ParameterLookup.lookupWithDefault(
        '~' + self._resolveString("frame_id"), "base_link")
    # Set the pose to the origin (identity rotation).
    transform = Transform()
    transform.rotation.w = 1.0
    self.recordTransform(transform)

def get_current_ros_transform(self):
    """
    Override function used to return the current ROS transform of the object

    The global map frame has an empty transform.

    :return:
    """
    return Transform()

def from_dict(in_dict):
    """
    Sets values parsed from a given dictionary.

    :param in_dict: input dictionary.
    :type in_dict: dict[string, string|dict[string,float]]

    :rtype: None
    """
    global robot_base_frame
    global robot_effector_frame
    global tracking_base_frame
    global eye_on_hand
    global transformation

    eye_on_hand = in_dict['eye_on_hand']
    robot_base_frame = in_dict['robot_base_frame']
    tracking_base_frame = in_dict['tracking_base_frame']
    transformation = TransformStamped(
        child_frame_id=in_dict['tracking_base_frame'],
        transform=Transform(
            Vector3(in_dict['transformation']['x'],
                    in_dict['transformation']['y'],
                    in_dict['transformation']['z']),
            Quaternion(in_dict['transformation']['qx'],
                       in_dict['transformation']['qy'],
                       in_dict['transformation']['qz'],
                       in_dict['transformation']['qw'])))
    if eye_on_hand:
        robot_effector_frame = in_dict['robot_effector_frame']
        transformation.header.frame_id = in_dict['robot_effector_frame']
    else:
        transformation.header.frame_id = in_dict['robot_base_frame']

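# A hedged example (not from the original source) of the dictionary layout
# from_dict() reads above; the frame name values are illustrative placeholders.
example = {
    'eye_on_hand': True,
    'robot_base_frame': 'base_link',
    'robot_effector_frame': 'tool0',
    'tracking_base_frame': 'tracking_origin',
    'transformation': {'x': 0.0, 'y': 0.0, 'z': 0.0,
                       'qx': 0.0, 'qy': 0.0, 'qz': 0.0, 'qw': 1.0},
}
from_dict(example)
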
def get_lines(self, fmt):
    t = Time()
    t.secs = 0

    # MESSAGE_TO_TUM_SHORT
    line = ROSMsg2CSVLine.to(fmt, Point(), t,
                             ROSMessageTypes.GEOMETRY_MSGS_VECTOR3)
    line = ROSMsg2CSVLine.to(fmt, PointStamped(), t,
                             ROSMessageTypes.GEOMETRY_MSGS_POINTSTAMPED)
    line = ROSMsg2CSVLine.to(fmt, Vector3(), t,
                             ROSMessageTypes.GEOMETRY_MSGS_VECTOR3)
    line = ROSMsg2CSVLine.to(fmt, Vector3Stamped(), t,
                             ROSMessageTypes.GEOMETRY_MSGS_VECTOR3STAMPED)
    line = ROSMsg2CSVLine.to(
        fmt, PoseWithCovarianceStamped(), t,
        ROSMessageTypes.GEOMETRY_MSGS_POSEWITHCOVARIANCESTAMPED)
    line = ROSMsg2CSVLine.to(
        fmt, PoseWithCovariance(), t,
        ROSMessageTypes.GEOMETRY_MSGS_POSEWITHCOVARIANCE)
    line = ROSMsg2CSVLine.to(fmt, PoseStamped(), t,
                             ROSMessageTypes.GEOMETRY_MSGS_POSESTAMPED)
    line = ROSMsg2CSVLine.to(fmt, Pose(), t,
                             ROSMessageTypes.GEOMETRY_MSGS_POSE)
    line = ROSMsg2CSVLine.to(fmt, Quaternion(), t,
                             ROSMessageTypes.GEOMETRY_MSGS_QUATERNION)
    line = ROSMsg2CSVLine.to(
        fmt, QuaternionStamped(), t,
        ROSMessageTypes.GEOMETRY_MSGS_QUATERNIONSTAMPED)
    line = ROSMsg2CSVLine.to(fmt, Transform(), t,
                             ROSMessageTypes.GEOMETRY_MSGS_TRANSFORM)
    line = ROSMsg2CSVLine.to(
        fmt, TransformStamped(), t,
        ROSMessageTypes.GEOMETRY_MSGS_TRANSFORMSTAMPED)
    return line

def JointTrajectory2MultiDofTrajectory(self, joint_trajectory):
    multi_dof_trajectory = MultiDOFJointTrajectory()

    for i in range(0, len(joint_trajectory.points)):
        temp_point = MultiDOFJointTrajectoryPoint()
        temp_transform = Transform()
        temp_transform.translation.x = joint_trajectory.points[i].positions[0]
        temp_transform.translation.y = joint_trajectory.points[i].positions[1]
        temp_transform.translation.z = joint_trajectory.points[i].positions[2]
        temp_transform.rotation.w = 1.0

        temp_vel = Twist()
        temp_vel.linear.x = joint_trajectory.points[i].velocities[0]
        temp_vel.linear.y = joint_trajectory.points[i].velocities[1]
        temp_vel.linear.z = joint_trajectory.points[i].velocities[2]

        temp_acc = Twist()
        temp_acc.linear.x = joint_trajectory.points[i].accelerations[0]
        temp_acc.linear.y = joint_trajectory.points[i].accelerations[1]
        temp_acc.linear.z = joint_trajectory.points[i].accelerations[2]

        temp_point.transforms.append(temp_transform)
        temp_point.velocities.append(temp_vel)
        temp_point.accelerations.append(temp_acc)

        multi_dof_trajectory.points.append(temp_point)

    return multi_dof_trajectory

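# A hedged usage sketch (not from the original source): build a single-point
# trajectory_msgs/JointTrajectory carrying x/y/z positions and convert it.
# `planner` stands for an instance of the class defining the method above.
from trajectory_msgs.msg import JointTrajectory, JointTrajectoryPoint

jt = JointTrajectory()
pt = JointTrajectoryPoint()
pt.positions = [1.0, 2.0, 3.0]       # x, y, z
pt.velocities = [0.0, 0.0, 0.0]
pt.accelerations = [0.0, 0.0, 0.0]
jt.points.append(pt)
multi_dof = planner.JointTrajectory2MultiDofTrajectory(jt)
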
def multi_dof_joint_state(cls, msg, obj):
    obj.header = decode.header(msg, obj.header, "")
    obj.joint_names = msg["joint_names"]

    for i in msg['_length_trans']:
        trans = Transform()
        trans.translation = decode.translation(msg, trans.translation, i)
        trans.rotation = decode.rotation(msg, trans.rotation, i)
        obj.transforms.append(trans)

    for i in msg['_length_twist']:
        tw = Twist()
        tw.linear = decode.linear(msg, tw.linear, i)
        tw.angular = decode.angular(msg, tw.angular, i)
        obj.twist.append(tw)

    for i in msg['_length_twist']:
        wr = Wrench()
        wr.force = decode.force(msg, wr.force, i)
        wr.torque = decode.torque(msg, wr.torque, i)
        obj.wrench.append(wr)

    return obj

rospy.init_node('virtual_drone', anonymous=True)
sub_ref_vel = rospy.Subscriber('cmd_vel', Twist, refVelCallback)
sub_ref_traj = rospy.Subscriber('/cmd_traj', JointTrajectory, trajCallback)
# sub_ref_pos = rospy.Subscriber('cmd_tf', TransformStamped, reftfCallback)
# pub_ref_tf = tf.TransformBroadcaster()
pub_joint_states = rospy.Publisher('joint_states', JointState, queue_size=10)

# use drone name parameter as prefix to joint names
if rospy.has_param('drone_name'):
    drone_name = rospy.get_param('drone_name')
    my_joint_names = [drone_name + "_" + joint_name
                      for joint_name in my_joint_names]
    js_msg.name = my_joint_names

# and for reference position
ref_transform = Transform()
# default altitude
ref_transform.translation.z = 1.2
# default orientation (note: this stores the raw (x, y, z, w) array returned
# by quaternion_from_euler)
ref_transform.rotation = quaternion_from_euler(0.0, 0.0, 0.0)

# and the reference velocity
ref_velocity = Twist()

# update rate and time step
updateRateHz = 10
delta_t = 1. / updateRateHz
rate = rospy.Rate(updateRateHz)

while not rospy.is_shutdown():
    refUpdate()
    rate.sleep()

def toTransformMsg(tq):
    t = TransformMSG()
    t.rotation = toRotMsg(tq)
    t.translation = toTranslationMsg(tq)
    return t

def read_files(self):
    # initialize the reader
    j = 0
    tmp_transform = Transform()
    self.camera_marker_file = open(self.name_cam_marker_file, 'r')

    # read camera marker pose
    for i in range(self.num_of_lines_of_cam_marker_file):
        line = self.camera_marker_file.readline()
        if i == j * 3 + 1:
            # collect the indices of all separators in the line
            b = list()
            k = 5
            while k < len(line):
                if line[k] == " " or line[k] == "\n":
                    b.append(k)
                k += 1
            tmp_transform.translation.x = float(line[5:b[0]])
            tmp_transform.translation.y = float(line[(b[0] + 1):b[1]])
            tmp_transform.translation.z = float(line[(b[1] + 1):b[2]])
        if i == j * 3 + 2:
            b = list()
            k = 5
            while k < len(line):
                if line[k] == " " or line[k] == "\n":
                    b.append(k)
                k += 1
            tmp_transform.rotation.x = float(line[5:b[0]])
            tmp_transform.rotation.y = float(line[(b[0] + 1):b[1]])
            tmp_transform.rotation.z = float(line[(b[1] + 1):b[2]])
            tmp_transform.rotation.w = float(line[(b[2] + 1):b[3]])
            self.camera_marker_samples.transforms.append(
                copy.deepcopy(tmp_transform))
            j += 1

    # for tr in self.camera_marker_samples.transforms:
    #     print tr
    #     print "\n"

    self.base_ee_file = open(self.name_base_ee_file, 'r')
    for i in range(self.num_of_lines_of_base_ee_file):
        line = self.base_ee_file.readline()
        k = 0
        b = list()
        transform = Transform()
        while k < len(line):
            if line[k] == " " or line[k] == "\n":
                b.append(k)
            k += 1
        # get translation
        transform.translation.x = float(line[0:b[0]])
        transform.translation.y = float(line[(b[1] + 1):b[2]])
        transform.translation.z = float(line[(b[3] + 1):b[4]])
        # get rotation (roll/pitch/yaw in degrees, converted to radians)
        r = (float(line[(b[5] + 1):b[6]]) / 180) * math.pi
        p = (float(line[(b[7] + 1):b[8]]) / 180) * math.pi
        y = (float(line[(b[9] + 1):b[10]]) / 180) * math.pi
        quat = transformations.quaternion_from_euler(r, p, y, 'sxyz')
        # wrap the raw (x, y, z, w) array in a Quaternion message
        transform.rotation = Quaternion(*quat)
        self.base_ee_samples.transforms.append(transform)

    # print self.base_ee_samples
    self.ready = True