Example No. 1
 def receive_marker_bundle(self, data):
     box_tag_seen = False
     belt_tag_seen = False
     for marker in data.markers:
         # only buffer the marker IDs we care about
         if marker.id == TagIds.MasterTag:
             box_tag_seen = True
             q = utils.msg_to_quaternion(marker.pose.pose.orientation)
             roll,pitch,yaw = euler_from_quaternion(q)
             self.tags_buffer_box.append(TagAR(marker.id,marker.pose.pose.position.x,
                                               marker.pose.pose.position.y,
                                               marker.pose.pose.position.z,
                                               roll, pitch, yaw))
         
         elif marker.id == TagIds.BeltFakeTag:
             belt_tag_seen = True
             q = utils.msg_to_quaternion(marker.pose.pose.orientation)
             roll,pitch,yaw = euler_from_quaternion(q)
             self.tags_buffer_belt.append(TagAR(marker.id,marker.pose.pose.position.x,
                                               marker.pose.pose.position.y,
                                               marker.pose.pose.position.z,
                                               roll, pitch, yaw))
                     
     if not box_tag_seen: self.tags_buffer_box.append(None)
     if not belt_tag_seen: self.tags_buffer_belt.append(None)     
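TagAR is project-specific and not shown here; a minimal stand-in (an assumption, not the original class) that matches the constructor call above would be:

from collections import namedtuple

# Hypothetical stand-in for the project's TagAR class; only the field
# order used by the call above is assumed.
TagAR = namedtuple('TagAR', ['id', 'x', 'y', 'z', 'roll', 'pitch', 'yaw'])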
    def getTwist(self):
        twist = super(MoveTo, self).getTwist()
        if twist is None:
            twist = Twist()
        targetPose = self._getTargetPose()
        c_pos = self.pose
        d_x = targetPose.position.x - c_pos.translation.x
        d_y = targetPose.position.y - c_pos.translation.y
        d_len = math.sqrt(d_x ** 2 + d_y ** 2)

        ##tft.euler_from_quaternion
        if d_len < self.target_tresh:
            print("close enough")
            return None
        t_q = targetPose.orientation
        c_q = c_pos.rotation
        t_euler = tft.euler_from_quaternion((t_q.x, t_q.y, t_q.z, t_q.w))
        c_euler = tft.euler_from_quaternion((c_q.x, c_q.y, c_q.z, c_q.w))
        d_rot = t_euler[2] - c_euler[2]
        # if d_rot > self.slow : #FIXME
        # print("small rotation")
        # return twist
        # if (abs(d_x)/abs(d_y)) < 1:
        #  print("small rotation")
        #  print(d_x)
        #  print(d_y)
        #  return twist
        twist.linear.x = self.speed  # TODO add acceleration
        # if twist == None && self.target:

        # if abs(twist.angular.z
        return twist
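The commented-out FIXME block above hints at scaling the turn rate with the remaining heading error d_rot; a minimal proportional sketch under that assumption (not the original controller) could look like:

from geometry_msgs.msg import Twist

def heading_to_twist(d_rot, speed, k_ang=1.0, max_ang=1.0):
    # Hypothetical helper: drive forward at `speed` while steering
    # proportionally to the yaw error, clamped to +/- max_ang rad/s.
    twist = Twist()
    twist.linear.x = speed
    twist.angular.z = max(-max_ang, min(max_ang, k_ang * d_rot))
    return twist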
Example No. 3
    def listen_imu(self, imu):
        # node can sleep for 16 out of 20 ms without dropping any messages
        # rospy.sleep(16.0/1000.0)
        # self.NN += 1
        # if rospy.get_time() - self.t0 > 1:
        #     rospy.logfatal("Published {} messages in 1 s, {} Hz".format(self.NN, self.NN))
        #     self.NN, self.t0 = 0, rospy.get_time()
        current_orientation = np.array([imu.orientation.x,
                                        imu.orientation.y,
                                        imu.orientation.z,
                                        imu.orientation.w])
        current_angular_speed = np.array([imu.angular_velocity.x,
                                          imu.angular_velocity.y,
                                          0.0])
                                          
        current_rpy = list(transformations.euler_from_quaternion(current_orientation))
        # Yaw is ignored here, but it should stay close to zero
        current_rpy[-1] = 0
        desired_rpy = list(transformations.euler_from_quaternion(self.desired_orientation))
        desired_rpy[-1] = 0
        corrected = self.rp_pid(desired_rpy, current_rpy, current_angular_speed)
        corrected_orientation = transformations.quaternion_multiply(
            transformations.quaternion_from_euler(*corrected),
            transformations.quaternion_from_euler(*current_rpy))

        setpoint_pose = Pose(self.desired_position, corrected_orientation)

        servo_angles = self.platform.ik(setpoint_pose)
        rospy.logdebug(
            "Servo angles (deg): {}".format(np.rad2deg(servo_angles)))
        self.publish_servo_angles(servo_angles)
Example No. 4
def callback(data):
#     print "Received odom..."
    global mapOffset
    global odomPub
    
    #make odometry offset from the original odometry by the mapOffset
    Odom = Odometry()
    Odom.pose.pose.position.x = data.pose.pose.position.x + mapOffset.translation.x
    Odom.pose.pose.position.y = data.pose.pose.position.y + mapOffset.translation.y
    Odom.pose.pose.position.z = data.pose.pose.position.z + mapOffset.translation.z
    
#     print "O'("+str(Odom.pose.pose.position.x)+","+str(Odom.pose.pose.position.y)+")"
    
#     if (Odom.pose.pose.position.x <0 or Odom.pose.pose.position.y <0):
#         print Odom.pose.pose.position
#         raise Exception("NEGATIVE COORDINATES! ")
    
    # offset the orientation by the map offset's rotation (added per roll/pitch/yaw component)
    quat = data.pose.pose.orientation
    q = [quat.x,quat.y,quat.z,quat.w]
    roll, pitch, yaw = euler_from_quaternion(q)
    quat = mapOffset.rotation
    q = [quat.x,quat.y,quat.z,quat.w]
    rollOff, pitchOff, yawOff = euler_from_quaternion(q)
    roll += rollOff
    pitch += pitchOff
    yaw += yawOff
    quater = quaternion_from_euler(roll, pitch, yaw)
    Odom.pose.pose.orientation.w = quater[3]
    Odom.pose.pose.orientation.x = quater[0]
    Odom.pose.pose.orientation.y = quater[1]
    Odom.pose.pose.orientation.z = quater[2]
    
#     print "Publishing shifted odom...\n"
    odomPub.publish(Odom)
 def correct_orientation(self, target_quat):
     current_tf = None
     try:
         current_tf = self.__tf_listener.lookupTransform('/map', '/base_footprint', rospy.Time(0))
     except (tf.LookupException, tf.ConnectivityException, tf.ExtrapolationException):
         rospy.logerr("rotate_in_place_recovery: Failed to look up transform")
         return 'failed'
 
     euler_current = euler_from_quaternion(current_tf[1])
     euler_target = euler_from_quaternion(target_quat)
     
     rospy.loginfo("Euler Current: %s", str(euler_current))
     rospy.loginfo("Euler Target: %s", str(euler_target))
     # signed yaw difference, wrapped to [-pi, pi] below
     yaw_correction = euler_target[2] - euler_current[2]
     if yaw_correction > math.pi:
         yaw_correction -= 2*math.pi
     elif yaw_correction < -math.pi:
         yaw_correction += 2*math.pi
     rospy.loginfo("YAW Correction: %f", yaw_correction)
     
     correction = Twist()
     speed = max(abs(yaw_correction) * 0.2, 0.1)
     if yaw_correction < 0:
         correction.angular.z = -speed
     else:
         correction.angular.z = speed
     self.__cmd_vel.publish(correction)
     rospy.sleep(abs(yaw_correction)/speed)
     correction.angular.z = 0.0
     self.__cmd_vel.publish(correction)
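The pair of if/elif branches above wraps the yaw error into [-pi, pi]; the same normalization can be written as a small standalone helper (a sketch using only the standard library):

import math

def normalize_angle(angle):
    # Wrap an angle in radians into the interval [-pi, pi].
    return math.atan2(math.sin(angle), math.cos(angle))

# A target yaw of 3.0 rad seen from a current yaw of -3.0 rad is only
# about 0.28 rad away once wrapped, not 6.0 rad:
print(normalize_angle(3.0 - (-3.0)))  # ~ -0.283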
Example No. 6
def odomCallback(data):
    global STATE
    if   STATE == 'waiting' or STATE == 'done':
        return
    elif STATE == 'faceUp':
        # Get current yaw in radians
        euler = euler_from_quaternion([data.pose.pose.orientation.x, data.pose.pose.orientation.y, data.pose.pose.orientation.z, data.pose.pose.orientation.w])
        yaw = euler[2]
        # Check if current yaw is up
        if ( abs(yaw-0.0) <= ANGULAR_THRESHOLD ):
            stop()
            STATE = 'turn'
    elif STATE == 'turn':
        # Get current yaw in radians
        euler = euler_from_quaternion([data.pose.pose.orientation.x, data.pose.pose.orientation.y, data.pose.pose.orientation.z, data.pose.pose.orientation.w])
        yaw = euler[2]
        # Check if in desired state
        if ( abs(yaw-desiredAngle) <= ANGULAR_THRESHOLD ):
            stop()
            STATE = 'goToX'
    elif STATE == 'goToX':
        xpos = data.pose.pose.position.x  # current x position
        if ( abs(xpos-desiredXLocation) <= LINEAR_THRESHOLD ):
            stop()
            STATE = 'done'
Example No. 7
def combinePoses(ps0, ps1, op=operator.add, listener=None):
    """
    Returns a PoseStamped of op(ps0,ps1)
    """
    # must be in same reference frame
    if listener is not None:
        try:
            ps0, ps1 = convertToSameFrameAndTime(ps0, ps1, listener)
        except tf.Exception:
            return PoseStamped()

    if ps0 is None or ps1 is None:
        return False

    xtrans0, ytrans0, ztrans0 = ps0.pose.position.x, ps0.pose.position.y, ps0.pose.position.z
    xtrans1, ytrans1, ztrans1 = ps1.pose.position.x, ps1.pose.position.y, ps1.pose.position.z

    wrot0, xrot0, yrot0, zrot0 = ps0.pose.orientation.w, ps0.pose.orientation.x, ps0.pose.orientation.y, ps0.pose.orientation.z    
    wrot1, xrot1, yrot1, zrot1 = ps1.pose.orientation.w, ps1.pose.orientation.x, ps1.pose.orientation.y, ps1.pose.orientation.z

    ps0rot0, ps0rot1, ps0rot2 = tft.euler_from_quaternion([xrot0, yrot0, zrot0, wrot0])
    ps1rot0, ps1rot1, ps1rot2 = tft.euler_from_quaternion([xrot1, yrot1, zrot1, wrot1])

    addedPoint = Point(op(xtrans0,xtrans1), op(ytrans0,ytrans1), op(ztrans0,ztrans1))
    addedEuler = [op(ps0rot0,ps1rot0), op(ps0rot1,ps1rot1), op(ps0rot2,ps1rot2)]
    addedQuaternion = tft.quaternion_from_euler(addedEuler[0], addedEuler[1], addedEuler[2])
    addedOrientation = Quaternion(addedQuaternion[0], addedQuaternion[1], addedQuaternion[2], addedQuaternion[3])

    addedPose = PoseStamped()
    addedPose.header = ps0.header
    addedPose.pose.position = addedPoint
    addedPose.pose.orientation = addedOrientation

    return addedPose
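A hedged usage sketch of combinePoses, assuming both poses already share a frame (so no listener is passed); with operator.sub it yields a component-wise pose difference:

import operator
from geometry_msgs.msg import PoseStamped

ps_a = PoseStamped()
ps_a.pose.position.x = 1.0
ps_a.pose.orientation.w = 1.0

ps_b = PoseStamped()
ps_b.pose.position.x = 0.4
ps_b.pose.orientation.w = 1.0

# Component-wise difference of positions and Euler angles (no TF listener,
# so both poses are assumed to be expressed in the same frame already).
delta = combinePoses(ps_a, ps_b, op=operator.sub)
print(delta.pose.position.x)  # 0.6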
    def init_plan(self, req):
        """
            add in vect_mark[0] the coordinates of the camera deduced from
            the position of the mark we choose to be the origin. This step
            enable the trackin in our plan.
        """
        try:

            marker = MARKER_NAME + str(req.marknumber)

            trans, rot = self.listener.lookupTransform(
                marker, '/map', rospy.Time(0))

            while euler_from_quaternion(rot)[0] > 1.57:
                time.sleep(0.01)
                print "SLEEEEEEEEEEp"
                trans, rot = self.listener.lookupTransform(
                    marker, '/map', rospy.Time(0))

            self.vect_tf[0] = [CAMERA_NAME, MAP, trans, rot]
            print " plan intialization...!"
            print "euler", euler_from_quaternion(self.vect_tf[0][3])
            if req.permanent == True:
                chdir(req.path)
                mon_fichier = open(FILE_SAVED_INIT_PLAN, "w")
                message = write_message([self.vect_tf[0]])
                # print "message", message

                mon_fichier.write(message)
                mon_fichier.close()
            print self.vect_tf
            return InitPlanResponse(True)
        except Exception, exc:
            print "init_plan failed:", exc
            return InitPlanResponse(False)
Example No. 9
    def callback(self, msg):
        """
        Measuring the displacement based on the new position
        :param msg: Odometry msg with position and rotation information
        :type msg: nav_msgs.msg.Odometry
        :return: no return
        """
        if self.last_pose:
            new_pose = msg.pose.pose
            pos = new_pose.position
            pos_old = self.last_pose.position
            distance_delta = sqrt(pow(pos.x-pos_old.x, 2) + pow(pos.y-pos_old.y, 2))
            if distance_delta < 0.5:  # If delta is too big, it is incorrect. Not doing anything with this data
                self.total_distance += distance_delta

                new_orientation = new_pose.orientation
                old_orientation = self.last_pose.orientation
                new_rotation = euler_from_quaternion([new_orientation.x,
                                                      new_orientation.y,
                                                      new_orientation.z,
                                                      new_orientation.w])
                old_rotation = euler_from_quaternion([old_orientation.x,
                                                      old_orientation.y,
                                                      old_orientation.z,
                                                      old_orientation.w])
                rotation_delta = new_rotation[2] - old_rotation[2]
                if rotation_delta >= pi:
                    rotation_delta -= 2*pi
                elif rotation_delta <= -pi:
                    rotation_delta += 2*pi
                self.total_rotation += abs(rotation_delta)
            else:
                rospy.logerr("Distance delta too big (%f m), ignoring this step" % distance_delta)

        self.last_pose = msg.pose.pose
Example No. 10
    def publish(self, data):
        q = data.getOrientation()
        roll, pitch, yaw = euler_from_quaternion([q.x, q.y, q.z, q.w])  # tf expects (x, y, z, w) ordering

        # print "Before ", "Yaw: " + str(yaw * 180 / pi), "Roll: " + str(roll * 180 / pi), "pitch: " + str(pitch * 180 / pi), "\n\n"

        array = quaternion_from_euler(roll, pitch, yaw + (self._angle * pi / 180))

        res = Quaternion()
        # tf's quaternion_from_euler returns (x, y, z, w)
        res.x = array[0]
        res.y = array[1]
        res.z = array[2]
        res.w = array[3]

        roll, pitch, yaw = euler_from_quaternion([q.x, q.y, q.z, q.w])

        # print "after ", "Yaw: " + str(yaw * 180 / pi), "Roll: " + str(roll * 180 / pi), "pitch: " + str(pitch * 180 / pi), "\n\n"

        msg = Imu()
        msg.header.frame_id = self._frameId
        msg.header.stamp = rospy.get_rostime()
        msg.orientation = res
        msg.linear_acceleration = data.getAcceleration()
        msg.angular_velocity = data.getVelocity()

        magMsg = MagneticField()
        magMsg.header.frame_id = self._frameId
        magMsg.header.stamp = rospy.get_rostime()
        magMsg.magnetic_field = data.getMagnetometer()

        self._pub.publish(msg)
        self._pubMag.publish(magMsg)
def run1(data):
    global pointData, points, feedbackData
    if data.time == 0:
        pointData = data
    elif abs(data.time) < 5:
        it = abs(data.time)
        data.x = feedbackData.feedback.base_position.pose.position.x
        data.y = feedbackData.feedback.base_position.pose.position.y
        data.z = data.time / abs(data.time)
        quaternion = (feedbackData.feedback.base_position.pose.orientation.x, feedbackData.feedback.base_position.pose.orientation.y, feedbackData.feedback.base_position.pose.orientation.z, feedbackData.feedback.base_position.pose.orientation.w)
        z = euler_from_quaternion(quaternion, axes='sxyz')
        data.z = z[2] + data.z * math.pi / 2
        points[it] = data
        rospy.loginfo("Add %d is : X:%f Y:%f Theta:%f" % (it, data.x, data.y, data.z))
    elif data.time == 5:
        data.x = feedbackData.feedback.base_position.pose.position.x
        data.y = feedbackData.feedback.base_position.pose.position.y
        data.z = data.time / abs(data.time)
        quaternion = (feedbackData.feedback.base_position.pose.orientation.x, feedbackData.feedback.base_position.pose.orientation.y, feedbackData.feedback.base_position.pose.orientation.z, feedbackData.feedback.base_position.pose.orientation.w)
        z = euler_from_quaternion(quaternion, axes='sxyz')
        data.z = z[2]
        points[0] = data
        rospy.loginfo("Add home is : X:%f Y:%f Theta:%f" % (data.x, data.y, data.z))
    elif points[data.time-5] is not None:
        pointData.x = points[data.time-5].x
        pointData.y = points[data.time-5].y
        pointData.z = points[data.time-5].z
        pointData.time = data.time
    else:
        rospy.loginfo("I can't find it.")
def callback(truth,odom):
    # Calculate error between truth and SLAM estimate
    x_ODOM_error = truth.pose.pose.position.x - odom.pose.pose.position.x
    y_ODOM_error = truth.pose.pose.position.y - odom.pose.pose.position.y
    
    xo = odom.pose.pose.orientation.x
    yo = odom.pose.pose.orientation.y
    zo = odom.pose.pose.orientation.z
    wo = odom.pose.pose.orientation.w
    
    xt = truth.pose.pose.orientation.x
    yt = truth.pose.pose.orientation.y
    zt = truth.pose.pose.orientation.z
    wt = truth.pose.pose.orientation.w

    # find orientation of robot (Euler coordinates)
    (ro, po, yo) = euler_from_quaternion([xo, yo, zo, wo])    
    (rt, pt, yt) = euler_from_quaternion([xt, yt, zt, wt])
    t_ODOM_error = yt-yo
    
    # query SLAM solution
    map_listener = tf.TransformListener()
    try:
      (trans,rot) = map_listener.lookupTransform('/map', '/odom', rospy.Time(0))
      x_SLAM_error = trans[0]-x_ODOM_error
      y_SLAM_error = trans[1]-y_ODOM_error
      t_SLAM_error = euler_from_quaternion(rot)[2] - t_ODOM_error  # rot is a quaternion, take its yaw
    
      print "{0},{1},{2},{3}".format(rospy.get_time(), x_SLAM_error, y_SLAM_error, t_SLAM_error)

    except(tf.LookupException, tf.ConnectivityException):
      pass
Example No. 13
    def execute(self, userdata):
        rospy.loginfo("Creating goal to put robot in front of handle")
        pose_handle = Pose()
        if userdata.door_detection_data_in_base_link.handle_side == "left" or userdata.door_detection_data_in_base_link.handle_side == "right":  # closed door
            (r, p, theta) = euler_from_quaternion(( userdata.door_detection_data_in_base_link.door_handle.pose.orientation.x,
                                                    userdata.door_detection_data_in_base_link.door_handle.pose.orientation.y,
                                                    userdata.door_detection_data_in_base_link.door_handle.pose.orientation.z,
                                                    userdata.door_detection_data_in_base_link.door_handle.pose.orientation.w))  # gives back r, p, y
            theta += 3.1416  # the orientation of the door is looking towards the robot, we need the inverse
            pose_handle.orientation = Quaternion(*quaternion_from_euler(0, 0, theta))  # orientation to look parallel to the door
            pose_handle.position.x = userdata.door_detection_data_in_base_link.door_handle.pose.position.x - 0.4  # to align the shoulder with the handle
            pose_handle.position.y = userdata.door_detection_data_in_base_link.door_handle.pose.position.y + 0.2  # refer to upper comment
            pose_handle.position.z = 0.0  # we don't need the Z for moving
            userdata.door_handle_pose_goal = pose_handle
        else:  # open door
            # if it's open... just cross it?
            (r, p, theta) = euler_from_quaternion(( userdata.door_detection_data_in_base_link.door_position.pose.orientation.x,
                                                    userdata.door_detection_data_in_base_link.door_position.pose.orientation.y,
                                                    userdata.door_detection_data_in_base_link.door_position.pose.orientation.z,
                                                    userdata.door_detection_data_in_base_link.door_position.pose.orientation.w))  # gives back r, p, y
            theta += 3.1416  # the orientation of the door is looking towards the robot, we need the inverse
            pose_handle.orientation = Quaternion(*quaternion_from_euler(0, 0, theta))  # orientation to look parallel to the door
            pose_handle.position.x = userdata.door_detection_data_in_base_link.door_position.pose.position.x + 1.0 # enter the door
            pose_handle.position.y = userdata.door_detection_data_in_base_link.door_position.pose.position.y
            userdata.door_handle_pose_goal = pose_handle


        rospy.loginfo("Move base goal: \n" + str(pose_handle))

        return succeeded
Example No. 14
    def ocall(self, data):
        twist = Twist()
        twist.angular.x = 0
        twist.angular.y = 0
        twist.angular.z = 0
        twist.linear.x = 0
        twist.linear.y = 0
        twist.linear.z = 0


        # print data
        if self.state == "Turn":
            self.odom = data
            self.old_euler = euler_from_quaternion([data.pose.pose.orientation.x, data.pose.pose.orientation.y, data.pose.pose.orientation.z,data.pose.pose.orientation.w])
            self.state = "Turning"

        # delta = data.pose.pose.orientation.z-self.odom.pose.pose.orientation.z
        euler = euler_from_quaternion([data.pose.pose.orientation.x, data.pose.pose.orientation.y, data.pose.pose.orientation.z,data.pose.pose.orientation.w])
        delta = euler[2]-self.old_euler[2]
        if abs(delta) > 3.14:
            delta = abs(delta)-6.28
        if self.state == "Turning":
            if abs(self.Z) >= .3:
                self.Z = self.Z*((1.785-abs(delta))/1.785)
                twist.angular.z = self.Z
                self.pub.publish(twist)
        # print euler[2], self.old_euler[2]

        print euler[2], delta
        if self.state == "Turning" and abs(delta) >= 1.5:
            self.X = self.Xo
            self.Z = self.Zo
            self.pub2.publish('Done')
            self.pub.publish(twist)
            self.callback("test none")
Example No. 15
 def updateImu(self, imu):
     self.imu_last_update = rospy.Time.now()
     self.imu_init = True
     
     if not self.imu_data :
         angle = euler_from_quaternion([imu.orientation.x, imu.orientation.y, imu.orientation.z, imu.orientation.w])
         imu_data =  PyKDL.Rotation.RPY(angle[0], angle[1], angle[2])
         imu_data = self.imu_tf.M * imu_data
         angle = imu_data.GetRPY()   
         self.last_imu_orientation = angle
         self.last_imu_update = imu.header.stamp
         self.imu_data = True
     else:
         angle = euler_from_quaternion([imu.orientation.x, imu.orientation.y, imu.orientation.z, imu.orientation.w])
         imu_data =  PyKDL.Rotation.RPY(angle[0], angle[1], angle[2])
         imu_data = self.imu_tf.M * imu_data
         angle = imu_data.GetRPY()   
         angle_quaternion = tf.transformations.quaternion_from_euler(angle[0], angle[1], angle[2])
         self.odom.pose.pose.orientation.x = angle_quaternion[0]
         self.odom.pose.pose.orientation.y = angle_quaternion[1]
         self.odom.pose.pose.orientation.z = angle_quaternion[2]
         self.odom.pose.pose.orientation.w = angle_quaternion[3]
         
         # Derive angular velocities from orientations ####################### 
         period = (imu.header.stamp - self.last_imu_update).to_sec()
         self.odom.twist.twist.angular.x = cola2_lib.normalizeAngle(angle[0] - self.last_imu_orientation[0]) / period 
         self.odom.twist.twist.angular.y = cola2_lib.normalizeAngle(angle[1] - self.last_imu_orientation[1]) / period 
         self.odom.twist.twist.angular.z = cola2_lib.normalizeAngle(angle[2] - self.last_imu_orientation[2]) / period 
         self.last_imu_orientation = angle
         self.last_imu_update = imu.header.stamp          
         #####################################################################
         
         if self.makePrediction():
             self.ekf.updatePrediction()
             self.publishData()
Example No. 16
 def tf_trigger(self, reference_frame, target_frame, tfs):
     # Decide whether a recording trigger for the bagfile should fire for this
     # target frame.

     # Compare the current transform against the last recorded one; if the
     # translation or yaw change exceeds the threshold from the YAML config,
     # report "triggered".
     
     self.tfL.waitForTransform(reference_frame, target_frame, rospy.Time(0), rospy.Duration(3.0))
     trans, rot = self.tfL.lookupTransform(reference_frame, target_frame, rospy.Time(0))
     
     x = trans[0] - self.current_translation[target_frame][0]
     y = trans[1] - self.current_translation[target_frame][1]
     
     distance_trans = math.sqrt(x*x + y*y)
     distance_rot = abs(euler_from_quaternion(rot)[2] - euler_from_quaternion(self.current_rotation[target_frame])[2])
     
     if("trigger_record_translation" in tfs and distance_trans >= tfs["trigger_record_translation"]):
         rospy.loginfo("triggered for translation, trans = " + str(distance_trans))
         self.current_translation[target_frame] = trans
         self.current_rotation[target_frame] = rot
         return "triggered"
         
     if("trigger_record_rotation" in tfs and distance_rot >= tfs["trigger_record_rotation"]):
         rospy.loginfo("triggered for rotation, rot = " + str(distance_rot))
         self.current_translation[target_frame] = trans
         self.current_rotation[target_frame] = rot
         return "triggered"
     
     return "not_triggered"
Example No. 17
def turn_around():
    global turning
    global pos
    global currz
    command = Twist()
    send_command = rospy.ServiceProxy('constant_command', ConstantCommand)
    w = pos.pose.pose.orientation.w
    z = pos.pose.pose.orientation.z
    x_theta_new,y_theta_new,z_theta_new = euler_from_quaternion([0,0,z,w])

    target = get_new_orientation()
    currOrientation = z_theta_new

    if(currOrientation + target < 0):
        zgoal = 3
    else:
        zgoal = -3

    while(z_theta_new < (target - .1) or z_theta_new > (target + .1)):
        if(command.angular.z != zgoal):
            command.angular.z = zgoal
            send_command(command)
        w = pos.pose.pose.orientation.w
        z = pos.pose.pose.orientation.z
        x_theta_new,y_theta_new,z_theta_new = euler_from_quaternion([0,0,z,w])
    command.angular.z = 0
    send_command(command)
    rospy.sleep(.1)
    command.linear.x = .3
    send_command(command)
    currz = pos.pose.pose.position.x + pos.pose.pose.position.y
Example No. 18
    def receiveRealPose(self, data):
        
        try:
            (trans, rot) = self.listener.lookupTransform('world', 'minefield', rospy.Time(0))
        except (tf.LookupException, tf.ConnectivityException, tf.ExtrapolationException):
            return

        q = [rot[0], rot[1], rot[2], rot[3]]
        roll, pitch, yaw = euler_from_quaternion(q)

        #frame = PyKDL.Frame(PyKDL.Rotation.RPY(roll, pitch, yaw), PyKDL.Vector(trans[0],trans[1],trans[2]))

        q = [data.pose.pose.orientation.x, data.pose.pose.orientation.y, data.pose.pose.orientation.z, data.pose.pose.orientation.w]
        roll, pitch, robot_yaw = euler_from_quaternion(q)

        #robot = PyKDL.Frame(PyKDL.Rotation.RPY(roll, pitch, robot_yaw), PyKDL.Vector(data.pose.pose.position.x, data.pose.pose.position.y, data.pose.pose.position.z))

        #robot_transformed = frame * robot;

        #[roll, pitch, yaw] = robot_transformed.M.GetRPY()

        #print data.pose.pose.position.x - trans[0], data.pose.pose.position.y - trans[1], robot_yaw - yaw

        pose = [data.pose.pose.position.x - trans[0], data.pose.pose.position.y - trans[1], robot_yaw - yaw]
        self.updateRobotPose(pose)
Example No. 19
    def publish(self, data):
        q = Quaternion()
        q.x = 0
        q.y = 0
        q.z = data[6]
        q.w = data[7]
        odomMsg = Odometry()
        odomMsg.header.frame_id = self._odom
        odomMsg.header.stamp = rospy.get_rostime()
        odomMsg.child_frame_id = self._baseLink
        odomMsg.pose.pose.position.x = data[0]
        odomMsg.pose.pose.position.y = data[1]
        odomMsg.pose.pose.position.z = 0
        odomMsg.pose.pose.orientation = q
        if self._firstTimePub:
            self._prevOdom = odomMsg
            self._firstTimePub = False
            return

        velocity = Twist()

        deltaTime = odomMsg.header.stamp.to_sec() - self._prevOdom.header.stamp.to_sec()
        yaw, pitch, roll = euler_from_quaternion(
            [odomMsg.pose.pose.orientation.w, odomMsg.pose.pose.orientation.x, odomMsg.pose.pose.orientation.y,
             odomMsg.pose.pose.orientation.z])
        prevYaw, prevPitch, prevRoll = euler_from_quaternion(
            [self._prevOdom.pose.pose.orientation.w, self._prevOdom.pose.pose.orientation.x,
             self._prevOdom.pose.pose.orientation.y, self._prevOdom.pose.pose.orientation.z])
        if deltaTime > 0:
            velocity.linear.x = (data[8] / deltaTime)

        deltaYaw = yaw - prevYaw

        # rospy.loginfo("yaw: %f\t\tpevYaw: %f\t\tdeltaYaw: %f" % (yaw,prevYaw,deltaYaw))

        if deltaYaw > math.pi: deltaYaw -= 2 * math.pi
        elif deltaYaw < -math.pi: deltaYaw += 2 * math.pi

        if deltaTime > 0:
            velocity.angular.z = -(deltaYaw / deltaTime)

        # rospy.loginfo("deltaYaw after check: %f\t\t angular: %f" % (deltaYaw, velocity.angular.z))

        odomMsg.twist.twist = velocity

        self._prevOdom = odomMsg

        traMsg = TransformStamped()
        traMsg.header.frame_id = self._odom
        traMsg.header.stamp = rospy.get_rostime()
        traMsg.child_frame_id = self._baseLink
        traMsg.transform.translation.x = data[0]
        traMsg.transform.translation.y = data[1]
        traMsg.transform.translation.z = 0
        traMsg.transform.rotation = q

        self._pub.publish(odomMsg)
        self._broadCase.sendTransformMessage(traMsg)
Example No. 20
    def updateImu(self, imu):
        config = self.config
        config.imu_last_update = imu.header.stamp #rospy.Time.now()
        config.imu_init = True
        
        if not config.imu_data :
            angle = euler_from_quaternion([imu.orientation.x, imu.orientation.y, imu.orientation.z, imu.orientation.w])
            imu_data =  PyKDL.Rotation.RPY(angle[0], angle[1], angle[2])
            imu_data = config.imu_tf.M * imu_data
            angle = imu_data.GetRPY()   
            config.last_imu_orientation = angle
            config.last_imu_update = imu.header.stamp
            
            # Initialize heading buffer in order to apply a savitzky_golay derivation
            if len(config.heading_buffer) == 0:
                config.heading_buffer.append(angle[2])
                
            inc = cola2_lib.normalizeAngle(angle[2] - config.heading_buffer[-1])
            config.heading_buffer.append(config.heading_buffer[-1] + inc)
            
            if len(config.heading_buffer) == len(config.savitzky_golay_coeffs):
                config.imu_data = True
        else:
            angle = euler_from_quaternion([imu.orientation.x, imu.orientation.y, imu.orientation.z, imu.orientation.w])
            imu_data =  PyKDL.Rotation.RPY(angle[0], angle[1], angle[2])
            imu_data = config.imu_tf.M * imu_data
            angle = imu_data.GetRPY()   
            angle_quaternion = tf.transformations.quaternion_from_euler(angle[0], angle[1], angle[2])
            config.odom.pose.pose.orientation.x = angle_quaternion[0]
            config.odom.pose.pose.orientation.y = angle_quaternion[1]
            config.odom.pose.pose.orientation.z = angle_quaternion[2]
            config.odom.pose.pose.orientation.w = angle_quaternion[3]
            
            # Derive angular velocities from orientations ####################### 
            period = (imu.header.stamp - config.last_imu_update).to_sec()
            
            # For yaw rate we apply a savitzky_golay derivation
            inc = cola2_lib.normalizeAngle(angle[2] - config.heading_buffer[-1])
            config.heading_buffer.append(config.heading_buffer[-1] + inc)
            config.heading_buffer.pop(0)
            config.odom.twist.twist.angular.z = convolve(config.heading_buffer, config.savitzky_golay_coeffs, mode='valid') / period

            # TODO: Roll rate and Pitch rate should be also savitzky_golay derivations
            config.odom.twist.twist.angular.x = cola2_lib.normalizeAngle(angle[0] - config.last_imu_orientation[0]) / period 
            config.odom.twist.twist.angular.y = cola2_lib.normalizeAngle(angle[1] - config.last_imu_orientation[1]) / period 

            config.last_imu_orientation = angle
            config.last_imu_update = imu.header.stamp          
            #####################################################################
            
            try:
                self.LOCK.acquire()
                if self.makePrediction(imu.header.stamp):
                    self.filter.updatePrediction()
                    self.publishData()
            finally:
                self.LOCK.release()
Example No. 21
def callback(human_data, robot_data):
    # TODO: Calculate yaw
    human_quaternion = [human_data.pose.pose.orientation.x, human_data.pose.pose.orientation.y, human_data.pose.pose.orientation.z, human_data.pose.pose.orientation.w]
    robot_quaternion = [robot_data.pose.pose.orientation.x, robot_data.pose.pose.orientation.y, robot_data.pose.pose.orientation.z, robot_data.pose.pose.orientation.w]

    human_yaw = euler_from_quaternion(human_quaternion)[2]
    robot_yaw = euler_from_quaternion(robot_quaternion)[2]

    print("%s,%s,%s,%s,%s,%s,%s" %(human_data.header.stamp, human_data.pose.pose.position.x,human_data.pose.pose.position.y, human_yaw,robot_data.pose.pose.position.x,robot_data.pose.pose.position.y, robot_yaw))
Example No. 22
    def receiveOdomEKF(self, odor):
        xm, ym, (rollm, pitchm, yawm) = 549848.663622, 4448426.10338, euler_from_quaternion([0.,0.,0.999998918808,0.00147050426938])

        q = [odor.pose.pose.orientation.x, odor.pose.pose.orientation.y, odor.pose.pose.orientation.z, odor.pose.pose.orientation.w]
        roll, pitch, yaw =  euler_from_quaternion(q)

        x, y = odor.pose.pose.position.x, odor.pose.pose.position.y
        x1 = (x-xm)*cos(yawm) + (y-ym)*sin(yawm)
        y1 = -(x-xm)*sin(yawm) + (y-ym)*cos(yawm)
        self.odometria = [x1, y1, yaw-yawm]
        self.updateRobotPose(self.odometria)
Example No. 23
def convert_push_trial_to_feat_vectors(trial):
    Z = []
    Y = []
    for i in xrange(len(trial.trial_trajectory)-1):
        cts_t0 = trial.trial_trajectory[i]
        cts_t1 = trial.trial_trajectory[i+1]
        [_, _, ee_phi_t0] = euler_from_quaternion(np.array([cts_t0.ee.orientation.x,
                                                            cts_t0.ee.orientation.y,
                                                            cts_t0.ee.orientation.z,
                                                            cts_t0.ee.orientation.w]))
        [_, _, ee_phi_t1] = euler_from_quaternion(np.array([cts_t1.ee.orientation.x,
                                                            cts_t1.ee.orientation.y,
                                                            cts_t1.ee.orientation.z,
                                                            cts_t1.ee.orientation.w]))
        # TODO: This will depend on the primitive used
        u_phi_world = cts_t0.u.angular.x

        z_t = [cts_t0.x.x,
               cts_t0.x.y,
               cts_t0.x.theta,
               cts_t0.ee.position.x,
               cts_t0.ee.position.y,
               cts_t0.ee.position.z,
               ee_phi_t0,
               cts_t0.u.linear.x,
               cts_t0.u.linear.y,
               cts_t0.u.linear.z,
               u_phi_world]

        # Convert world frame feats into object frame
        z_t_obj_frame = get_object_frame_features(cts_t0, ee_phi_t0)
        # TODO: Convert shape into frame for each instance?
        # TODO: Need to extract this first...
        z_shape = trial.trial_start.shape_descriptor
        z_t.extend(z_t_obj_frame)
        z_t.extend(z_shape)

        # World frame
        y_t = [cts_t1.x.x - cts_t0.x.x,
               cts_t1.x.y - cts_t0.x.y,
               cts_t1.x.theta - cts_t0.x.theta,
               cts_t1.ee.position.x - cts_t0.ee.position.x,
               cts_t1.ee.position.y - cts_t0.ee.position.y,
               cts_t1.ee.position.z - cts_t0.ee.position.z,
               ee_phi_t1 - ee_phi_t0,
               cts_t1.t - cts_t0.t]

        # Transform targets into object frame here...
        y_t_obj_frame = get_object_frame_targets(cts_t0, cts_t1)
        y_t.extend(y_t_obj_frame)

        Z.append(z_t)
        Y.append(y_t)
    return (Z, Y)
Example No. 24
    def get_rot_matrix(self, rev=False):
        frm, to = 'ardrone/odom', 'ardrone/ardrone_base_bottomcam'
        if rev:
            frm, to = to, frm

        self.tf.waitForTransform(frm, to, rospy.Time(0), rospy.Duration(3))
        trans, rot = self.tf.lookupTransform(frm, to, rospy.Time(0))

        x, y, z = euler_from_quaternion(rot)
        yaw = euler_from_quaternion(rot, axes='szxy')[0]

        return yaw, np.array(euler_matrix(-x, -y, z))
Example No. 25
    def increment_reference(self):
        '''
        Steps the model reference (trajectory to track) by one self.timestep.

        '''
        # Frame management quantities
        R_ref = trns.quaternion_matrix(self.q_ref)[:3, :3]
        y_ref = trns.euler_from_quaternion(self.q_ref)[2]

        # Convert body PD gains to world frame
        kp = R_ref.dot(self.kp_body).dot(R_ref.T)
        kd = R_ref.dot(self.kd_body).dot(R_ref.T)

        # Compute error components (desired - reference), using "smartyaw"
        p_err = self.p_des - self.p_ref
        v_err = -self.v_ref
        w_err = -self.w_ref
        if npl.norm(p_err) <= self.heading_threshold:
            q_err = trns.quaternion_multiply(self.q_des, trns.quaternion_inverse(self.q_ref))
        else:
            q_direct = trns.quaternion_from_euler(0, 0, np.angle(p_err[0] + (1j * p_err[1])))
            q_err = trns.quaternion_multiply(q_direct, trns.quaternion_inverse(self.q_ref))
        y_err = trns.euler_from_quaternion(q_err)[2]

        # Combine error components into error vectors
        err = np.concatenate((p_err, [y_err]))
        errdot = np.concatenate((v_err, [w_err]))
        wrench = (kp.dot(err)) + (kd.dot(errdot))

        # Compute world frame thruster matrix (B) from thruster geometry, and then map wrench to thrusts
        B = np.concatenate((R_ref.dot(self.thruster_directions.T), R_ref.dot(self.lever_arms.T)))
        B_3dof = np.concatenate((B[:2, :], [B[5, :]]))
        command = self.thruster_mapper(wrench, B_3dof)
        wrench_saturated = B.dot(command)

        # Use model drag to find drag force on virtual boat
        twist_body = R_ref.T.dot(np.concatenate((self.v_ref, [self.w_ref])))
        D_body = np.zeros_like(twist_body)
        for i, v in enumerate(twist_body):
            if v >= 0:
                D_body[i] = self.D_body_positive[i]
            else:
                D_body[i] = self.D_body_negative[i]
        drag_ref = R_ref.dot(D_body * twist_body * abs(twist_body))

        # Step forward the dynamics of the virtual boat
        self.a_ref = (wrench_saturated[:2] - drag_ref[:2]) / self.mass_ref
        self.aa_ref = (wrench_saturated[5] - drag_ref[2]) / self.inertia_ref
        self.p_ref = self.p_ref + (self.v_ref * self.timestep)
        self.q_ref = trns.quaternion_from_euler(0, 0, y_ref + (self.w_ref * self.timestep))
        self.v_ref = self.v_ref + (self.a_ref * self.timestep)
        self.w_ref = self.w_ref + (self.aa_ref * self.timestep)
Example No. 26
def odomCallback(msg):
    global firstTime
    global targetPoint
    global targetQuat
    if not reverse:
        if len(prevPoints) == 0:
            prevPoints.append(msg.pose.pose.position)
            prevQuats.append(msg.pose.pose.orientation)
        point0 = prevPoints[-1]
        point1 = msg.pose.pose.position
        dist = math.sqrt(math.pow(point0.x - point1.x, 2) + math.pow(point0.y - point1.y, 2))
        if(dist > threshDist):
            prevPoints.append(point1)
            prevQuats.append(msg.pose.pose.orientation)
    else:
        if firstTime:
            targetPoint = prevPoints.pop()
            targetQuat = prevQuats.pop()
            firstTime = False
        currPoint = msg.pose.pose.position
        currQuat = msg.pose.pose.orientation
        quaternion = (currQuat.x, currQuat.y, currQuat.z, currQuat.w)
        euler = euler_from_quaternion(quaternion)
        currYaw = euler[2]
        quaternion = (targetQuat.x, targetQuat.y, targetQuat.z, targetQuat.w)
        euler = euler_from_quaternion(quaternion)
        targetYaw = euler[2]

        tw = Twist()
        #print str(abs(currYaw - targetYaw))
        if abs(currYaw - targetYaw) < 0.1:
            if abs(targetPoint.x - currPoint.x) < 0.1 and abs(targetPoint.y - currPoint.y) < 0.1:
                if len(prevPoints) != 0:
                    targetPoint = prevPoints.pop()
                    targetQuat = prevQuats.pop()
                tw.linear.x = 0
                tw.angular.z = 0
            else:
                tw.linear.x = -0.2
                tw.angular.z = 0
        else:
            print str(radDeg(currYaw)) + ", " +  str(radDeg(targetYaw))
            # TODO fix rotation direction to rotate optimally
            if radDeg(targetYaw - currYaw) > 180.0: 
                tw.angular.z = 0.5
            else:
                tw.angular.z = -0.5
            tw.linear.x = 0
        pub.publish(tw)
Example No. 27
    def listen_imu_quaternion(self, imu):
        """Doesn't work b/c desired yaw is too far from possible yaw"""
        current_orientation = np.array([imu.orientation.x,
                                        imu.orientation.y,
                                        imu.orientation.z,
                                        imu.orientation.w])
        # current_angular_speed = np.array([imu.angular_velocity.x,
        #                                   imu.angular_velocity.y,
        #                                   imu.angular_velocity.z])
        diff_orientation = transformations.quaternion_multiply(
            self.desired_orientation,
            # Inverse = conjugate / norm; for a unit quaternion this is just the conjugate
            transformations.quaternion_conjugate(current_orientation))

        assert np.allclose(
            transformations.quaternion_multiply(diff_orientation,
                                                current_orientation),
            self.desired_orientation)

        # diff_r, diff_p, diff_y = transformations.euler_from_quaternion(
        #     diff_orientation)
        # rospy.loginfo("Orientation error (quaternion): {}".format(diff_orientation))
        # rospy.loginfo(
        #     "Orientation error (deg): {}".format(
        #         np.rad2deg([diff_r, diff_p, diff_y]))
        # )
        # out = self.pitch_controller(diff_p)

        corrected_orientation = transformations.quaternion_multiply(
            quaternion_power(diff_orientation, 1.5),
            self.desired_orientation)
        rospy.loginfo(
            "Desired orientation (deg): {}".format(
                np.rad2deg(transformations.euler_from_quaternion(
                    self.desired_orientation))))
        rospy.loginfo(
            "Corrected orientation (deg): {}".format(
                np.rad2deg(transformations.euler_from_quaternion(
                    corrected_orientation))))

        rospy.loginfo("Desired position: {}".format(
            self.desired_position))

        setpoint_pose = Pose(self.desired_position, corrected_orientation)

        servo_angles = self.platform.ik(setpoint_pose)
        rospy.logdebug(
            "Servo angles (deg): {}".format(np.rad2deg(servo_angles)))
        self.publish_servo_angles(servo_angles)
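quaternion_power is not part of tf.transformations; one plausible reading of the helper used above (an assumption, not the original implementation) is a slerp away from the identity rotation:

import numpy as np
from tf import transformations

def quaternion_power(q, t):
    # Hypothetical helper: raise a unit quaternion to the power t by
    # interpolating (or extrapolating) from the identity rotation.
    identity = np.array([0.0, 0.0, 0.0, 1.0])
    return transformations.quaternion_slerp(identity, q, t)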
 def process_relative_speed(self,new_segment, segment_input,last_segment):
     pos      = np.array([segment_input.pose.pose.position.x,segment_input.pose.pose.position.y,0,0])
     last_pos = np.array([last_segment.pose.pose.position.x,last_segment.pose.pose.position.y,0,0])
     angle = euler_from_quaternion([segment_input.pose.pose.orientation.x,
                                    segment_input.pose.pose.orientation.y,
                                    segment_input.pose.pose.orientation.z,
                                    segment_input.pose.pose.orientation.w])[2]
     last_angle = euler_from_quaternion([last_segment.pose.pose.orientation.x,
                                         last_segment.pose.pose.orientation.y,
                                         last_segment.pose.pose.orientation.z,
                                         last_segment.pose.pose.orientation.w])[2]
     [a,b] = frame_change(pos,last_pos,angle, last_angle, self.delta)
     new_segment.twist.twist.linear.x = a[0]  #linear speed in direction of the person
     new_segment.twist.twist.linear.y = a[1]  #lateral speed at perpendicular of direction of the person
     new_segment.twist.twist.angular.y = b    #angular yaw speed
Example No. 29
def saveMapToFile(map, yaml_file, image_file, negate, free_thresh,
                  occupied_thresh):

    map_info = {}

    map_info['resolution'] = map.info.resolution

    origin = [0.0, 0.0, 0.0]
    origin[0] = map.info.origin.position.x 
    origin[1] = map.info.origin.position.y 
    a = euler_from_quaternion([
        map.info.origin.orientation.x,
        map.info.origin.orientation.y,
        map.info.origin.orientation.z,
        map.info.origin.orientation.w
    ])
    origin[2] = a[2] #yaw
    map_info['origin'] = origin

    map_info['negate'] = 1 if negate else 0
    map_info['occupied_thresh'] = occupied_thresh
    map_info['free_thresh'] = free_thresh

    map_info['image'] = image_file
    if image_file[0] != '/': 
        yaml_file_dir = os.path.dirname(os.path.realpath(yaml_file))
        image_file = yaml_file_dir + '/' + image_file
    saveMapToImageFile(map, image_file, negate, free_thresh, occupied_thresh)

    with open(yaml_file, "w") as outfile:
        outfile.write(yaml.dump(map_info))
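A short usage sketch, assuming a populated nav_msgs/OccupancyGrid named grid and the usual map_server thresholds:

# `grid` is assumed to be a nav_msgs/OccupancyGrid filled in elsewhere.
saveMapToFile(grid, "map.yaml", "map.pgm",
              negate=False, free_thresh=0.196, occupied_thresh=0.65)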
Example No. 30
 def odometryCb(self,msg):
     #print 'odometry message received'
     self.x=msg.pose.pose.position.x    
     self.y=msg.pose.pose.position.y
     self.ang=euler_from_quaternion([msg.pose.pose.orientation.x,msg.pose.pose.orientation.y,msg.pose.pose.orientation.z,msg.pose.pose.orientation.w])[2]
     self.realSpeed=msg.twist.twist.linear.x
     self.realTurn=msg.twist.twist.angular.z
 def quaternion_to_euler_yaw(self, orientation):
     _, _, yaw = euler_from_quaternion(
         (orientation.x, orientation.y, orientation.z, orientation.w))
     return yaw
Example No. 32
def get_euler(pose):
    """Returns the roll, pitch yaw angles from a Quaternion """
    return transformations.euler_from_quaternion([
        pose.orientation.x, pose.orientation.y, pose.orientation.z,
        pose.orientation.w
    ])
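A round-trip sketch, assuming a geometry_msgs Pose; quaternion_from_euler is the inverse conversion used to build the test orientation:

from geometry_msgs.msg import Pose
from tf import transformations

pose = Pose()
pose.orientation.x, pose.orientation.y, pose.orientation.z, pose.orientation.w = \
    transformations.quaternion_from_euler(0.0, 0.0, 1.57)  # ~90 degree yaw

roll, pitch, yaw = get_euler(pose)
print(roll, pitch, yaw)  # ~ (0.0, 0.0, 1.57)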
Example No. 33
	def rotation_manager(self, imu_data):

		self.check_station_keep_timer()

		if self.navigation_indicator == True and len(self.current_coordinates)==2:

			global roll, pitch, yaw

			q = imu_data.orientation
			# print(q)

			q_list = [q.x, q.y, q.z, q.w]

			# To convert from quat to euler angles
			(roll, pitch, yaw) = euler_from_quaternion (q_list)
			# print("ROLL")
			# print(roll)

			# print("PITCH")
			# print(pitch)

			# print("YAW")
			# print(yaw)

			# To convert from euler to quat
			# quat = quaternion_from_euler (roll, pitch, yaw)
			# print quat
			

			# if self.imu_orientation == "NED":
			# 	yaw_rad = atan2(2.0 * (q.y*-q.x + q.w*q.z), q.w*q.w - q.z*q.z - q.y*q.y + q.x*q.x)
			# else:
			# 	yaw_rad = atan2(2.0 * (q.y*q.z + q.w*q.x), q.w*q.w - q.x*q.x - q.y*q.y + q.z*q.z)
			
			# yaw_rad = atan2(2.0 * (q.y*q.z + q.w*q.x), q.w*q.w - q.x*q.x - q.y*q.y + q.z*q.z)

			yaw_rad = atan2(2.0 * (q.y*-q.x + q.w*q.z), q.w*q.w - q.z*q.z - q.y*q.y + q.x*q.x)
			# print("yaw rad")
			# print(yaw_rad)

			# yaw_deg = (yaw_rad+3.14159/2)*-180/3.14159
			yaw_deg = yaw_rad*(180/3.14159)
			# yaw_deg -= imu_degree_offset_bias # YAW in terms of north, gets larger when going cw

			# print("yaw_deg: " + str(yaw_deg))

			
			# yaw_deg *= -1
			# yaw_deg += 180
			# print("yaw deg")
			# print(yaw_deg)
			# print("yaw_deg post_process")

			theta_offset = self.caclulate_rotation(self.current_coordinates, self.desired_coordinates, yaw_deg)
			theta_offset *= -1
			theta_offset += self.imu_degree_offset_bias

			# print("Theta Offset")
			# print(theta_offset)

			# theta_offset -= 180

			# if theta_offset > 180:
			# 	theta_offset -= 360
			# elif theta_offset < -180:
			# 	theta_offset += 360
			# print(theta_offset)

			if -5 < theta_offset < 5:
				self.degree_offset = 0
				# print("no degree offset")
			else:
				self.degree_offset = theta_offset
				# print(self.degree_offset)

			# print("Theta OFfset")
			# print(theta_offset)

			# print("Distance:")
			# print(self.distance)

		self.print_progress()
Example No. 34
def getYaw(quat):
    try:
        return euler_from_quaternion([quat.x, quat.y, quat.z, quat.w])[2]
    except AttributeError:
        return euler_from_quaternion(quat)[2]
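A short sketch of the two accepted argument forms, assuming geometry_msgs and tf.transformations are available:

from geometry_msgs.msg import Quaternion
from tf.transformations import quaternion_from_euler

q_list = quaternion_from_euler(0.0, 0.0, 0.5)  # plain (x, y, z, w) sequence
q_msg = Quaternion(*q_list)                    # geometry_msgs/Quaternion message

print(getYaw(q_msg))   # 0.5, via the attribute branch
print(getYaw(q_list))  # 0.5, via the AttributeError fallback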
Example No. 35
	def position_callback(self, msg):
		quaternion = [msg.pose.orientation.x, msg.pose.orientation.y, msg.pose.orientation.z, msg.pose.orientation.w]
		self.yaw = euler_from_quaternion(quaternion)[2]
Example No. 36
        try:
            trans_front_side = tfBuffer.lookup_transform(
                'camera', 'front_side', rospy.Time(), rospy.Duration(1))
        except (tf2_ros.LookupException, tf2_ros.ConnectivityException,
                tf2_ros.ExtrapolationException):
            rate.sleep()
            continue
        timer = trans_front_side.header.stamp.secs

        if (rospy.get_time() - timer) < 1.5:
            bot_pos = geometry_msgs.msg.Vector3()

            quat = [
                trans_front_side.transform.rotation.x,
                trans_front_side.transform.rotation.y,
                trans_front_side.transform.rotation.z,
                trans_front_side.transform.rotation.w
            ]
            pitch, yaw, roll = euler_from_quaternion(quat)

            bot_pos.x = trans_front_side.transform.translation.z
            bot_pos.y = yaw  #CCW angle off pos x from dump bucket corner, still needs stepper transform
            bot_pos.z = trans_front_side.transform.translation.x - 0.25
            if math.fabs(roll) > 2:
                bot_pos.z = -bot_pos.z
                bot_pos.y = -yaw
        else:
            bot_pos.y = -42

        pospub.publish(bot_pos)
        rate.sleep()
Example No. 37
 def get_rotation (self,orientation_q):
     orientation_list = [orientation_q.x, orientation_q.y, orientation_q.z, orientation_q.w]
     (roll, pitch, yaw) = euler_from_quaternion (orientation_list)
     return yaw
Example No. 38
    def optimizeManeuver(self):
        '''
        Sets up the optimization problem then calculates the optimal maneuver
        poses. Publishes the resulting path if the optimization is successful.

        Args:
        -----
        None (the target and current poses are read from the object's state)

        Returns:
        --------
        path: the optimal path as a ROS nav_msgs/Path message
        '''
        # Make sure a target pose exists
        if (self.target_x is not None):
            # Grab the current pose from the recent transform if there is no 'odom' topic being published to
            if (self.current_pose is None):
                # DEBUG:
                print(
                    "No 'odom' message received. Waiting for transform from 'odom' to 'base_link'..."
                )
                listener = tf.TransformListener()
                try:
                    listener.waitForTransform('/odom', '/base_link',
                                              rospy.Time(0),
                                              rospy.Duration(10))
                    (trans,
                     rot) = listener.lookupTransform('/odom', '/base_link',
                                                     rospy.Time(0))

                    self.current_pose = Pose()
                    self.current_pose.position.x = trans[0]
                    self.current_pose.position.y = trans[1]
                    self.current_pose.position.z = trans[2]
                    self.current_pose.orientation.x = rot[0]
                    self.current_pose.orientation.y = rot[1]
                    self.current_pose.orientation.z = rot[2]
                    self.current_pose.orientation.w = rot[3]
                except (tf.LookupException, tf.ConnectivityException,
                        tf.ExtrapolationException):
                    return False, "Error looking up transform from 'odom' to 'base_link'"

            # DEBUG:
            print("Running maneuver optimization...")
            # Initial value for optimization
            x0 = [self.start_x_s, self.start_y_s]
            lower_bounds = [-2, -16]
            upper_bounds = [20, 5]

            # Set params
            # TODO: add the forklifts current pose from "/odom"
            current_pose2D = Pose2D()
            current_pose2D.x = self.current_pose.position.x
            current_pose2D.y = self.current_pose.position.y
            euler_angles = euler_from_quaternion([
                self.current_pose.orientation.x,
                self.current_pose.orientation.y,
                self.current_pose.orientation.z,
                self.current_pose.orientation.w
            ])
            current_pose2D.theta = euler_angles[2]

            params = {
                "current_pose":
                [current_pose2D.x, current_pose2D.y, current_pose2D.theta],
                "forklift_length": (self.base_to_back + self.base_to_clamp),
                "weights": [10, 1, 0.1, 1],
                "obstacles":
                self.obstacles,
                "min_radius":
                self.min_radius
            }

            print("Using optimization method: %d" % self.optimization_method)

            #==================================================================#
            # vvv Add Autograd gradient functions here if you get to it vvv
            #==================================================================#
            # Generate Gradient Functions
            self.grad_maneuverObjective = grad(
                lambda x: self.maneuverObjective(x, params))
            self.hessian_maneuverObjective = hessian(
                lambda x: self.maneuverObjective(x, params))
            self.jac_maneuverIneqConstraints = jacobian(
                lambda x: self.maneuverIneqConstraints(x, params))
            self.hessian_maneuverIneqConstraints = hessian(
                lambda x: self.maneuverIneqConstraints(x, params))

            # # Test Gradients against finite difference method
            # delta = 0.0000001
            # x = np.array([self.start_x_s, self.start_y_s], dtype=np.float)
            # dx = deepcopy(x)
            # dx[0] = x[0] + delta
            # print("Objective: ")
            # print(self.maneuverObjective(x, params))
            # print(self.maneuverObjective(dx, params))
            # print("Autograd:")
            # print(self.grad_maneuverObjective(x))
            # print("Finite Difference:")
            # print((self.maneuverObjective(dx, params) - self.maneuverObjective(x, params))/delta)
            # print("Hessian:")
            # print(self.hessian_maneuverObjective(x))
            # print("Autograd con:")
            # print(self.jac_maneuverIneqConstraints(x))
            # print("Constraint Jacobian:")
            # print(self.gradManeuverIneqConstraints(x, params))
            # print("Hessian con:")
            # print(self.hessian_maneuverIneqConstraints(x))
            #==================================================================#
            # ^^^ Add Autograd gradient functions here if you get to it ^^^
            #==================================================================#

            #==================================================================#
            # scipy.optimize.minimize optimizer
            #==================================================================#
            if (self.optimization_method == 1):
                # Set up optimization problem
                obj = lambda x: self.maneuverObjective(x, params)
                obj_bfgs = lambda x: self.maneuverObjective(x, params)
                ineq_con = {
                    'type': 'ineq',
                    'fun': lambda x: self.maneuverIneqConstraints(x, params),
                    'jac': None
                }
                bounds = [(lower_bounds[0], upper_bounds[0]),
                          (lower_bounds[1], upper_bounds[1])]

                # Optimize
                tic = time.time()
                #res = minimize(obj, x0, jac=self.grad_maneuverObjective, method='SLSQP', bounds=bounds, constraints=ineq_con)
                #res = minimize(obj, x0, method='SLSQP', bounds=bounds, constraints=ineq_con)
                res = minimize(obj_bfgs,
                               x0,
                               method='COBYLA',
                               bounds=bounds,
                               constraints=ineq_con)
                toc = time.time()

                # DEBUG:
                print("===== Optimization Results =====")
                print("time: %f(sec)" % (toc - tic))
                print("Success: %s" % res.success)
                print("Message: %s" % res.message)
                print("Results:\n  x: %f,  y: %f" % (res.x[0], res.x[1]))

                # Store result
                x_s = res.x[0]
                y_s = res.x[1]

                # Update starting point to be the current result
                self.start_x_s = x_s
                self.start_y_s = y_s

                self.optimization_success = res.success
                message = res.message
            #==================================================================#
            # scipy.optimize.minimize optimizer
            #==================================================================#

            #==================================================================#
            # IPOPT Optimizer
            #==================================================================#
            if (self.optimization_method == 2):
                # Initial value for optimization
                x0_ip = np.array([x0[0], x0[1]])

                nvar = 2
                x_L = np.array(lower_bounds, dtype=np.float_)
                x_U = np.array(upper_bounds, dtype=np.float_)

                ncon = 1
                g_L = np.array([0], dtype=np.float_)
                g_U = np.array([0], dtype=np.float_)

                nnzj = nvar * ncon
                nnzh = nvar**2

                def eval_f(x):
                    return self.maneuverObjective(x, params)

                def eval_grad_f(x):
                    return self.grad_maneuverObjective(x)

                def eval_g(x):
                    return self.maneuverIneqConstraints(x, params)

                def eval_jac_g(x, flag):
                    if flag:
                        rows = np.concatenate(
                            (np.ones(nvar) * 0, np.ones(nvar) * 1))
                        cols = np.concatenate(
                            (np.linspace(0, nvar - 1, nvar),
                             np.linspace(nvar, 2 * nvar - 1, nvar)))
                        return (rows, cols)
                    else:
                        return self.jac_maneuverIneqConstraints(x)

                def eval_h(x, lagrange, obj_factor, flag):
                    if flag:
                        rows = np.array([])
                        for i in range(nvar * ncon):
                            rows = np.concatenate((rows, np.ones(nvar) * i))
                        cols = np.array([])
                        for i in range(nvar * ncon):
                            cols = np.concatenate(
                                (cols, np.linspace(0, nvar - 1, nvar)))
                        return (rows, cols)
                    else:
                        constraint_hessian = self.hessian_maneuverIneqConstraints(
                            x)
                        constraint_sum = lagrange[0] * constraint_hessian[
                            0, :, :]
                        constraint_sum = constraint_sum + lagrange[
                            1] * constraint_hessian[1, :, :]
                        return obj_factor * self.hessian_maneuverObjective(
                            x) + constraint_sum

                # Not using the Hessian: nnzh is zeroed and eval_h is not passed
                # to pyipopt.create below; restore nnzh = nvar**2 and pass eval_h
                # when enabling it.
                nnzh = 0
                nlp = pyipopt.create(nvar, x_L, x_U, ncon, g_L, g_U, nnzj,
                                     nnzh, eval_f, eval_grad_f, eval_g,
                                     eval_jac_g)
                pyipopt.set_loglevel(0)

                tic = time.time()
                x, zl, zu, constraint_multipliers, obj, status = nlp.solve(
                    x0_ip)
                nlp.close()
                toc = time.time()

                def print_variable(variable_name, value):
                    for i in range(len(value)):
                        print("{} {}".format(
                            variable_name + "[" + str(i) + "] =", value[i]))

                print("Solution of the primal variables, x")
                print_variable("x", x)
                #
                # print("Solution of the bound multipliers, z_L and z_U")
                # print_variable("z_L", zl)
                # print_variable("z_U", zu)
                #
                # print("Solution of the constraint multipliers, lambda")
                # print_variable("lambda", constraint_multipliers)
                #
                # print("Objective value")
                # print("f(x*) = {}".format(obj))

                # DEBUG:
                print("===== Optimization Results (IPOPT) =====")
                print("time: %f" % (toc - tic))
                print("Success: %s" % status)
                print("Message: %s" % "")
                print("Results:\n  x: %f,  y: %f" % (x[0], x[1]))

                # Store result
                x_s = x[0]
                y_s = x[1]

                self.optimization_success = (status > 0)
                message = "ipopt optimization finished with status: {0:d}".format(
                    status)
            #==================================================================#
            # IPOPT Optimizer
            #==================================================================#

            #=================================================================#
            # Use hardcoded value
            #=================================================================#
            if (self.optimization_method == 0):
                x_s = x0[0]
                y_s = x0[1]

                self.optimization_success = 1
                message = "used hardcoded starting value"
            #=================================================================#
            # Use hardcoded value
            #=================================================================#

            # Print optimized point
            print("Approach starting point: (%0.4f, %0.4f)" % (x_s, y_s))

            # Set initial pose angle for the forklift to be the direction facing the roll
            theta_s = np.arctan2(self.target_y - y_s, self.target_x - x_s)

            # Initialize path messages
            current_time = rospy.Time.now()
            path1_msg = Path()
            path2_msg = Path()
            path1_gear_msg = PathWithGear()
            path2_gear_msg = PathWithGear()
            path1_msg.header.stamp = current_time
            path1_msg.header.frame_id = "odom"
            path2_msg.header.stamp = current_time
            path2_msg.header.frame_id = "odom"
            path1_gear_msg.path.header.stamp = current_time
            path1_gear_msg.path.header.frame_id = "odom"
            path2_gear_msg.path.header.stamp = current_time
            path2_gear_msg.path.header.frame_id = "odom"

            # Publish first segment of maneuver
            # NOTE: Just set the path to be a single point at the current position. This will make the master controller work the same and quickly move through the two maneuver paths
            point = PoseStamped()
            point.header.frame_id = "odom"
            point.pose.position.x = self.current_pose.position.x
            point.pose.position.y = self.current_pose.position.y
            path1_msg.poses.append(point)
            path1_gear_msg.path.poses.append(point)
            # Set gear, positive alpha = forward gear
            self.path1_pub.publish(path1_msg)
            path1_gear_msg.gear = 1
            self.path1_gear_pub.publish(path1_gear_msg)

            # Publish second segment of maneuver
            point = PoseStamped()
            point.header.frame_id = "odom"
            point.pose.position.x = self.current_pose.position.x
            point.pose.position.y = self.current_pose.position.y
            path2_msg.poses.append(point)
            path2_gear_msg.path.poses.append(point)
            # Set gear, positive alpha = forward gear
            self.path2_pub.publish(path2_msg)
            path2_gear_msg.gear = 1
            self.path2_gear_pub.publish(path2_gear_msg)

            if (self.optimization_success):
                # If optimization was successful, publish the new target
                # position for the A* algorithm (you will want to make this a
                # separate "goal" value distinct from the roll target position)
                rospy.set_param("/control_panel_node/goal_x", float(x_s))
                rospy.set_param("/control_panel_node/goal_y", float(y_s))
                self.update_obstacle_end_pose = False

                # Publish the starting pose for the approach b-spline path
                approach_start_pose = PoseStamped()
                approach_start_pose.header.frame_id = "/odom"
                approach_start_pose.pose.position.x = x_s
                approach_start_pose.pose.position.y = y_s
                quat_forklift = quaternion_from_euler(0, 0, wrapToPi(theta_s))
                approach_start_pose.pose.orientation.x = quat_forklift[0]
                approach_start_pose.pose.orientation.y = quat_forklift[1]
                approach_start_pose.pose.orientation.z = quat_forklift[2]
                approach_start_pose.pose.orientation.w = quat_forklift[3]

                self.approach_pose_pub.publish(approach_start_pose)

            return self.optimization_success, message

        else:
            return False, "No target pose exists"
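# A standalone sketch of the finite-difference gradient check described in the
# commented-out block inside the example above. The toy objective below is an
# assumption for illustration only; the original maneuverObjective and its
# params are not shown here.
import autograd.numpy as anp
from autograd import grad

def toy_objective(x):
    # placeholder objective with a known gradient
    return anp.sum(x ** 2) + anp.sin(x[0])

grad_toy = grad(toy_objective)

def finite_difference_grad(f, x, delta=1e-7):
    # forward-difference approximation of the gradient of f at x
    g = anp.zeros_like(x)
    fx = f(x)
    for i in range(len(x)):
        dx = anp.array(x, dtype=float)
        dx[i] += delta
        g[i] = (f(dx) - fx) / delta
    return g

x = anp.array([1.0, -2.0])
print("autograd:          ", grad_toy(x))
print("finite difference: ", finite_difference_grad(toy_objective, x))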
Exemplo n.º 39
0
def updateOdom(odom):
    global robotLocation
    global currentHeading
    robotLocation = np.array([odom.pose.pose.position.x, odom.pose.pose.position.y])
    orientation = euler_from_quaternion((odom.pose.pose.orientation.x, odom.pose.pose.orientation.y, odom.pose.pose.orientation.z, odom.pose.pose.orientation.w))
    currentHeading = orientation[2]
initialise_pose_position()  # should only be used for initialisation
initialise_pose_orientation()  # should only be used for initialisation

#obs_move = [4,4,4,3,3,3,0,0,0,0,0,0,0,7,2,2,7,4,7,4,7,4,7,4,0,0,7,4,7,4,7,4,7]
obs_move = [
    3, 7, 2, 10, 11, 3, 0, 5, 2, 7, 7, 9, 3, 9, 7, 8, 10, 6, 8, 9, 1, 4, 2, 0,
    11, 2, 10, 7, 2, 5
]
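# Index legend (taken from the moveSawyer definition later in this listing):
# 0/1 = +/- x, 2/3 = +/- y, 4/5 = +/- z (5 cm steps);
# 6/7 = +/- roll, 8/9 = +/- pitch, 10/11 = +/- yaw (0.2618 rad steps).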
for i, index in enumerate(obs_move):
    print(index)
    moveSawyer(index)
moveTo(final_pose)
test.unregister()
'''
if not moveSawyer(index):
    print("Invalid Path")
    break
'''
print("Endpoint cartesian")
cart = [pose.position.x, pose.position.y, pose.position.z]
print(cart)
print("Endpoint orientation")
orient = [
    pose.orientation.x, pose.orientation.y, pose.orientation.z,
    pose.orientation.w
]
print(orient)
print("Endpoint orientation in Euler")
euler = list(euler_from_quaternion(orient))
print(euler)
Exemplo n.º 41
0
def get_yaw_from_msg(msg):
    q = [msg.pose.orientation.x, msg.pose.orientation.y, msg.pose.orientation.z, msg.pose.orientation.w]
    euler = transformations.euler_from_quaternion(q)
    return euler[2]
Exemplo n.º 42
0
def callback(data):
    global NEIGHBORHOOD_LENGTH
    global NUM_BOXES
    global STEP_SIZE
    global PUB
    global K
    global BUFFER

    startTime = datetime.now()
    boxes_to_buffer = int(math.ceil(BUFFER / NEIGHBORHOOD_LENGTH * NUM_BOXES))

    #print("LOCAL RRT IS CALLING BACK REGARDING OCCUPANCY")
    #print(np.asarray(data.grid_path).shape)
    grid_map = np.asarray(data.grid_path).reshape(NUM_BOXES, NUM_BOXES)
    grid_map = np.flipud(grid_map)
    buffered_map = bufferizeManhattan(grid_map, boxes_to_buffer)

    x = data.current_odometry.pose.pose.position.x
    y = data.current_odometry.pose.pose.position.y
    euler = euler_from_quaternion(
        (data.current_odometry.pose.pose.orientation.x,
         data.current_odometry.pose.pose.orientation.y,
         data.current_odometry.pose.pose.orientation.z,
         data.current_odometry.pose.pose.orientation.w))
    yaw = euler[2]
    #print("let's make the local rrt")
    localRRT = LocalRRT(buffered_map.astype(int), K, [x, y], yaw,
                        [data.next_point.x, data.next_point.y])
    localRRT_no_buffer = LocalRRT(grid_map.astype(int), K, [x, y], yaw,
                                  [data.next_point.x, data.next_point.y])
    #print("do we need to run local rrt?")
    if not (localRRT_no_buffer.local_map.isValidEdge(
        [x, y], [data.next_point.x, data.next_point.y],
            print_statements=True)):
        print("let's run the local rrt")
        local_path = localRRT.runRRT(STEP_SIZE, 100).reshape((-1, 2))

        # NOTE: the unconditional `break` below disables the buffer-shrinking
        # retry logic that follows; remove it to re-enable the retries.
        while (len(local_path) < 2):
            print("THIS PATH IS TOO SHORT")
            break
            boxes_to_buffer = boxes_to_buffer - 1
            if (boxes_to_buffer < 0):
                print("COULD NOT FIND A PATH")
                break
            buffered_map = bufferizeManhattan(grid_map, boxes_to_buffer)
            localRRT = LocalRRT(buffered_map.astype(int), K, [x, y], yaw,
                                [data.next_point.x, data.next_point.y])
            local_path = localRRT.runRRT(STEP_SIZE, 100).reshape((-1, 2))

        # Send this update
        result_msg = local_rrt_result()
        result_msg.follow_local_path = True
        local_path = np.flipud(local_path)
        result_msg.global_path_x = local_path[:, 0].tolist()
        result_msg.global_path_y = local_path[:, 1].tolist()
        result_msg.next_point = data.next_point
        #print("NEW LOCAL PATH")
        #print(local_path)

        PUB.publish(result_msg)
        return
    else:
        result_msg = local_rrt_result()
        result_msg.follow_local_path = False
        PUB.publish(result_msg)
        print("No need to update")
def moveSawyer(index):
    if index == 0:
        pose.position.x += 0.05
    elif index == 1:
        pose.position.x -= 0.05
    elif index == 2:
        pose.position.y += 0.05
    elif index == 3:
        pose.position.y -= 0.05
    elif index == 4:
        pose.position.z += 0.05
    elif index == 5:
        pose.position.z -= 0.05
    #can't seem to make it move properly.
    elif index == 6:
        # each rotation step below is 0.2618 rad (about 15 degrees)
        temp_euler = list(
            euler_from_quaternion([
                pose.orientation.x, pose.orientation.y, pose.orientation.z,
                pose.orientation.w
            ]))
        temp_euler[0] += 0.2618  # about 15 degrees
        temp_ori = quaternion_from_euler(temp_euler[0], temp_euler[1],
                                         temp_euler[2])
        pose.orientation.x = temp_ori[0]
        pose.orientation.y = temp_ori[1]
        pose.orientation.z = temp_ori[2]
        pose.orientation.w = temp_ori[3]
    elif index == 7:
        temp_euler = list(
            euler_from_quaternion([
                pose.orientation.x, pose.orientation.y, pose.orientation.z,
                pose.orientation.w
            ]))
        temp_euler[0] -= 0.2618
        temp_ori = quaternion_from_euler(temp_euler[0], temp_euler[1],
                                         temp_euler[2])
        pose.orientation.x = temp_ori[0]
        pose.orientation.y = temp_ori[1]
        pose.orientation.z = temp_ori[2]
        pose.orientation.w = temp_ori[3]
    elif index == 8:
        temp_euler = list(
            euler_from_quaternion([
                pose.orientation.x, pose.orientation.y, pose.orientation.z,
                pose.orientation.w
            ]))
        temp_euler[1] += 0.2618  # about 15 degrees (0.0873 rad would be 5 degrees)
        temp_ori = quaternion_from_euler(temp_euler[0], temp_euler[1],
                                         temp_euler[2])
        pose.orientation.x = temp_ori[0]
        pose.orientation.y = temp_ori[1]
        pose.orientation.z = temp_ori[2]
        pose.orientation.w = temp_ori[3]
    elif index == 9:
        temp_euler = list(
            euler_from_quaternion([
                pose.orientation.x, pose.orientation.y, pose.orientation.z,
                pose.orientation.w
            ]))
        temp_euler[1] -= 0.2618  # about 15 degrees
        temp_ori = quaternion_from_euler(temp_euler[0], temp_euler[1],
                                         temp_euler[2])
        pose.orientation.x = temp_ori[0]
        pose.orientation.y = temp_ori[1]
        pose.orientation.z = temp_ori[2]
        pose.orientation.w = temp_ori[3]
    elif index == 10:
        temp_euler = list(
            euler_from_quaternion([
                pose.orientation.x, pose.orientation.y, pose.orientation.z,
                pose.orientation.w
            ]))
        temp_euler[2] += 0.2618
        temp_ori = quaternion_from_euler(temp_euler[0], temp_euler[1],
                                         temp_euler[2])
        pose.orientation.x = temp_ori[0]
        pose.orientation.y = temp_ori[1]
        pose.orientation.z = temp_ori[2]
        pose.orientation.w = temp_ori[3]
    elif index == 11:
        temp_euler = list(
            euler_from_quaternion([
                pose.orientation.x, pose.orientation.y, pose.orientation.z,
                pose.orientation.w
            ]))
        temp_euler[2] -= 0.2618
        temp_ori = quaternion_from_euler(temp_euler[0], temp_euler[1],
                                         temp_euler[2])
        pose.orientation.x = temp_ori[0]
        pose.orientation.y = temp_ori[1]
        pose.orientation.z = temp_ori[2]
        pose.orientation.w = temp_ori[3]

    # return True only if the motion to the updated pose succeeds
    return bool(moveTo(pose))
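# A compact, table-driven equivalent of the twelve branches above. This is a
# sketch only: it assumes the same global `pose` object, the `moveTo` helper
# and the euler/quaternion imports used by the original moveSawyer. Indices
# 0-5 nudge position by 5 cm; indices 6-11 nudge roll/pitch/yaw by 0.2618 rad
# (about 15 degrees).
def move_sawyer_table(index):
    translations = {0: ('x', 0.05), 1: ('x', -0.05),
                    2: ('y', 0.05), 3: ('y', -0.05),
                    4: ('z', 0.05), 5: ('z', -0.05)}
    rotations = {6: (0, 0.2618), 7: (0, -0.2618),
                 8: (1, 0.2618), 9: (1, -0.2618),
                 10: (2, 0.2618), 11: (2, -0.2618)}
    if index in translations:
        axis, step = translations[index]
        setattr(pose.position, axis, getattr(pose.position, axis) + step)
    elif index in rotations:
        axis, step = rotations[index]
        euler = list(euler_from_quaternion([pose.orientation.x, pose.orientation.y,
                                            pose.orientation.z, pose.orientation.w]))
        euler[axis] += step
        q = quaternion_from_euler(euler[0], euler[1], euler[2])
        pose.orientation.x, pose.orientation.y, pose.orientation.z, pose.orientation.w = q
    return bool(moveTo(pose))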
def get_rotation(msg):
    global roll, pitch, yaw
    orientation_q = msg.pose.pose.orientation
    orientation_list = [orientation_q.x, orientation_q.y, orientation_q.z, orientation_q.w]
    (roll, pitch, yaw) = euler_from_quaternion(orientation_list)
Exemplo n.º 45
0
def transform_to_list(tf):
    a = euler_from_quaternion(msg_to_quaternion(tf.rotation))
    t = tf.translation
    return [t.x, t.y, t.z, a[0], a[1], a[2]]
Exemplo n.º 46
0
    def __init__(self):
        # Read parameters to configure the node
        tf_publish_rate = read_parameter('~tf_publish_rate', 50.)
        tf_period = 1. / tf_publish_rate if tf_publish_rate > 0 else float(
            'inf')
        parent_frame = read_parameter('~parent_frame', 'world')
        optitrack_frame = read_parameter('~optitrack_frame', 'optitrack')
        rigid_bodies = read_parameter('~rigid_bodies', dict())
        names = []
        ids = []
        for name, value in rigid_bodies.items():
            names.append(name)
            ids.append(value)
        # Setup Publishers
        pose_pub = rospy.Publisher('/optitrack/rigid_bodies',
                                   RigidBodyArray,
                                   queue_size=3)
        # Setup TF listener and broadcaster
        tf_listener = tf.TransformListener()
        tf_broadcaster = tf.TransformBroadcaster()
        # Connect to the optitrack system
        iface = read_parameter('~iface', 'eth1')
        version = (2, 7, 0, 0)  # the latest SDK version
        #IPython.embed()
        #optitrack = rx.mkdatasock(ip_address=get_ip_address(iface))
        #Modified by Nikhil
        # optitrack = rx.mkdatasock(ip_address=iface)
        optitrack = rx.mkcmdsock(ip_address=iface)
        msg = struct.pack("I", rx.NAT_PING)
        server_address = '192.168.1.205'
        result = optitrack.sendto(msg, (server_address, rx.PORT_COMMAND))

        rospy.loginfo('Successfully connected to optitrack')
        # Get transformation from the world to optitrack frame
        (parent, child) = (parent_frame, optitrack_frame)
        try:
            now = rospy.Time.now() + rospy.Duration(1.0)
            tf_listener.waitForTransform(parent, child, now,
                                         rospy.Duration(5.0))
            (pos, rot) = tf_listener.lookupTransform(parent, child, now)
            wTo = PoseConv.to_homo_mat(pos, rot)  # return 4x4 numpy mat
        except (tf.Exception, tf.LookupException, tf.ConnectivityException):
            rospy.logwarn('Failed to get transformation from %r to %r frame' %
                          (parent, child))
            parent_frame = optitrack_frame
            wTo = np.eye(4)
        # Track up to 24 rigid bodies
        prevtime = np.ones(24) * rospy.get_time()
        # IPython.embed()
        while not rospy.is_shutdown():
            try:
                # data = optitrack.recv(rx.MAX_PACKETSIZE)
                data, address = optitrack.recvfrom(rx.MAX_PACKETSIZE + 4)
            except socket.error:
                if rospy.is_shutdown():  # exit gracefully
                    return
                else:
                    rospy.logwarn('Failed to receive packet from optitrack')
                    continue  # no data was received; skip this iteration
            msgtype, packet = rx.unpack(data, version=version)
            if type(packet) is rx.SenderData:
                version = packet.natnet_version
                rospy.loginfo('NatNet version received: ' + str(version))
            if type(packet) in [rx.SenderData, rx.ModelDefs, rx.FrameOfData]:
                # Optitrack gives the position of the centroid.
                array_msg = RigidBodyArray()
                # IPython.embed()
                if msgtype == rx.NAT_FRAMEOFDATA:
                    # IPython.embed()
                    for i, rigid_body in enumerate(packet.rigid_bodies):
                        body_id = rigid_body.id
                        pos_opt = np.array(rigid_body.position)
                        rot_opt = np.array(rigid_body.orientation)
                        # IPython.embed()
                        oTr = PoseConv.to_homo_mat(pos_opt, rot_opt)
                        # Transformation between world frame and the rigid body
                        WTr = np.dot(wTo, oTr)
                        wTW = np.array([[0, -1, 0, 0], [1, 0, 0, 0],
                                        [0, 0, 1, 0], [0, 0, 0, 1]])
                        wTr = np.dot(wTW, WTr)

                        ## New Change ##
                        # Toffset = np.array( [[0, 1, 0, 0],[0, 0, 1, 0],[1, 0, 0, 0],[0, 0, 0, 1]] )
                        # tf_wTr = np.dot(oTr,Toffset)
                        # tf_pose = Pose()
                        # tf_pose.position = Point(*tf_wTr[:3,3])
                        # tf_pose.orientation = Quaternion(*tr.quaternion_from_matrix(tf_wTr))

                        # IPython.embed()

                        array_msg.header.stamp = rospy.Time.now()
                        array_msg.header.frame_id = parent_frame
                        body_msg = RigidBody()
                        pose = Pose()
                        pose.position = Point(*wTr[:3, 3])
                        # OTr = np.dot(oTr,Toffset)
                        # pose.orientation = Quaternion(*tr.quaternion_from_matrix(oTr))

                        # change 26 Feb. 2017
                        # get the euler angle we want then compute the new quaternion
                        euler = tr.euler_from_quaternion(rot_opt)
                        quaternion = tr.quaternion_from_euler(
                            euler[2], euler[0], euler[1])
                        pose.orientation.x = quaternion[0]
                        pose.orientation.y = quaternion[1]
                        pose.orientation.z = quaternion[2]
                        pose.orientation.w = quaternion[3]

                        body_msg.id = body_id
                        body_msg.tracking_valid = rigid_body.tracking_valid
                        body_msg.mrk_mean_error = rigid_body.mrk_mean_error
                        body_msg.pose = pose
                        for marker in rigid_body.markers:
                            # TODO: Should transform the markers as well
                            body_msg.markers.append(Point(*marker))
                        array_msg.bodies.append(body_msg)
                        # Control the publish rate for the TF broadcaster
                        if rigid_body.tracking_valid and (
                                rospy.get_time() - prevtime[body_id] >=
                                tf_period):
                            body_name = 'rigid_body_%d' % (body_id)
                            if body_id in ids:
                                idx = ids.index(body_id)
                                body_name = names[idx]
                            ## no change ##
                            # tf_broadcaster.sendTransform(pos_opt, rot_opt, rospy.Time.now(), body_name, optitrack_frame)
                            # change 1 ##
                            # pos2 = np.array([tf_pose.position.x, tf_pose.position.y, tf_pose.position.z])
                            # rot2 = np.array([tf_pose.orientation.x, tf_pose.orientation.y, tf_pose.orientation.z, tf_pose.orientation.w])
                            # tf_broadcaster.sendTransform(pos2, rot2, rospy.Time.now(), body_name, optitrack_frame)
                            ## change 2 ## <fail>
                            # pos2 = np.array([-pose.position.y, pose.position.x, pose.position.z])
                            # rot2 = np.array([-pose.orientation.y, pose.orientation.x, pose.orientation.z, pose.orientation.w])
                            # tf_broadcaster.sendTransform(pos2, rot2, rospy.Time.now(), body_name, optitrack_frame)
                            ## change 3 ## <fail>
                            # pos2 = np.array([-pos_opt[1],pos_opt[0],pos_opt[2]])
                            # rot2 = np.array([-rot_opt[1],rot_opt[0],rot_opt[2],rot_opt[3]])
                            # tf_broadcaster.sendTransform(pos2, rot2, rospy.Time.now(), body_name, optitrack_frame)
                            ## change 4 ## <>
                            pos2 = np.array([
                                pose.position.x, pose.position.y,
                                pose.position.z
                            ])
                            rot2 = np.array([
                                pose.orientation.x, pose.orientation.y,
                                pose.orientation.z, pose.orientation.w
                            ])
                            tf_broadcaster.sendTransform(
                                pos2, rot2, rospy.Time.now(), body_name,
                                parent_frame)

                            prevtime[body_id] = rospy.get_time()

                pose_pub.publish(array_msg)
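# PoseConv.to_homo_mat comes from an external helper library and is not shown
# in this listing. The function below is a minimal equivalent for illustration,
# assuming the quaternion is ordered (x, y, z, w) as in tf.
import numpy as np
from tf import transformations as tr

def to_homo_mat_sketch(pos, quat):
    # 4x4 homogeneous transform from a 3-vector position and an (x, y, z, w) quaternion
    T = tr.quaternion_matrix(quat)
    T[:3, 3] = pos
    return T

# Composition then works as in the node above, e.g. wTr = np.dot(wTo, oTr).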
Exemplo n.º 47
0
        self.orientation_queue = []
        rospy.set_param('docking', True)
        print("Service request received.")
        return "Service requested."


if __name__ == "__main__":
    my_filter = Pose_filter()
    while (not rospy.is_shutdown()):
        # if STATION_NR is provided
        if (my_filter.marker_pose_calibrated.pose.position.x
                and rospy.get_param('docking')):
            # implementing a temporal sliding window filter here
            [position_vec, orient_vec
             ] = my_filter.vec_from_pose(my_filter.marker_pose_calibrated.pose)
            euler_vec = euler_from_quaternion(orient_vec)
            my_filter.position_queue.append(position_vec)
            my_filter.orientation_queue.append(orient_vec)
            filtered_position_vec = my_filter.avr(my_filter.position_queue)
            filtered_orient_vec = my_filter.avr(my_filter.orientation_queue)
            filtered_pose = my_filter.pose_from_vec(filtered_position_vec,
                                                    filtered_orient_vec)
            my_filter.filtered_pose_pub.publish(filtered_pose)
            #my_filter.filtered_pose_pub.publish(my_filter.marker_pose_calibrated)
            # discard the oldest element in queue
            if (len(my_filter.position_queue) == 15):
                my_filter.position_queue.pop(0)
                my_filter.orientation_queue.pop(0)
        # provide an output to remind the user to start docking from the correct position (rosservice call)
        # wouldn't be necessary if docking were changed into an autonomous process
        elif (my_filter.marker_list
gps_n = xyz()
dist = []
myrosbag = rosbags[inp]
print("Working on: %s" % (myrosbag))
bag = rosbag.Bag(myrosbag, "r")
count = 0
for topic, msg, t in bag.read_messages(topics=[
        '/current_pose', '/current_pose_alt', '/gps/duro/current_pose',
        '/gps/nova/current_pose'
]):
    if topic == '/current_pose':
        gps_p.x.append(msg.pose.position.x)
        gps_p.y.append(msg.pose.position.y)
        gps_p.t.append(t.to_sec())
        _, _, yaw = euler_from_quaternion([
            msg.pose.orientation.x, msg.pose.orientation.y,
            msg.pose.orientation.z, msg.pose.orientation.w
        ])
        gps_p.ori.append(yaw)
    elif topic == '/current_pose_alt':
        gps_a.x.append(msg.pose.position.x)
        gps_a.y.append(msg.pose.position.y)
        gps_a.t.append(t.to_sec())
        _, _, yaw = euler_from_quaternion([
            msg.pose.orientation.x, msg.pose.orientation.y,
            msg.pose.orientation.z, msg.pose.orientation.w
        ])
        gps_a.ori.append(yaw)
        if len(gps_p.x):  # if not empty
            ## 20 Hz = 0.05 second
            time_margin = 0.018
            if abs(gps_p.t[-1] - t.to_sec()) < time_margin:
Exemplo n.º 49
0
    def stageCB(self, data):
        # if len(data.laser.ranges) < 1 or self.readyForNewData == False:
        #     print("limited by GPU !!!!")
        #     return

        # pose stuff
        self.robotPose = data.position.pose
        quatOri = (self.robotPose.orientation.x, self.robotPose.orientation.y,
                   self.robotPose.orientation.z, self.robotPose.orientation.w)
        (_, _, yaw) = transformations.euler_from_quaternion(quatOri)
        robotOri = 180 * yaw / np.pi
        angleBetween = 180 * np.arctan2(
            self.goalPose.position.y - self.robotPose.position.y,
            self.goalPose.position.x - self.robotPose.position.x) / np.pi
        dirToTarget = self.constrainAngle(robotOri - angleBetween)

        self.screen[:] = 128
        x = self.observation_dims[0] / 2
        y = self.observation_dims[1] / 2
        rad = int(data.laser.range_max * 10)

        # Occupancy grid:
        for i in range(0, 360):
            for j in range(0, rad):
                if data.laser.ranges[i] * 10 >= j:
                    self.setPixel(x + int(j * np.cos(np.pi * i / 180.0)),
                                  y + int(j * np.sin(np.pi * i / 180.0)), 192)
            if data.laser.ranges[i] < data.laser.range_max:
                # Mark a 3x3 block of pixels at the laser return point
                px = x + int(10 * data.laser.ranges[i] * np.cos(np.pi * i / 180.0))
                py = y + int(10 * data.laser.ranges[i] * np.sin(np.pi * i / 180.0))
                for dx in range(3):
                    for dy in range(3):
                        self.setPixel(px - dx, py - dy, 0)

        # Gradient:


#    R = np.matrix('{} {}; {} {}'.format(np.cos( np.radians( -dirToTarget)), -np.sin( np.radians( -dirToTarget)), np.sin( np.radians( -dirToTarget)), np.cos( np.radians( -dirToTarget))))
        tempScr = np.zeros(
            (self.observation_dims[0], self.observation_dims[1]))
        for i in range(0, self.observation_dims[0]):
            for j in range(0, self.observation_dims[1]):
                x = int(
                    (i - self.observation_dims[0] / 2) * np.cos(np.pi + yaw) +
                    (j - self.observation_dims[1] / 2) * -np.sin(np.pi + yaw))
                y = int(
                    (i - self.observation_dims[0] / 2) * np.sin(np.pi + yaw) +
                    (j - self.observation_dims[1] / 2) * np.cos(np.pi + yaw))
                tempScr[i][j] = np.sqrt((self.robotPose.position.x + x / 10 -
                                         self.goalPose.position.x)**2 +
                                        (self.robotPose.position.y + y / 10 -
                                         self.goalPose.position.y)**2)
        tempScr -= np.min(tempScr)
        tempScr /= np.max(tempScr)
        for i in range(0, self.observation_dims[0]):
            for j in range(0, self.observation_dims[1]):
                self.addPixel(i, j, 63.0 * (1 - tempScr[i][j]))
        if data.collision == True:
            self.terminal = 1
        self.minFrontDist = data.minFrontDist
        self.readyForNewData = False
Exemplo n.º 50
0
 def odom_callback(self, msg):
     self.x0 = msg.pose.pose.position.x
     self.y0 = msg.pose.pose.position.y
     _, _, self.yaw0 = euler_from_quaternion((msg.pose.pose.orientation.x, msg.pose.pose.orientation.y, msg.pose.pose.orientation.z, msg.pose.pose.orientation.w))
     self.odom_received = True
Exemplo n.º 51
0
def get_yaw(msg):   #radians
    global roll, pitch, yaw
    orientation_q = msg.orientation
    orientation_list = [orientation_q.x, orientation_q.y, orientation_q.z, orientation_q.w]
    (roll, pitch, yaw) = euler_from_quaternion (orientation_list)
    return yaw
def getRotation(odomMsg):
    orientation_q = odomMsg.pose.pose.orientation
    orientation_list = [ orientation_q.x, orientation_q.y, orientation_q.z, orientation_q.w]
    (roll, pitch, yaw) = euler_from_quaternion(orientation_list)
    return yaw
Exemplo n.º 53
0
    def localizeCallback(self, data):
        mapToLaser = self.tfListener.lookupTransform('/map', '/laser',
                                                     rospy.Time(0))

        toLaserTransMatrix = np.asarray(mapToLaser[0][:2])

        carOrientQTMap = (data.pose.orientation.x, data.pose.orientation.y,
                          data.pose.orientation.z, data.pose.orientation.w)

        carYawMap = euler_from_quaternion(carOrientQTMap)[2]  #yaw

        toLaserRotMatrix = np.array([[np.cos(carYawMap), -np.sin(carYawMap)],
                                     [np.sin(carYawMap),
                                      np.cos(carYawMap)]])

        toLaser = lambda coords: (coords - toLaserTransMatrix).dot(
            toLaserRotMatrix)

        #finding goal waypoint and other metadata
        carPositionMap = np.array([data.pose.position.x, data.pose.position.y])

        carPositionLaser = toLaser(
            carPositionMap)  #this should be the zero vector

        #apply positional extrapolation based on localization delay
        extrap = self.extrapolatePosition()
        self.LOOKAHEAD_DISTANCE = self.decideLookahead()
        print(self.lookSpeedMultiplier())
        print(self.lookAngleMultiplier())

        pathPointsLaser = toLaser(np.asarray(self.path_points))

        distances = np.linalg.norm(pathPointsLaser - carPositionLaser, axis=1)
        mask = np.ones(len(distances), dtype=int)
        viable = np.where(
            np.logical_and((distances >= self.LOOKAHEAD_DISTANCE),
                           (pathPointsLaser[:, 0] >
                            (self.FOV_MULT * pathPointsLaser[:, 1]))))
        mask[viable] = 0

        viableMask = ma.masked_array(distances, mask=mask)
        waypointIndex = viableMask.argmin()
        waypoint = pathPointsLaser[waypointIndex]

        goalX = waypoint[0]
        goalY = waypoint[1]

        #calculate goal angle and velocity
        self.findCurveRadius = lambda distance, offset: distance**2 / (
            2 * np.abs(offset))
        self.turnRadius = self.findCurveRadius(distances[waypointIndex], goalY)
        self.curvature = 1 / self.turnRadius

        self.steeringAngle = np.arcsin(
            self.WHEELBASE / self.turnRadius) * np.sign(goalY)
        self.steeringAngle = np.clip(self.steeringAngle, -self.MAX_TURN_ANGLE,
                                     self.MAX_TURN_ANGLE)

        print("===========================")
        print("X: " + str(goalX))
        print("Y: " + str(goalY))
        print("R: " + str(self.turnRadius))
        print("angle: " + str(self.steeringAngle))
        print("ex: " + str(extrap[0]))
        print("ey: " + str(extrap[1]))
        print("LOOK: " + str(self.decideLookahead()))
        print("SPD: " + str(self.decideVelocity()))
        print("SPD-EST: " + str(self.currentSpeed))
        print("===========================")

        msg = drive_param()
        msg.velocity = self.decideVelocity()
        msg.angle = self.steeringAngle
        self.drivePub.publish(msg)
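# A worked numeric check of the pure-pursuit geometry used above. The formulas
# come from localizeCallback (R = d**2 / (2 * |y|), steering = arcsin(wheelbase / R)
# * sign(y)); the numeric values below are illustrative assumptions only.
import numpy as np

d = 2.0           # distance to the chosen waypoint [m]
y = 0.25          # lateral offset of the waypoint in the laser frame [m]
wheelbase = 0.33  # assumed wheelbase [m]
R = d ** 2 / (2 * abs(y))                         # turn radius: 8.0 m
steering = np.arcsin(wheelbase / R) * np.sign(y)  # about 0.0413 rad
print(R, steering)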
Exemplo n.º 54
0
 def _get_imu(self, msg):  #listen to /atlas/imu/pose/pose/orientation
     roll, pitch, self._yaw = euler_from_quaternion([
         msg.orientation.x, msg.orientation.y, msg.orientation.z,
         msg.orientation.w
     ])
     self._CD_StateMachine.UpdateOdometryYaw(self._yaw)
Exemplo n.º 55
0
    def process(self):

        global FIND_FACE

        if self.robot_pose is None:
            return

        print("Now State: ", self.state)

        msg_pose = PoseStamped()

        # find human face
        if self.state == 0:
            if self.face_pose == None:
                return
            msg_pose.pose = self.face_pose

            if -0.23 < self.face_pose.position.x < 0.23 and -0.2 < self.face_pose.position.y < 0.2:
                self.state = 1
                self.fg.position.x = self.robot_pose.position.x
                self.fg.position.y = self.robot_pose.position.y
                FIND_FACE = False

        elif self.state == 1:
            msg_pose.pose.position.x = 0
            self.pubtarget.publish(msg_pose)
            print("state 1 sleep")
            time.sleep(7)
            self.state = 2

        # find cream
        elif self.state == 2:
            if self.cream_pose == None:
                return
            msg_pose.pose = self.cream_pose

            print("cx cy", self.cream_pose.position.x, self.cream_pose.position.y)

            if -0.23 < self.cream_pose.position.x < 0.23 and -0.05 < self.cream_pose.position.y < 0.05:
                self.state = 3

        elif self.state == 3:

            print("State 3")
            msg_pose.pose.position.x = 0.1
            msg_pose.pose.position.y = 0
            self.pubtarget.publish(msg_pose)
            time.sleep(3)
            msg_pose.pose.position.x = 0
            self.pubtarget.publish(msg_pose)
            print("state 3 sleep")
            time.sleep(7)
            self.state = 4

        elif self.state == 4:
            if self.face_pose == None:
                return

            if FIND_FACE == True:
                msg_pose.pose = self.face_pose
            else:
                trans = [self.robot_pose.position.x,self.robot_pose.position.y,self.robot_pose.position.z]
                rot = [self.robot_pose.orientation.x,self.robot_pose.orientation.y,self.robot_pose.orientation.z,self.robot_pose.orientation.w]
                transformation_matrix = tr.compose_matrix(angles = tr.euler_from_quaternion(rot), translate = trans)
                trans_c = [self.fg.position.x,self.fg.position.y,0.08]
                rot_c = [0, 0, 0, 1]
                mat_c = tr.compose_matrix(angles = tr.euler_from_quaternion(rot_c), translate = trans_c)
                inv_mat = tr.inverse_matrix(transformation_matrix)
                new_mat = np.dot(inv_mat, mat_c)
                trans_new = tf.transformations.translation_from_matrix(new_mat)
                msg_pose.pose.position.x = trans_new[0]
                msg_pose.pose.position.y = trans_new[1]

            print("face x: ", trans_new[0])
            print("face y: ", trans_new[1])
            if -0.23 < msg_pose.pose.position.x < 0.23 and -0.23 < msg_pose.pose.position.y < 0.2:
                self.state = 5

        elif self.state == 5:
            msg_pose.pose.position.x = 0

        self.pubtarget.publish(msg_pose)
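# The state-4 branch above expresses a stored world-frame goal in the robot
# frame by composing homogeneous transforms. Below is a minimal standalone
# sketch of that pattern; the helper name is illustrative and not part of the
# original code.
import numpy as np
import tf.transformations as tr

def world_point_in_robot_frame(robot_trans, robot_quat, point_world):
    # robot_trans: [x, y, z], robot_quat: [x, y, z, w], point_world: [x, y, z]
    T_world_robot = tr.compose_matrix(angles=tr.euler_from_quaternion(robot_quat),
                                      translate=robot_trans)
    T_world_point = tr.compose_matrix(translate=point_world)
    T_robot_point = np.dot(tr.inverse_matrix(T_world_robot), T_world_point)
    return tr.translation_from_matrix(T_robot_point)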
Exemplo n.º 56
0
def modify_pose_rotation(pose,
                         offset=0.0,
                         reference_axis='z',
                         rotation_range=None):
    """
    Modifies the orientation of a pose, for a single rotation axis (reference_axis),
    by adding an offset and limiting the rotation to be within certain range. If no
    rotation_range is specified, the pose is not modified (except for the specified
    offset).

    :param pose: The pose to be modified.
    :type pose: geometry_msgs.msg.PoseStamped

    :param offset: Rotation offset to add to the reference_axis of the pose (in degrees).
    :type offset: float

    :param reference_axis: The rotation axis of the pose to be modified (e.g. x, y, z).
    :type reference_axis: str

    :param rotation_range: If specified, the rotation will be limited until the
        rotation_range value (in degrees). For instance, if rotation_range is set
        to [0, 180], where 0 is the minimum and 180 degrees is the maximum, the
        rotation will remain in half circle (e.g. this parameter can be helpful
        for symmetric objects, since there a 180 degree rotation makes no difference).
    :type rotation_range: list

    :return: The modified pose.
    :rtype: geometry_msgs.msg.PoseStamped

    """
    pose_out = copy.deepcopy(pose)
    orientation_in = (pose_out.pose.orientation.x, pose_out.pose.orientation.y,
                      pose_out.pose.orientation.z, pose_out.pose.orientation.w)

    angles_in = transformations.euler_from_quaternion(orientation_in)
    # Add 360 degrees to the negative angles, since the range of
    # euler_from_quaternion is -PI to PI (-180 to 180 degrees)
    angles_in = [(math.pi * 2) + angle if angle < 0.0 else angle
                 for angle in angles_in]

    euler_angles = {
        'x': angles_in[0],
        'y': angles_in[1],
        'z': angles_in[2],
    }

    # Ensure the offset is not more than 360 degrees
    offset %= 360
    target_angle = euler_angles[reference_axis] + math.radians(offset)

    if rotation_range is not None:
        target_angle = restrict_angle_to_range(
            target_angle, math.radians(offset),
            list(numpy.radians(rotation_range)))

    euler_angles[reference_axis] = target_angle
    angles_out = [euler_angles['x'], euler_angles['y'], euler_angles['z']]

    orientation_out = transformations.quaternion_from_euler(
        angles_out[0], angles_out[1], angles_out[2])

    #pose_out.pose.orientation.x = orientation_out[0]
    #pose_out.pose.orientation.y = orientation_out[1]
    #pose_out.pose.orientation.z = orientation_out[2]
    #pose_out.pose.orientation.w = orientation_out[3]

    # hardcoded way - always the same orientation while lying down; the computed
    # orientation_out above is intentionally discarded in favour of the fixed
    # quaternion below

    print('HORIZONTAL OBJECT')

    # pose_out.pose.orientation.x = -0.71
    # pose_out.pose.orientation.y = -0.7
    # pose_out.pose.orientation.z = 0.0
    # pose_out.pose.orientation.w = 0.0

    pose_out.pose.orientation.x = -0.67559
    pose_out.pose.orientation.y = -0.7372
    pose_out.pose.orientation.z = 0.0
    pose_out.pose.orientation.w = 0.0
    return pose_out
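# A short usage sketch for modify_pose_rotation. The pose construction below is
# an illustrative assumption, and the call relies on the module's own imports
# (transformations, restrict_angle_to_range). It rotates a pose 90 degrees about
# z while limiting the result to a half circle, which is useful for symmetric
# objects; note that, as written above, the computed orientation is currently
# overridden by the hardcoded "lying down" quaternion.
from geometry_msgs.msg import PoseStamped

grasp = PoseStamped()
grasp.pose.orientation.w = 1.0  # identity orientation
adjusted = modify_pose_rotation(grasp, offset=90.0, reference_axis='z',
                                rotation_range=[0, 180])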
Exemplo n.º 57
0
 def quatToAng3D(self,quat):
     euler = euler_from_quaternion((quat.x,quat.y,quat.z,quat.w))
     return euler
Exemplo n.º 58
0
 def convert_pose_to_xy_and_theta(self, pose):
     """ Convert pose (geometry_msgs.Pose) to a (x,y,yaw) tuple """
     orientation_tuple = (pose.orientation.x, pose.orientation.y,
                          pose.orientation.z, pose.orientation.w)
     angles = t.euler_from_quaternion(orientation_tuple)
     return (pose.position.x, pose.position.y, angles[2])
Exemplo n.º 59
0
 def from_urdf(cls,
               urdf_string,
               transmission_suffix='_thruster_transmission'):
     '''
     Load from a URDF string. Expects each thruster to be connected to a transmission whose name ends in the specified suffix.
     A transform between each propeller joint and base_link must be available.
     '''
     urdf = URDF.from_xml_string(urdf_string)
     buff = tf2_ros.Buffer()
     listener = tf2_ros.TransformListener(buff)  # noqa
     names = []
     joints = []
     positions = []
     angles = []
     limit = -1
     ratio = -1
     for transmission in urdf.transmissions:
         find = transmission.name.find(transmission_suffix)
         if find != -1 and find + len(transmission_suffix) == len(
                 transmission.name):
             if len(transmission.joints) != 1:
                 raise Exception(
                     'Transmission {} does not have 1 joint'.format(
                         transmission.name))
             if len(transmission.actuators) != 1:
                 raise Exception(
                     'Transmission {} does not have 1 actuator'.format(
                         transmission.name))
             t_ratio = transmission.actuators[0].mechanicalReduction
             if ratio != -1 and ratio != t_ratio:
                  raise Exception(
                      'Transmission {} has a different reduction ratio {} (not supported)'
                      .format(transmission.name, t_ratio))
             ratio = t_ratio
             joint = None
             for t_joint in urdf.joints:
                 if t_joint.name == transmission.joints[0].name:
                     joint = t_joint
             if joint is None:
                 rospy.logerr('Transmission joint {} not found'.format(
                     transmission.joints[0].name))
             try:
                 trans = buff.lookup_transform('wamv/base_link',
                                               joint.child, rospy.Time(),
                                               rospy.Duration(10))
             except tf2_ros.TransformException as e:
                 raise Exception(e)
             translation = rosmsg_to_numpy(trans.transform.translation)
             rot = rosmsg_to_numpy(trans.transform.rotation)
             yaw = euler_from_quaternion(rot)[2]
             names.append(transmission.name[0:find])
             positions.append(translation[0:2])
             angles.append(yaw)
             joints.append(joint.name)
             if limit != -1 and joint.limit.effort != limit:
                 raise Exception(
                     'Thruster {} had a different limit, cannot proceed'.
                     format(joint.name))
             limit = joint.limit.effort
     limit_tuple = (limit, -limit)
     return cls(names,
                positions,
                angles,
                generate_linear_force_to_command(ratio),
                limit_tuple,
                joints=joints)
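# A short usage sketch for from_urdf. The enclosing class is not shown in this
# listing, so the class name `ThrusterLayout` below is hypothetical;
# /robot_description is the conventional ROS parameter holding the robot's URDF.
import rospy

urdf_string = rospy.get_param('/robot_description')
layout = ThrusterLayout.from_urdf(urdf_string,
                                  transmission_suffix='_thruster_transmission')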
Exemplo n.º 60
0
def quaternion_to_rpy(quat):
    quatarray = quaternion_to_array(quat)
    rpyarray = transformations.euler_from_quaternion(quatarray)
    return (rpyarray[0], rpyarray[1], rpyarray[2])