def Publish_Odom_Tf(self):
    # Planar pose: only a rotation about z, so x = y = 0
    # (equivalent to tf.transformations.quaternion_from_euler(0, 0, self.Heading))
    quaternion = Quaternion()
    quaternion.x = 0
    quaternion.y = 0
    quaternion.z = math.sin(self.Heading / 2.0)
    quaternion.w = math.cos(self.Heading / 2.0)

    rosNow = rospy.Time.now()
    self._tf_broad_caster.sendTransform(
        (self.X_Pos, self.Y_Pos, 0),
        (quaternion.x, quaternion.y, quaternion.z, quaternion.w),
        rosNow,
        "base_footprint",
        "odom"
        )

    # next, we'll publish the odometry message over ROS
    odometry = Odometry()
    odometry.header.frame_id = "odom"
    odometry.header.stamp = rosNow
    odometry.pose.pose.position.x = self.X_Pos
    odometry.pose.pose.position.y = self.Y_Pos
    odometry.pose.pose.position.z = 0
    odometry.pose.pose.orientation = quaternion
    # child frame matches the one used in the TF broadcast above
    odometry.child_frame_id = "base_footprint"
    odometry.twist.twist.linear.x = self.Velocity
    odometry.twist.twist.linear.y = 0
    odometry.twist.twist.angular.z = self.Omega
    self._odom_publisher.publish(odometry)
Example #2
def array_to_quaternion(nparr):
    quat = Quaternion()
    quat.x = nparr[0]
    quat.y = nparr[1]
    quat.z = nparr[2]
    quat.w = nparr[3]
    return quat
def convert_planar_phi_to_quaternion(phi):
    quaternion = Quaternion()
    quaternion.x = 0
    quaternion.y = 0
    quaternion.z = math.sin(phi / 2)
    quaternion.w = math.cos(phi / 2)
    return quaternion
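A quick consistency check tying the two converters above together (a sketch, assuming tf.transformations is importable; its quaternion arrays use (x, y, z, w) ordering):

import math
import tf.transformations

phi = math.pi / 4
q_msg = convert_planar_phi_to_quaternion(phi)                # message form
q_arr = tf.transformations.quaternion_from_euler(0, 0, phi)  # array form, (x, y, z, w)
assert abs(q_msg.z - q_arr[2]) < 1e-9 and abs(q_msg.w - q_arr[3]) < 1e-9
q_msg2 = array_to_quaternion(q_arr)                          # same quaternion as q_msg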
def get_ros_quaternion(self, almath_quaternion):
    output = Quaternion()
    output.w = almath_quaternion.w
    output.x = almath_quaternion.x
    output.y = almath_quaternion.y
    output.z = almath_quaternion.z
    return output
def list_to_dict(lst):

    """
    Convert a list to a pose dictionary, assuming it is in the order
    x, y, z, orientation x, orientation y, orientation z[, orientation w].
    """

    if len(lst) == 6:
        # wrap in a Quaternion message so both branches return the same type
        qtn = Quaternion(*tf.transformations.quaternion_from_euler(lst[3], lst[4], lst[5]))
    elif len(lst) == 7:
        qtn = Quaternion()
        qtn.x = lst[3]
        qtn.y = lst[4]
        qtn.z = lst[5]
        qtn.w = lst[6]
    else:
        raise MoveItCommanderException("""Expected either 6 or 7 elements
                in list: (x,y,z,r,p,y) or (x,y,z,qx,qy,qz,qw)""")

    pnt = Point()
    pnt.x = lst[0]
    pnt.y = lst[1]
    pnt.z = lst[2]

    pose_dict = {
        'position': pnt,
        'orientation': qtn
    }
    return pose_dict
def __init__(self):
    position = Point()
    rotation = Quaternion()
    rospy.init_node('tf_listener')
    self.pub = rospy.Publisher('robot_odom', robot_odom, queue_size=10)
    self.now = rospy.Time()
    self.tf_listener = tf.TransformListener()
    rospy.sleep(0.5)
    frame = '/odom'
    wantedframe = self.frame_checker(frame)

    while not rospy.is_shutdown():
        (po, ro) = self.get_odom(frame, wantedframe)
        position.x = po[0]
        position.y = po[1]
        position.z = po[2]
        rotation.x = ro[0]
        rotation.y = ro[1]
        rotation.z = ro[2]
        rotation.w = ro[3]
        #rospy.sleep()
        self.publish(position, rotation, wantedframe)

        #print 'position: \n', position, '\n', 'rotation: \n', self.quat_to_angle(rotation), 'time: ', self.robot_odom.header.stamp
        print(self.robot_odom, '\nangular: ', self.quat_to_angle(self.robot_odom.rotation))  # self.normalize_angle(self.quat_to_angle(self.robot_odom.rotation))
def quaternion_vector2quaternion_ros(quaternion_vector):
  """
  Quaternion: [x, y, z, w]
  """
  quaternion_ros = Quaternion()
  quaternion_ros.x, quaternion_ros.y, quaternion_ros.z, quaternion_ros.w = quaternion_vector
  return quaternion_ros
def getOrientation(self):
    ort = Quaternion()
    ort.x = self._orientationX
    ort.y = self._orientationY
    ort.z = self._orientationZ
    ort.w = self._orientationW
    return ort
    def poll(self):
        (x, y, theta) = self.arduino.arbot_read_odometry()

        quaternion = Quaternion()
        quaternion.x = 0.0 
        quaternion.y = 0.0
        quaternion.z = sin(theta / 2.0)
        quaternion.w = cos(theta / 2.0)
    
        # Create the odometry transform frame broadcaster.
        now = rospy.Time.now()
        self.odomBroadcaster.sendTransform(
            (x, y, 0), 
            (quaternion.x, quaternion.y, quaternion.z, quaternion.w),
            now,
            "base_link",
            "odom"
            )
    
        odom = Odometry()
        odom.header.frame_id = "odom"
        odom.child_frame_id = "base_link"
        odom.header.stamp = now
        odom.pose.pose.position.x = x
        odom.pose.pose.position.y = y
        odom.pose.pose.position.z = 0
        odom.pose.pose.orientation = quaternion
        
        odom.twist.twist.linear.x = self.forwardSpeed
        odom.twist.twist.linear.y = 0
        odom.twist.twist.angular.z = self.angularSpeed

        self.arduino.arbot_set_velocity(self.forwardSpeed, self.angularSpeed)

        self.odomPub.publish(odom)
def axis_angle_to_quaternion(axis, angle):
    q = Quaternion()
    q.x = axis.x * math.sin(angle / 2.0)
    q.y = axis.y * math.sin(angle / 2.0)
    q.z = axis.z * math.sin(angle / 2.0)
    q.w = math.cos(angle / 2.0)
    return q
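Note that axis_angle_to_quaternion only yields a unit quaternion when the axis is unit length; a minimal usage sketch (using geometry_msgs/Vector3 for the axis is an assumption about the caller's type, anything with .x/.y/.z fields works):

import math
from geometry_msgs.msg import Vector3

axis = Vector3(0.0, 0.0, 2.0)                            # not unit length
n = math.sqrt(axis.x ** 2 + axis.y ** 2 + axis.z ** 2)
unit_axis = Vector3(axis.x / n, axis.y / n, axis.z / n)  # normalize first
q = axis_angle_to_quaternion(unit_axis, math.pi / 2)     # 90 deg about +z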
Example #11
def rotateQuaternion(q_orig, yaw):
    """
    Converts a basic rotation about the z-axis (in radians) into the
    Quaternion notation required by ROS transform and pose messages.
    
    :Args:
       | q_orig (geometry_msgs.msg.Quaternion): to be rotated
       | yaw (double): rotate by this amount in radians
    :Return:
       | (geometry_msgs.msg.Quaternion) q_orig rotated yaw about the z axis
     """
    # Create a temporary Quaternion to represent the change in heading
    q_headingChange = Quaternion()

    p = 0
    y = yaw / 2.0
    r = 0
 
    sinp = math.sin(p)
    siny = math.sin(y)
    sinr = math.sin(r)
    cosp = math.cos(p)
    cosy = math.cos(y)
    cosr = math.cos(r)
 
    q_headingChange.x = sinr * cosp * cosy - cosr * sinp * siny
    q_headingChange.y = cosr * sinp * cosy + sinr * cosp * siny
    q_headingChange.z = cosr * cosp * siny - sinr * sinp * cosy
    q_headingChange.w = cosr * cosp * cosy + sinr * sinp * siny

    # Multiply new (heading-only) quaternion by the existing (pitch and bank) 
    # quaternion. Order is important! Original orientation is the second 
    # argument rotation which will be applied to the quaternion is the first 
    # argument. 
    return multiply_quaternions(q_headingChange, q_orig)
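multiply_quaternions is called above but not defined in this snippet; a minimal sketch of the Hamilton product for geometry_msgs Quaternions (the same math as quat_product in Example #19 below):

def multiply_quaternions(qa, qb):
    # Hamilton product qa * qb, component-wise over message fields
    q = Quaternion()
    q.w = qa.w * qb.w - qa.x * qb.x - qa.y * qb.y - qa.z * qb.z
    q.x = qa.w * qb.x + qa.x * qb.w + qa.y * qb.z - qa.z * qb.y
    q.y = qa.w * qb.y - qa.x * qb.z + qa.y * qb.w + qa.z * qb.x
    q.z = qa.w * qb.z + qa.x * qb.y - qa.y * qb.x + qa.z * qb.w
    return q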
def quaternion_to_msg(q):
    msg = Quaternion()
    msg.x = q[0]
    msg.y = q[1]
    msg.z = q[2]
    msg.w = q[3]
    return msg
def update_kalman(ar_tags):
    print("started update")
    rospy.wait_for_service('innovation')
    update = rospy.ServiceProxy('innovation', NuSrv)
    listener = tf.TransformListener()
    while True:
        try:
            try:
                (trans, rot) = listener.lookupTransform(ar_tags['arZ'], ar_tags['ar1'], rospy.Time(0))
            except:
                print("Couldn't look up transform")
                continue
            lin = Vector3()
            quat = Quaternion()
            lin.x = trans[0]
            lin.y = trans[1]
            lin.z = trans[2]
            quat.x = rot[0]
            quat.y = rot[1]
            quat.z = rot[2]
            quat.w = rot[3]
            transform = Transform()
            transform.translation = lin
            transform.rotation = quat
            test = update(transform, ar_tags['ar1'])
            print("Service call succeeded")
        except rospy.ServiceException as e:
            print("Service call failed: %s" % e)
Example #14
def compare_all(data):
    global k

    last_row = csv_data[k - frequency_offset]

    diff = Quaternion()
    diff.x = round(float(last_row[1]) - convert_ppm(data.channels[3]), 4)
    diff.y = round(float(last_row[2]) - convert_ppm(data.channels[1]), 4)
    diff.z = round(float(last_row[4]) - convert_ppm(data.channels[0]), 4)
    diff.w = round(float(last_row[5]) + convert_ppm(data.channels[2]), 4)

    testing_pub.publish(diff)

    if (k < len(csv_data)):
        row = csv_data[k]
        # log for post processing
        # joystick log
        c = csv.writer(open("/home/jinahadam/catkin_ws/src/testing/gps_joy.csv", "ab"))
        c.writerow([data.header.stamp.secs, row[1], row[2], row[4], row[5]])
        # rc In log
        c = csv.writer(open("/home/jinahadam/catkin_ws/src/testing/rc_in.csv", "ab"))
        c.writerow([data.header.stamp.secs, convert_ppm(data.channels[3]), convert_ppm(data.channels[1]), convert_ppm(data.channels[0]), convert_ppm(data.channels[2])])

        msg = Joy()
        msg.axes = (float(row[1]), float(row[2]), float(row[3]), float(row[4]), float(row[5]), float(row[6]), float(row[7]))
        msg.buttons = (0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)
        k = k + 1
        pub.publish(msg)
    else:
        #k = 0
        print("Joystick & RC In data logged with GPS Time")
        exit()
Example #15
    def publish(self, data):
        q = data.getOrientation()
        # tf.transformations uses (x, y, z, w) ordering for quaternion arrays
        roll, pitch, yaw = euler_from_quaternion([q.x, q.y, q.z, q.w])

        # print "Before ", "Yaw: " + str(yaw * 180 / pi), "Roll: " + str(roll * 180 / pi), "pitch: " + str(pitch * 180 / pi), "\n\n"

        array = quaternion_from_euler(roll, pitch, yaw + (self._angle * pi / 180))

        res = Quaternion()
        res.x = array[0]
        res.y = array[1]
        res.z = array[2]
        res.w = array[3]

        roll, pitch, yaw = euler_from_quaternion([q.x, q.y, q.z, q.w])

        # print "after ", "Yaw: " + str(yaw * 180 / pi), "Roll: " + str(roll * 180 / pi), "pitch: " + str(pitch * 180 / pi), "\n\n"

        msg = Imu()
        msg.header.frame_id = self._frameId
        msg.header.stamp = rospy.get_rostime()
        msg.orientation = res
        msg.linear_acceleration = data.getAcceleration()
        msg.angular_velocity = data.getVelocity()

        magMsg = MagneticField()
        magMsg.header.frame_id = self._frameId
        magMsg.header.stamp = rospy.get_rostime()
        magMsg.magnetic_field = data.getMagnetometer()

        self._pub.publish(msg)
        self._pubMag.publish(magMsg)
Example #16
    def publish(self, data):
        if not self._calib and data.getImuMsgId() == PUB_ID:
            q = data.getOrientation()
            # tf.transformations uses (x, y, z, w) ordering for quaternion arrays
            roll, pitch, yaw = euler_from_quaternion([q.x, q.y, q.z, q.w])
            array = quaternion_from_euler(roll, pitch, yaw + (self._angle * pi / 180))

            res = Quaternion()
            res.x = array[0]
            res.y = array[1]
            res.z = array[2]
            res.w = array[3]

            msg = Imu()
            msg.header.frame_id = self._frameId
            msg.header.stamp = rospy.get_rostime()
            msg.orientation = res
            msg.linear_acceleration = data.getAcceleration()
            msg.angular_velocity = data.getVelocity()

            magMsg = MagneticField()
            magMsg.header.frame_id = self._frameId
            magMsg.header.stamp = rospy.get_rostime()
            magMsg.magnetic_field = data.getMagnetometer()

            self._pub.publish(msg)
            self._pubMag.publish(magMsg)

        elif data.getImuMsgId() == CALIB_ID:
            x, y, z = data.getValues()
            msg = imuCalib()

            if x > self._xMax:
                self._xMax = x
            if x < self._xMin:
                self._xMin = x

            if y > self._yMax:
                self._yMax = y
            if y < self._yMin:
                self._yMin = y

            if z > self._zMax:
                self._zMax = z
            if z < self._zMin:
                self._zMin = z


            msg.x.data = x
            msg.x.max = self._xMax
            msg.x.min = self._xMin

            msg.y.data = y
            msg.y.max = self._yMax
            msg.y.min = self._yMin

            msg.z.data = z
            msg.z.max = self._zMax
            msg.z.min = self._zMin

            self._pubCalib.publish(msg)
	def on_aseba_odometry_event(self,data): 
		now = data.stamp
		dt = (now-self.then).to_sec()
		self.then = now
		dsl = (data.data[0]*dt)/SPEED_COEF # left wheel delta in mm
		dsr = (data.data[1]*dt)/SPEED_COEF # right wheel delta in mm
		ds = ((dsl+dsr)/2.0)/1000.0      # robot traveled distance in meters
		dth = atan2(dsr-dsl,BASE_WIDTH)  # turn angle

		self.x += ds*cos(self.th+dth/2.0)
		self.y += ds*sin(self.th+dth/2.0)
		self.th+= dth

		# prepare tf from base_link to odom 
		quaternion = Quaternion()
		quaternion.z = sin(self.th/2.0)
		quaternion.w = cos(self.th/2.0)

		# prepare odometry
		self.odom.header.stamp = rospy.Time.now() # OR TO TAKE ONE FROM THE EVENT?
		self.odom.pose.pose.position.x = self.x
		self.odom.pose.pose.position.y = self.y
		self.odom.pose.pose.position.z = 0
		self.odom.pose.pose.orientation = quaternion
		self.odom.twist.twist.linear.x = ds/dt
		self.odom.twist.twist.angular.z = dth/dt

		# publish odometry
		self.odom_broadcaster.sendTransform((self.x,self.y,0),(quaternion.x,quaternion.y,quaternion.z,quaternion.w),self.then,"base_link","odom")
		self.odom_pub.publish(self.odom)
    def get_nearest_cloud(self, msg):
        points = list()
        points_xy = list()
        
        # Get all the points in the visible cloud (may be prefiltered by other nodes)
        for point in point_cloud2.read_points(msg, skip_nans=True):
            points.append(point[:3])
            points_xy.append(point[:2])

        # Convert to a numpy array            
        points_arr = np.float32([p for p in points]).reshape(-1, 1, 3) 
        
        # Compute the COG 
        cog = np.mean(points_arr, 0)
        
        # Convert to a Point
        cog_point = Point()
        cog_point.x = cog[0][0]
        cog_point.y = cog[0][1]
        cog_point.z = cog[0][2]
        #cog_point.z = 0.35
        
        # Abort if we get an NaN in any component
        if np.isnan(np.sum(cog)):
            return
        
        # If we have enough points, find the best fit ellipse around them
        try:
            points_xy_arr = np.float32([p for p in points_xy]).reshape(-1, 1, 2)
            if len(points_xy) > 6:
                track_box = cv2.fitEllipse(points_xy_arr)
            else:
                # Otherwise, find the best fitting rectangle
                track_box = cv2.boundingRect(points_xy_arr)

            angle = pi - radians(track_box[2])
        except:
            return
        
        #print angle
        
        # Convert the rotation angle to a quaternion
        q_angle = quaternion_from_euler(0, angle, 0, axes='sxyz')
        q = Quaternion(*q_angle)
        
        # NOTE: the computed orientation is overridden with a fixed quaternion here
        q.x = 0.707
        q.y = 0
        q.z = 0.707
        q.w = 0

        # Publish the COG and orientation
        target = PoseStamped()
        target.header.stamp = rospy.Time.now()
        target.header.frame_id = msg.header.frame_id
        target.pose.position = cog_point
        target.pose.orientation = q
        
        # Publish the movement command
        self.target_pub.publish(target)
Example #19
def quat_product(qa, qb):
    qc = Quaternion()
    qc.w = qa.w*qb.w - (qa.x*qb.x + qa.y*qb.y + qa.z*qb.z)
    qc.x = (qa.w*qb.x + qa.x*qb.w + qa.y*qb.z - qa.z*qb.y)
    qc.y = (qa.w*qb.y - qa.x*qb.z + qa.y*qb.w + qa.z*qb.x)
    qc.z = (qa.w*qb.z + qa.x*qb.y - qa.y*qb.x + qa.z*qb.w)

    return qc
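A hedged cross-check of quat_product against tf.transformations.quaternion_multiply, which computes the same Hamilton product over (x, y, z, w) arrays:

import tf.transformations
from geometry_msgs.msg import Quaternion

qa = Quaternion(0.0, 0.0, 0.7071068, 0.7071068)  # ~90 deg about +z
qb = Quaternion(0.7071068, 0.0, 0.0, 0.7071068)  # ~90 deg about +x
qc = quat_product(qa, qb)
ref = tf.transformations.quaternion_multiply(
    [qa.x, qa.y, qa.z, qa.w], [qb.x, qb.y, qb.z, qb.w])
# ref is (x, y, z, w) and should agree with (qc.x, qc.y, qc.z, qc.w)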
    def publish(self, data):
        q = Quaternion()
        q.x = 0
        q.y = 0
        q.z = data[6]
        q.w = data[7]
        odomMsg = Odometry()
        odomMsg.header.frame_id = self._odom
        odomMsg.header.stamp = rospy.get_rostime()
        odomMsg.child_frame_id = self._baseLink
        odomMsg.pose.pose.position.x = data[0]
        odomMsg.pose.pose.position.y = data[1]
        odomMsg.pose.pose.position.z = 0
        odomMsg.pose.pose.orientation = q
        if self._firstTimePub:
            self._prevOdom = odomMsg
            self._firstTimePub = False
            return

        velocity = Twist()

        deltaTime = odomMsg.header.stamp.to_sec() - self._prevOdom.header.stamp.to_sec()
        # tf.transformations expects (x, y, z, w) arrays and returns (roll, pitch, yaw)
        roll, pitch, yaw = euler_from_quaternion(
            [odomMsg.pose.pose.orientation.x, odomMsg.pose.pose.orientation.y,
             odomMsg.pose.pose.orientation.z, odomMsg.pose.pose.orientation.w])
        prevRoll, prevPitch, prevYaw = euler_from_quaternion(
            [self._prevOdom.pose.pose.orientation.x, self._prevOdom.pose.pose.orientation.y,
             self._prevOdom.pose.pose.orientation.z, self._prevOdom.pose.pose.orientation.w])
        if deltaTime > 0:
            velocity.linear.x = (data[8] / deltaTime)

        deltaYaw = yaw - prevYaw

        # rospy.loginfo("yaw: %f\t\tpevYaw: %f\t\tdeltaYaw: %f" % (yaw,prevYaw,deltaYaw))

        if deltaYaw > math.pi: deltaYaw -= 2 * math.pi
        elif deltaYaw < -math.pi: deltaYaw += 2 * math.pi

        if deltaTime > 0:
            velocity.angular.z = -(deltaYaw / deltaTime)

        # rospy.loginfo("deltaYaw after check: %f\t\t angular: %f" % (deltaYaw, velocity.angular.z))

        odomMsg.twist.twist = velocity

        self._prevOdom = odomMsg

        traMsg = TransformStamped()
        traMsg.header.frame_id = self._odom
        traMsg.header.stamp = rospy.get_rostime()
        traMsg.child_frame_id = self._baseLink
        traMsg.transform.translation.x = data[0]
        traMsg.transform.translation.y = data[1]
        traMsg.transform.translation.z = 0
        traMsg.transform.rotation = q

        self._pub.publish(odomMsg)
        self._broadCase.sendTransformMessage(traMsg)
    def run(self):
        rosRate = rospy.Rate(self.rate)
        rospy.loginfo("Publishing Odometry data at: " + str(self.rate) + " Hz")

        # NOTE: velocities are fixed at zero in this snippet; real values would
        # come from wheel feedback or commanded twists
        vx = 0.0
        vy = 0.0
        vth = 0.0


        while not rospy.is_shutdown() and not self.finished.isSet():
            now = rospy.Time.now()         
            if now > self.t_next:
                dt = now - self.then
                self.then = now
                dt = dt.to_sec()

                delta_x = (vx * cos(self.th) - vy * sin(self.th)) * dt
                delta_y = (vx * sin(self.th) + vy * cos(self.th)) * dt
                delta_th = vth * dt

                self.x += delta_x
                self.y += delta_y
                self.th += delta_th


                quaternion = Quaternion()
                quaternion.x = 0.0 
                quaternion.y = 0.0
                quaternion.z = sin(self.th / 2.0)
                quaternion.w = cos(self.th / 2.0)
    
                # Create the odometry transform frame broadcaster.
                self.odomBroadcaster.sendTransform(
                    (self.x, self.y, 0), 
                    (quaternion.x, quaternion.y, quaternion.z, quaternion.w),
                    rospy.Time.now(),
                    "base_link",
                    "odom"
                    )
    
                odom = Odometry()
                odom.header.frame_id = "odom"
                odom.child_frame_id = "base_link"
                odom.header.stamp = now
                odom.pose.pose.position.x = self.x
                odom.pose.pose.position.y = self.y
                odom.pose.pose.position.z = 0
                odom.pose.pose.orientation = quaternion
                odom.twist.twist.linear.x = vx
                odom.twist.twist.linear.y = 0
                odom.twist.twist.angular.z = vth
    
                self.odomPub.publish(odom)
                
                self.t_next = now + self.t_delta
                
            rosRate.sleep()
Example #22
    def advance(self, dt):
        now = self.then + dt
        elapsed = now - self.then
        self.then = now
        if self.debug:
            print("advancing to t=%5.3f" % self.then)

        ldist = dt * self.lwSpeed
        rdist = dt * self.rwSpeed

        d = (ldist + rdist)/2.0
        th = (rdist - ldist)/self.distanceBetweenWheels

        dx = d / dt
        dth = th / dt

        if (d != 0):
            x = math.cos(th)*d
            y = math.sin(th)*d # Does this differ from the create driver?  Should it?
            self.x = self.x + (math.cos(self.th)*x - math.sin(self.th)*y)
            self.y = self.y + (math.sin(self.th)*x + math.cos(self.th)*y)

        if (th != 0):
            self.th = self.th + th
            self.th = self.th % (2 * math.pi)
            self.th = self.th - (2 * math.pi) if self.th > math.pi else self.th
                

        quaternion = Quaternion()
        quaternion.x = 0.0
        quaternion.y = 0.0
        quaternion.z = math.sin(self.th/2.0)
        quaternion.w = math.cos(self.th/2.0)

        # Most of the applications I'm working on don't need this.
        # self.odomBroadcaster.sendTransform(
        #     (self.x, self.y, 0),
        #     (quaternion.x, quaternion.y, quaternion.z, quaternion.w),
        #     rospy.Time.now(),
        #     "base_link",
        #     "odom"
        #     )

        odom = Odometry()
        odom.header.stamp = rospy.Time.now()
        odom.header.frame_id = "odom"
        odom.pose.pose.position.x = self.x
        odom.pose.pose.position.y = self.y
        odom.pose.pose.position.z = 0
        odom.pose.pose.orientation = quaternion

        odom.child_frame_id = "base_link"
        odom.twist.twist.linear.x = dx
        odom.twist.twist.linear.y = 0
        odom.twist.twist.angular.z = dth

        self.odomPub.publish(odom)
Example #23
def quat_normalize(q):
    q1 = Quaternion()
    qnorm = sqrt(q.w*q.w + q.x*q.x + q.y*q.y + q.z*q.z)
    q1.w = q.w/qnorm
    q1.x = q.x/qnorm
    q1.y = q.y/qnorm
    q1.z = q.z/qnorm

    return q1
Example #24
def only_yaw(self, quat, mag=1, add=0):
    _, _, yaw = euler_from_quaternion(self.quat_to_list(quat))
    quat_yaw = quaternion_from_euler(0.0, 0.0, mag * yaw + add)
    ret_quat = Quaternion()
    ret_quat.x = quat_yaw[0]
    ret_quat.y = quat_yaw[1]
    ret_quat.z = quat_yaw[2]
    ret_quat.w = quat_yaw[3]
    return ret_quat
Example #25
def quat_inverse(q):
    # inverse = conjugate / |q|^2 (for a unit quaternion this is just the conjugate);
    # the original divided by |q|, which is only correct for unit input
    q_inv = Quaternion()
    norm_sq = q.w*q.w + q.x*q.x + q.y*q.y + q.z*q.z
    q_inv.w = +q.w / norm_sq
    q_inv.x = -q.x / norm_sq
    q_inv.y = -q.y / norm_sq
    q_inv.z = -q.z / norm_sq

    return q_inv
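Sanity check for the inverse (a sketch, using quat_product from Example #19): multiplying a quaternion by its inverse should give the identity, even for a non-unit input.

from geometry_msgs.msg import Quaternion

q = Quaternion(1.0, 2.0, 3.0, 4.0)       # deliberately non-unit
q_id = quat_product(q, quat_inverse(q))
# q_id should be approximately (0, 0, 0, 1)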
def _x_y_yaw_to_pose(self, x, y, yaw):
    position = Point(x, y, 0)
    orientation = Quaternion()
    q = quaternion_from_euler(0, 0, yaw)
    orientation.x = q[0]
    orientation.y = q[1]
    orientation.z = q[2]
    orientation.w = q[3]
    return Pose(position=position, orientation=orientation)
Example #27
    def update(self):
        #############################################################################
        now = rospy.Time.now()
        if now > self.t_next:
            elapsed = now - self.then
            self.then = now
            elapsed = elapsed.to_sec()

            # this approximation works (in radians) for small angles
            th = self.th - self.th_pre

            self.dr = th / elapsed

            # publish the odom information
            quaternion = Quaternion()
            quaternion.x = self.qx
            quaternion.y = self.qy
            quaternion.z = self.qz
            quaternion.w = self.qw
            self.odomBroadcaster.sendTransform(
                (self.x, self.y, 0),
                (0, 0, quaternion.z, quaternion.w),
                rospy.Time.now(),
                self.base_frame_id,
                self.odom_frame_id,
            )

            self.laserBroadcaster.sendTransform(
                (0, 0, 0), (0, 0, 0, 1), rospy.Time.now(), self.laser_frame_id, self.base_frame_id
            )

            odom = Odometry()
            odom.header.stamp = now
            odom.header.frame_id = self.odom_frame_id
            odom.pose.pose.position.x = self.x
            odom.pose.pose.position.y = self.y
            odom.pose.pose.position.z = 0
            odom.pose.pose.orientation = quaternion
            odom.child_frame_id = self.base_frame_id
            odom.twist.twist.linear.x = self.dx
            odom.twist.twist.linear.y = 0
            odom.twist.twist.angular.z = self.dr
            self.odomPub.publish(odom)
            laser = LaserScan()
            laser.header.stamp = now
            laser.header.frame_id = self.laser_frame_id
            laser.angle_min = self.laser.angle_min
            laser.angle_max = self.laser.angle_max
            laser.angle_increment = self.laser.angle_increment
            laser.time_increment = self.laser.time_increment
            laser.scan_time = self.laser.scan_time
            laser.range_min = self.laser.range_min
            laser.range_max = self.laser.range_max
            laser.ranges = self.laser.ranges
            laser.intensities = self.laser.intensities
            self.laserPub.publish(laser)
Example #28
    def publish_odometry(self):
        quat = tf.transformations.quaternion_from_euler(0, 0, self.yaw)
        quaternion = Quaternion()
        quaternion.x = quat[0]
        quaternion.y = quat[1]
        quaternion.z = quat[2]
        quaternion.w = quat[3]

        if self.last_time is None:
            self.last_time = rospy.Time.now()

        vx = self.vx #(msg->twist.twist.linear.x) *trans_mul_;
        vy = self.vy #msg->twist.twist.linear.y;
        vth = self.vyaw #(msg->twist.twist.angular.z) *rot_mul_;

        current_time = rospy.Time.now()

        dt = (current_time - self.last_time).to_sec()
        delta_x = (vx * math.cos(self.yaw) - vy * math.sin(self.yaw)) * dt
        delta_y = (vx * math.sin(self.yaw) + vy * math.cos(self.yaw)) * dt
        delta_th = vth * dt

        self.x += delta_x
        self.y += delta_y
        self.yaw += delta_th

        odom = Odometry()
        odom.header.stamp = rospy.Time.now()
        odom.header.frame_id = "odom"
        odom.child_frame_id = "base_link"
        odom.pose.pose.position.x = self.x
        odom.pose.pose.position.y = self.y
        odom.pose.pose.position.z = 0.0
        odom.pose.pose.orientation = quaternion
        odom.twist.twist.linear.x = vx
        odom.twist.twist.linear.y = vy
        odom.twist.twist.angular.z = vth

        self.odom_pub.publish(odom)

        transform_stamped = TransformStamped()
        transform_stamped.header = odom.header

        transform_stamped.transform.translation.x = self.x
        transform_stamped.transform.translation.y = self.y
        transform_stamped.transform.translation.z = 0.0
        transform_stamped.transform.rotation = quaternion

        #self.tfbr.sendTransform(transform_stamped)
        # tf's sendTransform takes (translation, rotation, time, child, parent);
        # base_link is the child of odom, matching the Odometry message above
        self.tfbr.sendTransform((self.x, self.y, 0.0),
                                (quat[0], quat[1], quat[2], quat[3]),
                                rospy.Time.now(),
                                "base_link",
                                "odom")

        self.last_time = current_time
def quat_from_euler(self, r, p, y):
    quat_g = Quaternion()
    quat_tf = transformations.quaternion_from_euler(r, p, y)

    quat_g.x = quat_tf[0]
    quat_g.y = quat_tf[1]
    quat_g.z = quat_tf[2]
    quat_g.w = quat_tf[3]

    return quat_g
Example #30
def quaternion_conjugate(q):

    q_conj = Quaternion()

    q_conj.x = -q.x
    q_conj.y = -q.y
    q_conj.z = -q.z
    q_conj.w = q.w

    return q_conj
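For a unit quaternion the conjugate equals the inverse, so this helper and quat_inverse (Example #25) agree once the input is normalized with quat_normalize (Example #23); a minimal check:

from geometry_msgs.msg import Quaternion

q_unit = quat_normalize(Quaternion(1.0, 2.0, 3.0, 4.0))
qc = quaternion_conjugate(q_unit)
qi = quat_inverse(q_unit)
# qc and qi should agree componentwise for a unit quaternion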
Example #31
def listToQuaternion(l):
    return Quaternion(l[0], l[1], l[2], l[3])
Example #32
    def update(self):
        #############################################################################
        now = rospy.Time.now()
        if now > self.t_next:
            elapsed = now - self.then
            self.then = now
            elapsed = elapsed.to_sec()

            # calculate odometry
            if self.enc_left is None:
                d_left = 0
                d_right = 0
            else:
                d_left = (self.left - self.enc_left) / self.ticks_meter
                d_right = (self.right - self.enc_right) / self.ticks_meter
            self.enc_left = self.left
            self.enc_right = self.right

            # distance traveled is the average of the two wheels
            d = (d_left + d_right) / 2
            # this approximation works (in radians) for small angles
            th = (d_right - d_left) / self.base_width
            # calculate velocities
            self.dx = d / elapsed
            self.dr = th / elapsed

            if (d != 0):
                # calculate distance traveled in x and y
                x = cos(th) * d
                y = -sin(th) * d
                # calculate the final position of the robot
                self.x = self.x + (cos(self.th) * x - sin(self.th) * y)
                self.y = self.y + (sin(self.th) * x + cos(self.th) * y)
            if (th != 0):
                self.th = self.th + th

            # publish the odom information
            quaternion = Quaternion()
            quaternion.x = 0.0
            quaternion.y = 0.0
            quaternion.z = sin(self.th / 2)
            quaternion.w = cos(self.th / 2)
            # self.odomBroadcaster.sendTransform(
            #     (self.x, self.y, 0),
            #     (quaternion.x, quaternion.y, quaternion.z, quaternion.w),
            #     rospy.Time.now(),
            #     self.base_frame_id,
            #     self.odom_frame_id
            #     )

            odom = Odometry()
            odom.header.stamp = now
            odom.header.frame_id = self.odom_frame_id
            odom.pose.pose.position.x = self.x
            odom.pose.pose.position.y = self.y
            odom.pose.pose.position.z = 0
            odom.pose.pose.orientation = quaternion
            odom.child_frame_id = self.base_frame_id
            odom.twist.twist.linear.x = self.dx
            odom.twist.twist.linear.y = 0
            odom.twist.twist.angular.z = self.dr
            self.odomPub.publish(odom)
if __name__ == '__main__':
    rospy.init_node('repeater_tf_gazebo')

    tfBuffer = tf2_ros.Buffer()
    listener = tf2_ros.TransformListener(tfBuffer)
    
    pub = rospy.Publisher('/gazebo/set_model_state', ModelState, queue_size=10)

    while not rospy.is_shutdown():
        try:
            trans = tfBuffer.lookup_transform('base', 'tool0_controller', rospy.Time())
        except (tf2_ros.LookupException, tf2_ros.ConnectivityException, tf2_ros.ExtrapolationException):
            rospy.sleep(1)
            continue

        point = Point(trans.transform.translation.x, trans.transform.translation.y, trans.transform.translation.z)
        quaternion = Quaternion(trans.transform.rotation.x, trans.transform.rotation.y, trans.transform.rotation.z, trans.transform.rotation.w)
        pose = Pose(point, quaternion)

        vector = Vector3(0, 0, 0)
        twist = Twist(vector, vector)

        msg = ModelState('camera', pose, twist, 'world')
        rospy.loginfo(msg)
        pub.publish(msg)

        rospy.sleep(0.1)


markers = rviz_tools.RvizMarkers('/map', 'visualization_marker')

while not rospy.is_shutdown():

    # Axis:

    # Publish an axis using a numpy transform matrix
    T = transformations.translation_matrix((1, 0, 0))
    axis_length = 0.4
    axis_radius = 0.05
    markers.publishAxis(T, axis_length, axis_radius,
                        5.0)  # pose, axis length, radius, lifetime

    # Publish an axis using a ROS Pose Msg
    P = Pose(Point(2, 0, 0), Quaternion(0, 0, 0, 1))
    axis_length = 0.4
    axis_radius = 0.05
    markers.publishAxis(P, axis_length, axis_radius,
                        5.0)  # pose, axis length, radius, lifetime

    # Line:

    # Publish a line between two ROS Point Msgs
    point1 = Point(-2, 1, 0)
    point2 = Point(2, 1, 0)
    width = 0.05
    markers.publishLine(point1, point2, 'green', width,
                        5.0)  # point1, point2, color, width, lifetime

    # Publish a line between two ROS Poses
Example #35
    def __init__(self):
        rospy.init_node('position_nav_node', anonymous=True)
        rospy.on_shutdown(self.shutdown)

        # How long in seconds should the robot pause at each location?
        self.rest_time = rospy.get_param("~rest_time", 3)

        # Are we running in the fake simulator?
        self.fake_test = rospy.get_param("~fake_test", False)

        # Goal state return values
        goal_states = [
            'PENDING', 'ACTIVE', 'PREEMPTED', 'SUCCEEDED', 'ABORTED',
            'REJECTED', 'PREEMPTING', 'RECALLING', 'RECALLED', 'LOST'
        ]

        # Set up the goal locations. Poses are defined in the map frame.
        # An easy way to find the pose coordinates is to point-and-click
        # Nav Goals in RViz when running in the simulator.
        #
        # Pose coordinates are then displayed in the terminal
        # that was used to launch RViz.
        locations = dict()

        locations['one-1'] = Pose(Point(-0.6353, -0.1005, 0.0),
                                  Quaternion(0.0, 0.0, 0.9793, 0.20249))
        locations['one'] = Pose(Point(-1.4373, 0.2436, 0.0),
                                Quaternion(0.0, 0.0, 0.9764, 0.2159))
        locations['two-1'] = Pose(Point(-0.6353, -0.1005, 0.0),
                                  Quaternion(0.0, 0.0, 0.9793, 0.20249))
        locations['two'] = Pose(Point(-0.3821, -0.5335, 0.0),
                                Quaternion(0.0, 0.0, -0.8500, 0.5267))
        locations['three-1'] = Pose(Point(-0.1248, 0.4022, 0.0),
                                    Quaternion(0.0, 0.0, 0.7374, 0.67542))
        locations['three'] = Pose(Point(-0.8292, 1.0313, 0.0),
                                  Quaternion(0.0, 0.0, 0.9744, 0.2243))
        locations['four-1'] = Pose(Point(-0.1248, 0.4022, 0.0),
                                   Quaternion(0.0, 0.0, 0.7374, 0.67542))
        locations['four-2'] = Pose(Point(0.5078, 0.1495, 0.0),
                                   Quaternion(0.0, 0.0, 0.9818, 0.1898))
        locations['four'] = Pose(Point(0.4435, 0.3268, 0.0),
                                 Quaternion(0.0, 0.0, 0.5583, 0.8296))

        locations['initial'] = locations['one']

        # 2018.8.6 backhome code
        # locations['back'] = initial_pose

        # Publisher to manually control the robot (e.g. to stop it)
        self.cmd_vel_pub = rospy.Publisher('cmd_vel', Twist, queue_size=10)
        self.IOTnet_pub = rospy.Publisher('/IOT_cmd', IOTnet, queue_size=10)
        self.initial_pub = rospy.Publisher('initialpose',
                                           PoseWithCovarianceStamped,
                                           queue_size=10)

        # Subscribe to the move_base action server
        self.move_base = actionlib.SimpleActionClient("move_base",
                                                      MoveBaseAction)
        rospy.loginfo("Waiting for move_base action server...")

        # Wait 60 seconds for the action server to become available
        self.move_base.wait_for_server(rospy.Duration(60))

        rospy.loginfo("Connected to move base server")

        # A variable to hold the initial pose of the robot to be set by
        # the user in RViz
        initial_pose = PoseWithCovarianceStamped()
        # Variables to keep track of success rate, running time,
        # and distance traveled
        n_locations = len(locations)
        n_goals = 0
        n_successes = 0
        i = 0
        distance_traveled = 0
        start_time = rospy.Time.now()
        running_time = 0
        location = ""
        last_location = ""
        sequeue = ['four-2', 'four']

        # Get the initial pose from the user
        rospy.loginfo(
            "*** Click the 2D Pose Estimate button in RViz to set the robot's initial pose..."
        )
        #rospy.wait_for_message('initialpose', PoseWithCovarianceStamped)
        self.last_location = Pose()
        rospy.Subscriber('initialpose', PoseWithCovarianceStamped,
                         self.update_initial_pose)

        setpose = PoseWithCovarianceStamped(
            Header(0, rospy.Time(), "map"),
            PoseWithCovariance(locations['initial'], [
                0.25, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.25, 0.0, 0.0, 0.0, 0.0,
                0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
                0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
                0.06853891945200942
            ]))
        self.initial_pub.publish(setpose)
        # Make sure we have the initial pose
        rospy.sleep(1)

        while initial_pose.header.stamp == "":
            rospy.sleep(1)
        rospy.sleep(1)
        #  locations['back'] = Pose()
        rospy.loginfo("Starting position navigation ")

        # Begin the main loop and run through a sequence of locations
        while not rospy.is_shutdown():
            # If we've gone through the whole sequence, then exit
            if i == len(sequeue):
                rospy.logwarn("Reached all destinations, exiting...")
                rospy.signal_shutdown('Quit')
                break

            # Get the next location in the current sequence
            location = sequeue[i]

            # Keep track of the distance traveled.
            # Use updated initial pose if available.
            if initial_pose.header.stamp == "":
                distance = sqrt(
                    pow(
                        locations[location].position.x -
                        locations[last_location].position.x, 2) + pow(
                            locations[location].position.y -
                            locations[last_location].position.y, 2))
            else:
                rospy.loginfo("Updating current pose.")
                distance = sqrt(
                    pow(
                        locations[location].position.x -
                        initial_pose.pose.pose.position.x, 2) + pow(
                            locations[location].position.y -
                            initial_pose.pose.pose.position.y, 2))
                initial_pose.header.stamp = ""

            # Store the last location for distance calculations
            last_location = location

            # Increment the counters
            i += 1
            n_goals += 1

            # Set up the next goal location
            self.goal = MoveBaseGoal()
            self.goal.target_pose.pose = locations[location]
            self.goal.target_pose.header.frame_id = 'map'
            self.goal.target_pose.header.stamp = rospy.Time.now()

            # Let the user know where the robot is going next
            rospy.loginfo("Going to: " + str(location))

            # Start the robot toward the next location
            self.move_base.send_goal(self.goal)  #move_base.send_goal()

            # Allow 5 minutes to get there
            finished_within_time = self.move_base.wait_for_result(
                rospy.Duration(300))
            # Map to 4 point nav
            cur_position = -1
            position_seq = ['one', 'two', 'three', 'four']
            if str(location) in position_seq:
                cur_position = position_seq.index(str(location)) + 1

            # Check for success or failure
            if not finished_within_time:
                self.move_base.cancel_goal()  #move_base.cancel_goal()
                rospy.loginfo("Timed out achieving goal")
            else:
                state = self.move_base.get_state()  #move_base.get_state()
                if state == GoalStatus.SUCCEEDED:
                    rospy.loginfo("Goal succeeded!")
                    n_successes += 1
                    distance_traveled += distance
                    rospy.loginfo("State:" + str(state))
                    if cur_position != -1:
                        os.system("/home/sz/scripts/./arm_trash.sh")
                        self.IOTnet_pub.publish(5)
                        rospy.sleep(12)

                else:
                    rospy.loginfo("Goal failed with error code: " +
                                  str(goal_states[state]))
                    if cur_position != -1:
                        os.system("/home/sz/scripts/./arm_trash.sh")
                        self.IOTnet_pub.publish(5)
                        rospy.sleep(12)


#                if cur_position != -1:
#                   os.system("/home/sz/scripts/./arm_trash.sh")

# How long have we been running?
            running_time = rospy.Time.now() - start_time
            running_time = running_time.secs / 60.0

            # Print a summary success/failure, distance traveled and time elapsed
            rospy.loginfo("Success so far: " + str(n_successes) + "/" +
                          str(n_goals) + " = " +
                          str(100 * n_successes / n_goals) + "%")
            rospy.loginfo("Running time: " + str(trunc(running_time, 1)) +
                          " min Distance: " +
                          str(trunc(distance_traveled, 1)) + " m")
            rospy.sleep(self.rest_time)
    def __init__(self):
        self.n_obs = 2

        print('Start node')
        # Initialize node
        rospy.init_node('ellipse_publisher', anonymous=True)
        rate = rospy.Rate(200) # Frequency
        rospy.on_shutdown(self.shutdown)
        
        # Create publishers
        self.pub_vel = rospy.Publisher('lwr/joint_controllers/passive_ds_command_vel', Twist, queue_size=5)
        pub_orient = rospy.Publisher('lwr/joint_controllers/passive_ds_command_orient', Quaternion, queue_size=5)
        pub_cent = [0]*self.n_obs
        pub_cent[0] = rospy.Publisher("obs0_cent", PointStamped, queue_size=5)
        pub_cent[1] = rospy.Publisher("obs1_cent", PointStamped, queue_size=5)
        
        # Create listener
        pose_sub = [0]*self.n_obs
        pose_sub[0] = rospy.Subscriber("obstacle0", Obstacle, self.callback_obs0)
        pose_sub[1] = rospy.Subscriber("obstacle1", Obstacle, self.callback_obs1)

        basket_sub = rospy.Subscriber("basket", Obstacle, self.callback_basket)
        convey_sub = rospy.Subscriber("convey", Obstacle, self.callback_convey)

        attr_sub = rospy.Subscriber("attractor", PoseStamped, self.callback_attr)
        
        self.listener = tf.TransformListener() # TF listener

        self.obs = [0]*self.n_obs
        self.pos_obs=[0]*self.n_obs
        self.quat_obs=[0]*self.n_obs

        # Set waiting variables to true
        print('wait obstacle')
        self.awaitObstacle = [True for i in range(self.n_obs)]
        self.awaitAttr = True
        self.obs_basket=[]        
        self.awaitBasket = True

        self.obs_convey=[]        
        self.awaitConvey = True

        while any(self.awaitObstacle):
            rospy.sleep(0.1)  # wait

        while self.awaitBasket:
            print('Waiting for basket obstacle')
            rospy.sleep(0.1)  # wait

        while self.awaitConvey:
            print('Waiting for conveyer obstacle')
            rospy.sleep(0.1)  # wait

        while self.awaitAttr:
            rospy.sleep(0.1)

        print('got it all')

        # Get initial transformation
        rospy.loginfo('Getting transformations.')
        awaitingTrafo_ee=True
        awaitingTrafo_obs= [True] * self.n_obs
        awaitingTrafo_attr=True

        for i in range(self.n_obs):
            while awaitingTrafo_obs[i]: # Trafo obs1 position
                try:
                    self.pos_obs[i], self.quat_obs[i] = self.listener.lookupTransform("/world", "/object_{}/base_link".format(i), rospy.Time(0))
                
                    awaitingTrafo_obs[i]=False
                except:
                    rospy.loginfo('Waiting for obstacle {} TRAFO'.format(i))
                    rospy.sleep(0.2)  # wait

        while(awaitingTrafo_ee): # TRAFO robot position

            try: # Get transformation
                self.pos_rob, self.quat_rob = self.listener.lookupTransform("/world", "/lwr_7_link", rospy.Time(0))
                awaitingTrafo_ee = False
            except (tf.LookupException, tf.ConnectivityException, tf.ExtrapolationException):
                rospy.loginfo('Waiting for Robot TF')
                rospy.sleep(0.2)  # wait
            
        #while(awaitingTrafo_attr): # Trafo obs2 position
            # try:
                # self.pos_attr, self.quat_attr = self.listener.lookupTransform("/world", "/attr/base_link", rospy.Time(0))                   
                # awaitingTrafo_attr=False
            # except:
                # rospy.loginfo("Waiting for Attractor TF")
                # rospy.sleep(0.2)  # Wait zzzz*
                
        rospy.loginfo("All TF recieved")

        self.attractor_recieved = False # Set initial value
        # Variables for trajectory prediction
        while not rospy.is_shutdown():
            if np.sum(np.abs([self.pos_attr.x, self.pos_attr.y, self.pos_attr.z] ) ) == 0:
                # Default pos -- command with angular position in high level controller
                rate.sleep()
                if self.attractor_recieved: # called only first time
                    self.zero_vel()
                    self.attractor_recieved = False
                continue
            
            self.attractor_recieved = True # Nonzero attractor received
            
            
            try: # Get transformation
                self.pos_rob, self.quat_rob = self.listener.lookupTransform("/world", "/lwr_7_link", rospy.Time(0))                   
            except (tf.LookupException, tf.ConnectivityException, tf.ExtrapolationException):
                rospy.loginfo("No <</lwr_7_link>> recieved")
                #continue
            x0_lwr7 = np.array([0,0,0])+np.array(self.pos_rob)

            obs_roboFrame = copy.deepcopy(self.obs)  # TODO -- change, because only reference is created not copy...
            for n in range(len(self.obs)): # for all obstacles
                try: # Get transformation
                    self.pos_obs[n], self.quat_obs[n] = self.listener.lookupTransform("/world", "/object_{}/base_link".format(n), rospy.Time(0))                   
                except:# (tf.LookupException, tf.ConnectivityException, tf.ExtrapolationException):
                    rospy.loginfo("No <<object{}>> recieved".format(n))
                    #continue

                quat_thr = tf.transformations.quaternion_from_euler(self.obs[n].th_r[0],
                                                                    self.obs[n].th_r[1],
                                                                    self.obs[n].th_r[2])
                quat_thr_mocap = tf.transformations.quaternion_multiply(self.quat_obs[n], quat_thr)

                th_r_mocap = tf.transformations.euler_from_quaternion(quat_thr_mocap)

                # TODO apply transform to x0
                obs_roboFrame[n].x0 = self.obs[n].x0 + np.array(self.pos_obs[n]) # Translate into reference frame
                obs_roboFrame[n].th_r =  [th_r_mocap[0],
                                          th_r_mocap[1],
                                          th_r_mocap[2]]# Rotate into reference frame
                                          
            q0 = Quaternion(1, 0, 0, 0) # Unit quaternion for trajectory

            x = x0_lwr7     # x for trajectory creation
            x_hat = x0_lwr7 # x for the linear DS

            obs_roboFrame[0].w = [0,0,0]
            obs_roboFrame[0].xd = [0,0,0]

            x_attr = np.array([self.pos_attr.x, self.pos_attr.y, self.pos_attr.z]) # attractor position

            # Create obstacles class instances
            obs_list = []
            for ii in range(self.n_obs):
                obs_list.append(
                    ObstacleClass(
                        a=obs_roboFrame[ii].a,
                        p=obs_roboFrame[ii].p,
                        x0=obs_roboFrame[ii].x0,
                        th_r=obs_roboFrame[ii].th_r,
                        sf=obs_roboFrame[ii].sf
                        ) )
                obs_list[ii].draw_ellipsoid()
            
            # Get common center
            obs_common_section(obs_list)

            # Add the basket wall as an obstacle -- it is not considered in the common center and is therefore added later (HACK for fast simulation)
            self.obs_basket.center_dyn = copy.deepcopy(self.obs_basket.x0)
            obs_list.append(self.obs_basket)

            self.obs_convey.center_dyn = copy.deepcopy(self.obs_convey.x0)
            obs_list.append(self.obs_convey)

            ds_init = linearAttractor_const(x, x0=x_attr)
            #print('vel_init', np.sqrt(np.sum(ds_init**2)))
            ds_modulated = obs_avoidance_convergence(x, ds_init, obs_list)
            #print('vel', np.sqrt(np.sum(ds_modulated**2)))

            vel = Twist()
            vel.linear = Vector3(ds_modulated[0],ds_modulated[1],ds_modulated[2])
            vel.angular = Vector3(0,0,0)
            self.pub_vel.publish(vel)

            quat = Quaternion(self.quat_attr.x,
                              self.quat_attr.y,
                              self.quat_attr.z,
                              self.quat_attr.w)
            pub_orient.publish(quat)
            
            print("Velcontrol <<world>>:", ds_modulated)

            showCenter = True
            if showCenter:
                point = PointStamped()
                point.header.frame_id = 'world'
                point.header.stamp = rospy.Time.now()

                for ii in range(self.n_obs):
                    if hasattr(obs_list[ii], 'center_dyn'):
                        point.point = Point(obs_list[ii].center_dyn[0],
                                            obs_list[ii].center_dyn[1],
                                            obs_list[ii].center_dyn[2])

                    else:
                        point.point = Point(obs_list[ii].x0[0],
                                            obs_list[ii].x0[1],
                                            obs_list[ii].x0[2])
                    pub_cent[ii].publish(point)


                
            
            rate.sleep()
            
        self.shutdown()
flagg = 0
init = 0
prevpnt = 0
offsetcnt = 0
stuckpnt = []
firstgoal=0
checkpnt = 0

cost_update = OccupancyGridUpdate()
costmap = OccupancyGrid()
grid = OccupancyGrid()
feedback = MoveBaseActionFeedback()
result = MoveBaseActionResult()

newPose = PoseStamped()
q = Quaternion()

pub = rospy.Publisher('/move_base_simple/goal', PoseStamped, queue_size=30)


def costupCb(data):
    global flagg, flagm
    global init
    global newcost
    global cost_update  # update the module-level copy, not a local shadow
    flagg = 1
    cost_update = data

    if init == 1 and flagm == 0:
        gpnts = cost_update.data
        gwidth = cost_update.width
        gheight = cost_update.height
Example #38
# for i in desired_position:
# navigoal_pose=get_pose(i);

# input goal pose
goal_x = 5.4
goal_y = 0.1
goal_yaw = 0.0

# fill ROS message
pose = PoseStamped()
pose.header.stamp = rospy.Time.now()
pose.header.frame_id = "map"
pose.pose.position = Point(goal_x, goal_y, 0)
quat = tf.transformations.quaternion_from_euler(0, 0, goal_yaw)
pose.pose.orientation = Quaternion(*quat)

goal = MoveBaseGoal()
goal.target_pose = pose

# send message to the action server
cli.send_goal(goal)

# wait for the action server to complete the order
cli.wait_for_result()

# print result of navigation
action_state = cli.get_state()
if action_state == GoalStatus.SUCCEEDED:
    rospy.loginfo("Navigation Succeeded.")
    def run(self):
        rosRate = rospy.Rate(self.rate)
        rospy.loginfo("Publishing Odometry data at: " + str(self.rate) + " Hz")

        old_left = old_right = 0
        left = right = 0  # last encoder reading, reported if a read fails
        bad_encoder_count = 0

        while not rospy.is_shutdown() and not self.finished.isSet():
            now = rospy.Time.now()
            if now > self.t_next:
                dt = now - self.then
                self.then = now
                dt = dt.to_sec()

                # read encoders
                try:
                    left, right = self.mySerializer.get_encoder_count([1, 2])
                except:
                    self.encoder_error = True
                    rospy.loginfo("Could not read encoders: " +
                                  str(bad_encoder_count))
                    bad_encoder_count += 1
                    rospy.loginfo("Encoder counts at exception: " + str(left) +
                                  ", " + str(right))
                    continue

                if self.encoder_error:
                    self.enc_left = left
                    self.enc_right = right
                    self.encoder_error = False
                    continue

                # calculate odometry
                dleft = (left - self.enc_left) / self.ticks_per_meter
                dright = (right - self.enc_right) / self.ticks_per_meter

                self.enc_left = left
                self.enc_right = right

                dxy_ave = (dleft + dright) / 2.0
                dth = (dright - dleft) / self.wheel_track
                vxy = dxy_ave / dt
                vth = dth / dt

                if (dxy_ave != 0):
                    dx = cos(dth) * dxy_ave
                    dy = -sin(dth) * dxy_ave
                    self.x += (cos(self.th) * dx - sin(self.th) * dy)
                    self.y += (sin(self.th) * dx + cos(self.th) * dy)

                if (dth != 0):
                    self.th += dth

                quaternion = Quaternion()
                quaternion.x = 0.0
                quaternion.y = 0.0
                quaternion.z = sin(self.th / 2.0)
                quaternion.w = cos(self.th / 2.0)

                # Create the odometry transform frame broadcaster.
                self.odomBroadcaster.sendTransform(
                    (self.x, self.y, 0),
                    (quaternion.x, quaternion.y, quaternion.z, quaternion.w),
                    rospy.Time.now(), "base_link", "odom")

                odom = Odometry()
                odom.header.frame_id = "odom"
                odom.child_frame_id = "base_link"
                odom.header.stamp = now
                odom.pose.pose.position.x = self.x
                odom.pose.pose.position.y = self.y
                odom.pose.pose.position.z = 0
                odom.pose.pose.orientation = quaternion
                odom.twist.twist.linear.x = vxy
                odom.twist.twist.linear.y = 0
                odom.twist.twist.angular.z = vth

                self.odomPub.publish(odom)

                self.t_next = now + self.t_delta

            rosRate.sleep()
def getOrientation(self, a, b):
    q = tf.transformations.quaternion_from_euler(0, 0, np.arctan2((b.y - a.y), (b.x - a.x)))
    return Quaternion(q[0], q[1], q[2], q[3])
roslib.load_manifest("interaction_cursor_rviz")

import rospy
from math import *
from interaction_cursor_msgs.msg import InteractionCursorUpdate
from geometry_msgs.msg import Point, Quaternion, Pose, PoseStamped

rospy.init_node("interaction_cursor_test", anonymous = True)
pub = rospy.Publisher("/rviz/interaction_cursor/update", InteractionCursorUpdate)

rate_float = 10
rate = rospy.Rate(rate_float)
rospy.Rate(2).sleep()

while not rospy.is_shutdown():
  t = rospy.get_time()
  icu = InteractionCursorUpdate()
  
  p = Point(0,-3*(1+sin(0.1*t*(2*pi))),0)
  q = Quaternion(0,0,0,1.0)
  icu.pose.pose.position = p
  icu.pose.pose.orientation = q
  icu.pose.header.frame_id = "base_link"

  print("Publishing a message!") #, q.w = %0.2f"%(q.w)
  pub.publish(icu)
  #print "Sleeping..."
  rate.sleep()
  #print "End of loop!"

    def generate_grasp_poses(self, object_pose):
        # Compute all the points of the sphere with step X
        # http://math.stackexchange.com/questions/264686/how-to-find-the-3d-coordinates-on-a-celestial-spheres-surface
        radius = self._grasp_desired_distance
        ori_x = 0.0
        ori_y = 0.0
        ori_z = 0.0
        sphere_poses = []
        rotated_q = quaternion_from_euler(0.0, 0.0, math.radians(180))

        yaw_qtty = int((self._max_degrees_yaw - self._min_degrees_yaw) /
                       self._step_degrees_yaw)  # NOQA
        pitch_qtty = int((self._max_degrees_pitch - self._min_degrees_pitch) /
                         self._step_degrees_pitch)  # NOQA
        info_str = "Creating poses with parameters:\n" + \
            "Radius: " + str(radius) + "\n" \
            "Yaw from: " + str(self._min_degrees_yaw) + \
            " to " + str(self._max_degrees_yaw) + " with step " + \
            str(self._step_degrees_yaw) + " degrees.\n" + \
            "Pitch from: " + str(self._min_degrees_pitch) + \
            " to " + str(self._max_degrees_pitch) + \
            " with step " + str(self._step_degrees_pitch) + " degrees.\n" + \
            "Total: " + str(yaw_qtty) + " yaw * " + str(pitch_qtty) + \
            " pitch = " + str(yaw_qtty * pitch_qtty) + " grap poses."
        rospy.loginfo(info_str)

        # altitude is yaw
        for altitude in range(self._min_degrees_yaw, self._max_degrees_yaw,
                              self._step_degrees_yaw):  # NOQA
            altitude = math.radians(altitude)
            # azimuth is pitch
            for azimuth in range(self._min_degrees_pitch,
                                 self._max_degrees_pitch,
                                 self._step_degrees_pitch):  # NOQA
                azimuth = math.radians(azimuth)
                # This gets all the positions
                x = ori_x + radius * math.cos(azimuth) * math.cos(altitude)
                y = ori_y + radius * math.sin(altitude)
                z = ori_z + radius * math.sin(azimuth) * math.cos(altitude)
                # this gets all the vectors pointing outside of the center
                # quaternion as x y z w
                q = quaternion_from_vectors([radius, 0.0, 0.0], [x, y, z])
                # quaternion_from_vectors cannot compute a rotation when the vectors are parallel
                if q is None:
                    # with this we add the missing arrow
                    q = rotated_q
                # We invert the orientations to look inwards by multiplying
                # with a quaternion 180deg rotation on yaw
                q = quaternion_multiply(q, rotated_q)

                # We actually want roll to be always 0.0 so we approach
                # the object with the gripper always horizontal
                # this rotation can be tuned with the dynamic params
                # multiplying later on
                roll, pitch, yaw = euler_from_quaternion(q)
                q = quaternion_from_euler(math.radians(0.0), pitch, yaw)

                x += object_pose.pose.position.x
                y += object_pose.pose.position.y
                z += object_pose.pose.position.z
                current_pose = Pose(Point(x, y, z), Quaternion(*q))
                sphere_poses.append(current_pose)
        return sphere_poses
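The loop above samples the sphere with x = r*cos(azimuth)*cos(altitude), y = r*sin(altitude), z = r*sin(azimuth)*cos(altitude). A self-contained sketch (hypothetical radius and sweep bounds, no ROS) confirming that every sample lies at the desired grasp distance:

import math

radius = 0.3  # hypothetical grasp distance in meters
for altitude_deg in range(-45, 46, 15):      # illustrative yaw sweep
    for azimuth_deg in range(-45, 46, 15):   # illustrative pitch sweep
        alt = math.radians(altitude_deg)
        az = math.radians(azimuth_deg)
        x = radius * math.cos(az) * math.cos(alt)
        y = radius * math.sin(alt)
        z = radius * math.sin(az) * math.cos(alt)
        # cos^2 + sin^2 collapses to radius**2, so each point is on the sphere
        assert abs(math.sqrt(x * x + y * y + z * z) - radius) < 1e-9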
Exemple #43
0
def yaw_to_quaternion(yaw):
    return Quaternion(0, 0, sin(yaw / 2), cos(yaw / 2))
Exemple #44
0
print(path)
rospy.wait_for_service("/gazebo/delete_model")
try:
    delete = rospy.ServiceProxy("/gazebo/delete_model", DeleteModel)
    delete("polaris_ranger_ev")
    delete("marker")
except rospy.ServiceException as e:
    print("Service call failed: ", e)

rospy.wait_for_service("/gazebo/spawn_urdf_model")
try:
    spawner = rospy.ServiceProxy("/gazebo/spawn_urdf_model", SpawnModel)
    spawner(
        "polaris_ranger_ev",
        open(path + "/urdf/polaris.urdf", 'r').read(), "polaris",
        Pose(position=Point(x, y, 1),
             orientation=Quaternion(0, 0, 0.3826834, 0.9238795)), "world")
except rospy.ServiceException as e:
    print("Service call failed: ", e)

rospy.wait_for_service("/gazebo/spawn_sdf_model")
try:
    spawner = rospy.ServiceProxy("/gazebo/spawn_sdf_model", SpawnModel)
    spawner("marker",
            open(path + "/models/marker/model.sdf", 'r').read(), "marker",
            Pose(position=Point(x, y, 1), orientation=Quaternion(0, 0, 0, 1)),
            "world")
except rospy.ServiceException as e:
    print("Service call failed: ", e)
Exemple #45
0
def angle_to_quaternion(angle):
    """Convert an angle in radians into a quaternion _message_."""
    return Quaternion(*tf.transformations.quaternion_from_euler(0, 0, angle))
Exemple #46
0
rospy.init_node('demo_move')
rospy.sleep(2.)
left_arm = ArmPlanner("left")
right_arm = ArmPlanner("right")
i = 0

#position = Point(0.4, 0.2, 0.6)
#orientation = Quaternion(0., 0., 0., 1.)
#left_arm.move_pose(position, orientation)
#position = Point(0.4, -0.2, 0.6)
#a = list(quaternion_from_euler(0., 0., pi/2))
#orientation = Quaternion(*a)
#right_arm.move_pose(position, orientation)

left_arm.add_box("xD", Point(0., 0., 0.3), Quaternion(0., 0., 0., 1.),
                 (0.2, 2.0, 0.6))
right_arm.add_box("xD", Point(0., 0., 0.3), Quaternion(0., 0., 0., 1.),
                  (0.2, 2.0, 0.6))

z = 1.0
y = 0.3
positions_l = [
    Point(0.4, y, z + 0.1),
    Point(0.4, 0.2, z),
    Point(0.3, y + 0.1, z)
]
orientations_l = [
    Quaternion(0., 0., 0., 1.),
    Quaternion(0., 0., 0., 1.),
    Quaternion(0., 0., -sqrt(2) / 2, sqrt(2) / 2)
]
Exemple #47
0
def main():
    rospy.init_node("ik_pick_and_place_demo")
    limb = 'right'  #this chooses arm, change to calibrate appropriate gripper and move arm
    hover_distance = 0.15  # meters
    #call/define class?
    pnp = PickAndPlace(limb, hover_distance)
    limb_choice = pnp._limb  #from class pnp, get limb passed to it
    gripper = pnp._gripper  #also get gripper
    ######lets try to get vicon data here for target pose etc##################
    ViconData = vicon()
    print(ViconData)
    ######################################################################

    #subscribe to right hand camera, centering loop has been commented out for now
    #when center over object, then call "pick" function to pick it up because hand orientation should already
    #be oriented along z-axis.  Pick function probably needs to be updated to slow down pick to accurately pick it up
    rospy.Subscriber('/cameras/right_hand_camera/image', Image,
                     pnp.image_callback)

    #actual Starting Joint angles for arm
    starting_joint_angles = limb_choice.joint_angles()
    print "starting joint angles are:"
    print limb_choice.joint_angles()

    # gripper calibrate
    gripper.calibrate()
    print "Calibrate"
    if gripper.close():
        gripper.open()
        rospy.sleep(0.5)
        print "gripper closed, opening girpper"

    # An orientation for gripper fingers to be overhead and parallel to the obj


#####lets change this, make it more variable for different applications, object may be at an angle
    current_pose = limb_choice.endpoint_pose()
    x = current_pose['orientation'].x
    y = current_pose['orientation'].y
    z = current_pose['orientation'].z
    w = current_pose['orientation'].w

    overhead_orientation = Quaternion(x, y, z, w)
    block_poses = list()
    # The Pose of the block in its initial location.
    # You may wish to replace these poses with estimates
    # from a perception node.
    block_poses.append(
        Pose(position=Point(x=0.65, y=-0.45, z=-0.129),
             orientation=overhead_orientation))
    # Feel free to add additional desired poses for the object.
    # Each additional pose will get its own pick and place.
    block_poses.append(
        Pose(position=Point(x=0.75, y=0.0, z=-0.129),
             orientation=overhead_orientation))
    # Move to the desired starting angles
    pnp.move_to_start(starting_joint_angles)
    idx = 0
    while not rospy.is_shutdown():
        print("\nArm Picking...")
        pnp.pick(block_poses[idx])
        print("\nArm Placing...")
        idx = (idx + 1) % len(block_poses)
        pnp.place(block_poses[idx])
        limb = 'right'
        print("\nRight Arm Picking...")
        pnp.pick(block_poses[idx])
        print("\nRight Arm Placing...")
        idx = (idx + 1) % len(block_poses)
        pnp.place(block_poses[idx])
        limb = 'right'
    return 0
Exemple #48
0
 def get_quaternion(self, yaw):
     q_angle = quaternion_from_euler(0, 0, yaw,
                                     axes='sxyz')  # is axes ok ??
     q = Quaternion(*q_angle)
     return q
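Regarding the "is axes ok ??" comment above: 'sxyz' (static x-y-z) is the default convention in tf.transformations, so passing it explicitly changes nothing for a pure yaw. A quick check:

from math import sin, cos, pi
from tf.transformations import quaternion_from_euler

yaw = pi / 3
qx, qy, qz, qw = quaternion_from_euler(0, 0, yaw, axes='sxyz')
assert abs(qz - sin(yaw / 2)) < 1e-9
assert abs(qw - cos(yaw / 2)) < 1e-9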
Exemple #49
0
from geometry_msgs.msg import PoseStamped
from geometry_msgs.msg import Point
from geometry_msgs.msg import Quaternion

if __name__ == '__main__':
    #initial ros node
    rospy.init_node('vive_pose', anonymous=True)
    pub = rospy.Publisher("/vive_pose", PoseStamped, queue_size=10)

    #set refresh rate
    rate = rospy.Rate(120)

    #initial vive
    vive = openvr_wrapper.OpenvrWrapper()
    rospy.loginfo("=== vive ros started ===")
    vive.print_discovered_objects()
    vive_poseStamped = PoseStamped()
    vive_poseStamped.header.frame_id = 'world'

    # get the poseStamped from pose
    while not rospy.is_shutdown():
        translation, quaternion = vive.devices[
            "tracker_1"].get_pose_quaternion()
        vive_poseStamped.pose.position = Point(*translation)
        vive_poseStamped.pose.orientation = Quaternion(*quaternion)
        vive_poseStamped.header.stamp = rospy.Time.now()
        pub.publish(vive_poseStamped)

        rate.sleep()
    def pc2CB(self, msg):
        if self.init:
            # Convert ROS PointCloud2 msg to PCL data
            cloud = pcl_helper.ros_to_pcl(msg)

            # Filter the Cloud
            fil_cloud = filtering_helper.do_passthrough_filter(cloud, 'x', self.x_min, self.x_max)
            filtered_cloud = filtering_helper.do_passthrough_filter(fil_cloud, 'y', self.y_min, self.y_max)
            # Turn cloud into colorless cloud
            colorless_cloud = pcl_helper.XYZRGB_to_XYZ(filtered_cloud)

            # Get groups of cluster of points
#            clusters = self.getClusters(colorless_cloud, 0.05, 20, 250)   # Table on Top AGV - 60~230
            clusters = self.getClusters(colorless_cloud, 0.05, 20, 350)
            print "len(clusters)", len(clusters)
#            for i in range(0,len(clusters)):
#                print "len(clusters[", i, "]", len(clusters[i])

            # Get mean points from clusters
            mean_pts = []
            for cluster in clusters:
                mean_pts.append(self.getMeanPoint(cluster, filtered_cloud))
            print "len(mean_pts)", len(mean_pts)
#            for i in range(0,len(mean_pts)):
#                print "mean_pts[", i, "]", mean_pts[i]

            # Remove other points, leave the table points only
            table_pts = []
            table_pts = self.getTablePoints(mean_pts)
            print "len(table_pts)", len(table_pts)
            print table_pts

            for i in range(len(table_pts)):
                self.vizPoint(i, table_pts[i], ColorRGBA(1,1,1,1), "base_link")

            # Get transform of "base_link" relative to "map"
            trans = self.getTransform("map", "base_link")

            # Finding 2 middle points from 4 table legs and use them as path_pts
            path_pts = []
            if(len(table_pts)==4):
                for p1 in table_pts:
                    for p2 in table_pts:
                        if(p1==p2):
                            break
                        # Register 2 middle_pts of 4 table legs
                        if(0.73 < self.getDistance(p1, p2) < 0.83):
                            path_pts.append(self.getMiddlePoint(p1, p2))
                            break

            print "len(path_pts)", len(path_pts)
            print path_pts

            # Turn path_pts into map frame
            for i in range(len(path_pts)):
                path_pts[i] = tf2_geometry_msgs.do_transform_pose(self.toPoseStamped(path_pts[i], Quaternion(0,0,0,1)), trans).pose.position

            # Record the starting point once
            if self.record_once:
                self.start_pt = trans.transform.translation
                self.record_once = False

            # Create a list with distance information between initial_pt and path_pts
            distance_list = [self.getDistance(p, self.start_pt) for p in path_pts]

            # Rearrange the path_pts to be in ascending order
            path_pts = self.getAscend(path_pts, distance_list)

            # Duplicate the path_pts to prevent from being edited
            table_pathpts = path_pts[:]

            if(len(table_pathpts)==2):
                # Get gradient of the Line of Best Fit
                m1 = self.getGradientLineOfBestFit(table_pathpts)

                # Insert another point on the line with d[m] away from the 1st table_pathpts
                path_pts.insert(0, self.getPointOnLine(m1, 1.0, table_pathpts[0], self.start_pt))

                # Insert a point ahead d[m] of table to the begin of path_pts
#                ahead_pt = self.getPointAheadTable(table_pathpts, 1.0)
#                path_pts.insert(0, ahead_pt)

                # Insert a point ahead d[m] of table to the middle of table_pathpts
#                midway_pt = self.getPointAheadTable(table_pathpts, -0.4)
#                path_pts.insert(2, midway_pt)
                # Insert another point on the line with d[m] away from the 1st table_pathpts
#                path_pts.insert(2, self.getPointOnLine(m1, 0.05, table_pathpts[1], self.start_pt))

                # Remove the last point of path_pts
#                path_pts = path_pts[:len(path_pts)-1]

                # Add the AGV start_pt to the begin of path_pts
#                path_pts.insert(0, self.start_pt)

                # Visualize the path_pts
#                self.vizPoint(0, path_pts[0], ColorRGBA(1,0,0,1), "map") # Red
#                self.vizPoint(1, path_pts[1], ColorRGBA(1,1,0,1), "map") # Yellow
#                self.vizPoint(2, path_pts[2], ColorRGBA(0,0,1,1), "map") # Blue

                # Find the heading of the table (last 2 path_pts)
                heading_q = self.getOrientation(table_pathpts[0], table_pathpts[1])

                # Publish the path
                self.route_pub.publish(self.getPath2Table(path_pts, heading_q))
            print "-----------------"
Exemple #51
0
    def __init__(self):
        rospy.init_node('random_walker', anonymous=False)
        rospy.on_shutdown(self.shutdown)

        self.closestPerson = Point()
        self.closestPerson.x = 10.0
        self.closestPerson.y = 0.0
        self.closestPerson.z = 1.5

        #Subscribe to closest points
        rospy.Subscriber("/closest_person", PointStamped, self.callback)

        #Create waypoint list
        waypoints = list()

        goal = MoveBaseGoal()
        goal.target_pose.header.frame_id = 'map'
        goal.target_pose.header.stamp = rospy.Time.now()

        quadro_ist = WayPoint()
        goal.target_pose.pose = Pose(
            Point(6.82583618164, -0.298132061958, 0.0),
            Quaternion(0.0, 0.0, 0.0562285211913, 0.998417925222))
        quadro_ist.goal = deepcopy(goal)
        quadro_ist.gaze.fixation_point.point.x = 1.5
        quadro_ist.gaze.fixation_point.point.y = -0.161539276911
        quadro_ist.gaze.fixation_point.point.z = 0.619330521451
        quadro_ist.name = "Quadro IST"
        quadro_ist.speechString = "_________Quadro interessante"
        waypoints.append(quadro_ist)

        nem_carne_nem_peixe = WayPoint()
        goal.target_pose.pose = Pose(
            Point(6.8226184845, -0.382613778114, 0.0),
            Quaternion(0.0, 0.0, 0.994299805782, -0.10662033681))
        nem_carne_nem_peixe.goal = deepcopy(goal)
        nem_carne_nem_peixe.gaze.fixation_point.point.x = 1.5
        nem_carne_nem_peixe.gaze.fixation_point.point.y = -0.601618178873
        nem_carne_nem_peixe.gaze.fixation_point.point.z = 0.224095496349
        nem_carne_nem_peixe.name = "Nem carne nem peixe"
        nem_carne_nem_peixe.speechString = "silence5s"
        waypoints.append(nem_carne_nem_peixe)

        escadas_estudo = WayPoint()
        goal.target_pose.pose = Pose(
            Point(5.77421236038, -0.905279278755, 0.0),
            Quaternion(0.0, 0.0, 0.977314637185, -0.211792587085))
        escadas_estudo.goal = deepcopy(goal)
        escadas_estudo.gaze.fixation_point.point.x = 1.5
        escadas_estudo.gaze.fixation_point.point.y = -0.19578182639
        escadas_estudo.gaze.fixation_point.point.z = -0.0652612527037
        escadas_estudo.name = "Escadas da sala de estudo"
        escadas_estudo.speechString = "_________Cuidado com as escadas"
        waypoints.append(escadas_estudo)

        placar_viva_ritmo = WayPoint()
        goal.target_pose.pose = Pose(
            Point(12.8644828796, -2.74097704887, 0.0),
            Quaternion(0.0, 0.0, -0.598005480569, 0.801492011944))
        placar_viva_ritmo.goal = deepcopy(goal)
        placar_viva_ritmo.gaze.fixation_point.point.x = 1.5
        placar_viva_ritmo.gaze.fixation_point.point.y = -0.173235377375
        placar_viva_ritmo.gaze.fixation_point.point.z = 0.498420726409
        placar_viva_ritmo.name = "Placar do Viva o Ritmo"
        placar_viva_ritmo.speechString = "silence5s"
        waypoints.append(placar_viva_ritmo)

        placar_aluger = WayPoint()
        goal.target_pose.pose = Pose(
            Point(14.8170385361, -2.95126199722, 0.0),
            Quaternion(0.0, 0.0, -0.761200110734, 0.648517071031))
        placar_aluger.goal = deepcopy(goal)
        placar_aluger.gaze.fixation_point.point.x = 1.5
        placar_aluger.gaze.fixation_point.point.y = 0.0973222810166
        placar_aluger.gaze.fixation_point.point.z = 0.960639952322
        placar_aluger.name = "Placar de aluguer"
        placar_aluger.speechString = "_________Olha, pessoas a alugar coisas."
        waypoints.append(placar_aluger)

        placar_wc = WayPoint()
        goal.target_pose.pose = Pose(
            Point(18.2798576355, -2.99458551407, 0.0),
            Quaternion(0.0, 0.0, -0.708851171985, 0.705358076424))
        placar_wc.goal = deepcopy(goal)
        placar_wc.gaze.fixation_point.point.x = 1.5
        placar_wc.gaze.fixation_point.point.y = -0.255905735466
        placar_wc.gaze.fixation_point.point.z = -0.132903105059
        placar_wc.name = "Placar do WC"
        placar_wc.speechString = "_________Não consigo ler o que está neste placard"
        waypoints.append(placar_wc)

        maquina_comida = WayPoint()
        goal.target_pose.pose = Pose(
            Point(19.922203064, 12.430390358, 0.0),
            Quaternion(0.0, 0.0, 0.620217316172, 0.78443003558))
        maquina_comida.goal = deepcopy(goal)
        maquina_comida.gaze.fixation_point.point.x = 1.5
        maquina_comida.gaze.fixation_point.point.y = 0.104837718987
        maquina_comida.gaze.fixation_point.point.z = 0.791535363655
        maquina_comida.name = "Maquina da comida"
        maquina_comida.speechString = "_________Parece ser bom. Mas não consigo comer."
        waypoints.append(maquina_comida)

        maquina_comida2 = WayPoint()
        goal.target_pose.pose = Pose(
            Point(19.922203064, 12.430390358, 0.0),
            Quaternion(0.0, 0.0, 0.620217316172, 0.78443003558))
        maquina_comida2.goal = deepcopy(goal)
        maquina_comida2.gaze.fixation_point.point.x = 1.5
        maquina_comida2.gaze.fixation_point.point.y = -0.0830495813139
        maquina_comida2.gaze.fixation_point.point.z = -1.02727840068
        maquina_comida2.name = "Maquina da comida2"
        maquina_comida2.speechString = "silence5s"
        waypoints.append(maquina_comida2)

        maquina_comida3 = WayPoint()
        goal.target_pose.pose = Pose(
            Point(19.922203064, 12.430390358, 0.0),
            Quaternion(0.0, 0.0, 0.620217316172, 0.78443003558))
        maquina_comida3.goal = deepcopy(goal)
        maquina_comida3.gaze.fixation_point.point.x = 1.5
        maquina_comida3.gaze.fixation_point.point.y = 0.00713634985137
        maquina_comida3.gaze.fixation_point.point.z = 0.35562127909
        maquina_comida3.name = "Maquina da comida 3"

        waypoints.append(maquina_comida3)

        maquina_coca_cola = WayPoint()
        goal.target_pose.pose = Pose(
            Point(18.9359931946, 12.5539255142, 0.0),
            Quaternion(0.0, 0.0, 0.728744349156, 0.684785859647))
        maquina_coca_cola.goal = deepcopy(goal)
        maquina_coca_cola.gaze.fixation_point.point.x = 1.5
        maquina_coca_cola.gaze.fixation_point.point.y = -0.278452184481
        maquina_coca_cola.gaze.fixation_point.point.z = 0.34810557026
        maquina_coca_cola.name = "Maquina coca-cola"
        maquina_coca_cola.speechString = "_________Olha, coca-cola"
        waypoints.append(maquina_coca_cola)

        maquina_cafe = WayPoint()
        goal.target_pose.pose = Pose(
            Point(18.9359931946, 12.5539255142, 0.0),
            Quaternion(0.0, 0.0, 0.728744349156, 0.684785859647))
        maquina_cafe.goal = deepcopy(goal)
        maquina_cafe.gaze.fixation_point.point.x = 1.5
        maquina_cafe.gaze.fixation_point.point.y = -0.0379565481792
        maquina_cafe.gaze.fixation_point.point.z = 0.0700224520074
        maquina_cafe.name = "Maquina do cafe"
        maquina_cafe.speechString = "_________Sou alérgico ao café. E a líquidos no geral."
        waypoints.append(maquina_cafe)

        placard_multibanco = WayPoint()
        goal.target_pose.pose = Pose(
            Point(13.902557373, 13.0574836731, 0.0),
            Quaternion(0.0, 0.0, 0.695779308409, 0.718255632759))
        placard_multibanco.goal = deepcopy(goal)
        placard_multibanco.gaze.fixation_point.point.x = 1.5
        placard_multibanco.gaze.fixation_point.point.y = 0.187508077078
        placard_multibanco.gaze.fixation_point.point.z = 0.55854680238
        placard_multibanco.name = "Placard do multibanco"
        placard_multibanco.speechString = "_________Um"
        waypoints.append(placard_multibanco)

        escadas_seguranca = WayPoint()
        goal.target_pose.pose = Pose(
            Point(6.76146888733, 13.4361066818, 0.0),
            Quaternion(0.0, 0.0, 0.917695145462, -0.397285313087))
        escadas_seguranca.goal = deepcopy(goal)
        escadas_seguranca.gaze.fixation_point.point.x = 1.5
        escadas_seguranca.gaze.fixation_point.point.y = -0.0755340082393
        escadas_seguranca.gaze.fixation_point.point.z = 0.37065283186
        escadas_seguranca.name = "Escadas da seguranca"
        escadas_seguranca.speechString = "_________Não consigo subir escadas"
        waypoints.append(escadas_seguranca)

        relogio = WayPoint()
        goal.target_pose.pose = Pose(
            Point(7.147149086, 10.979344368, 0.0),
            Quaternion(0.0, 0.0, -0.0204962519017, 0.999789929764))
        relogio.goal = deepcopy(goal)
        relogio.gaze.fixation_point.point.x = 1.5
        relogio.gaze.fixation_point.point.y = 0.0836535250107
        relogio.gaze.fixation_point.point.z = 0.905399122557
        relogio.name = "Relogio"
        relogio.speechString = "_________Tic Tac Tic Tac"

        waypoints.append(relogio)

        tv_elevadores = WayPoint()
        goal.target_pose.pose = Pose(
            Point(13.1157531738, 8.31262111664, 0.0),
            Quaternion(0.0, 0.0, -0.00998402291442, 0.999950158401))
        tv_elevadores.goal = deepcopy(goal)
        tv_elevadores.gaze.fixation_point.point.x = 1.5
        tv_elevadores.gaze.fixation_point.point.y = 0.140865129254
        tv_elevadores.gaze.fixation_point.point.z = 0.766451514858
        tv_elevadores.name = "TV dos elevadores"
        tv_elevadores.speechString = "silence5s"
        waypoints.append(tv_elevadores)

        pilhas = WayPoint()
        goal.target_pose.pose = Pose(
            Point(6.41749477386, 8.2398443222, 0.0),
            Quaternion(0.0, 0.0, 0.999997911952, 0.00204354865763))
        pilhas.goal = deepcopy(goal)
        pilhas.gaze.fixation_point.point.x = 1.5
        pilhas.gaze.fixation_point.point.y = 0.304327019719
        pilhas.gaze.fixation_point.point.z = 0.856358791421
        pilhas.name = "Pilhas"
        pilhas.speechString = "_________Isto dá-me fome"
        waypoints.append(pilhas)

        self.gaze_active = rospy.get_param("~gaze_active", True)
        self.comlicenca_active = rospy.get_param("~comlicenca_active", True)
        self.beep_comlicenca_active = rospy.get_param(
            "~beep_comlicenca_active", False)

        #Initialize
        self.cmd_vel_pub = rospy.Publisher('cmd_vel', Twist, queue_size=1)
        self.move_base = actionlib.SimpleActionClient("move_base",
                                                      MoveBaseAction)
        self.move_base.wait_for_server(rospy.Duration(60))
        rospy.loginfo("Connected to move base server")

        self.gaze_client = actionlib.SimpleActionClient(
            'gaze', vizzy_msgs.msg.GazeAction)
        self.gaze_client.wait_for_server()
        rospy.loginfo("Connected to gaze server")

        self.speech_client = actionlib.SimpleActionClient(
            'nuance_speech_tts', woz_dialog_msgs.msg.SpeechAction)
        rospy.loginfo("Connected to speech server")
        rospy.loginfo("Starting to wander around")

        while not rospy.is_shutdown():

            home_goal = vizzy_msgs.msg.GazeGoal()
            home_goal.type = vizzy_msgs.msg.GazeGoal.HOME
            self.gaze_client.send_goal(home_goal)

            i = randint(0, len(waypoints) - 1)
            print('index: ' + str(i))
            way = waypoints[i]
            print('Moving to ' + way.name)
            test = self.move(way)
            if not test:
                continue

            print('gazing')
            self.gaze_client.send_goal(way.gaze)

            #self.gaze_client.wait_for_result()
            sleep(1)
            speech_goal = woz_dialog_msgs.msg.SpeechGoal()
            speech_goal.voice = 'Joaquim'
            speech_goal.language = 'pt_PT'
            speech_goal.message = "silence5s"
            self.speech_client.send_goal(speech_goal)
            #self.speech_client.wait_for_result()
            sleep(5)
            speech_goal.message = way.speechString
            self.speech_client.send_goal(speech_goal)
            sleep(2)
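Note the deepcopy(goal) on every waypoint above: goal.target_pose.pose is overwritten for each location, so each WayPoint must snapshot its own copy, or every entry would alias the last pose written. A minimal illustration of the aliasing pitfall:

from copy import deepcopy

goal = {"pose": None}
waypoints = []
for pose in ("A", "B"):
    goal["pose"] = pose
    waypoints.append(deepcopy(goal))  # without deepcopy, both entries share goal

assert [w["pose"] for w in waypoints] == ["A", "B"]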
Exemple #52
0
	while abs(pan_status.error) > error or abs(tilt_status.error) > error:
		pan_status = rospy.wait_for_message("/alice/pan_controller/state", JointControllerState)
		tilt_status = rospy.wait_for_message("/alice/tilt_controller/state", JointControllerState)



	pan_pos = 1
	idx = 0
	while True:
		face = 0
		while face < 360:
			
			while True:
				odom = rospy.wait_for_message("/odom", Odometry)
				orientation = Quaternion(odom.orientation.x, odom.orientation.y, odom.orientation.z, odom.orientation.w)
				orientation_euler = tf.transformations.euler_from_quaternion(orientation)
			    print "direction face: {} degrees".format(orientation_euler[2])
				i = input('Turn Alice and press enter to continue: ')
			    if not i:
			        break

			panPosition.publish(-pan_pos*math.pi/2)
			print "turning..."
			pan_status = rospy.wait_for_message("/alice/pan_controller/state", JointControllerState)
			while abs(pan_status.error) < 1:
				pan_status = rospy.wait_for_message("/alice/pan_controller/state", JointControllerState)
			while abs(pan_status.error) > error:
				pan_status = rospy.wait_for_message("/alice/pan_controller/state", JointControllerState)
				image_data = rospy.wait_for_message("/front_xtion/rgb/image_raw", Image)
				image = cvBridge.imgmsg_to_cv2(image_data, "bgr8")
    def _broadcast_odometry_info(self, line_parts):
        """
        Broadcast all data from propeller monitored sensors on the appropriate topics.
        """
        # If we got this far, we can assume that the Propeller board is initialized and the motors should be on.
        # The _switch_motors() function will deal with the _SafeToOperate issue
        if not self._motorsOn:
            self._switch_motors(True)
        parts_count = len(line_parts)

        # rospy.logwarn(partsCount)
        if parts_count != 8:  # Just discard short/long lines, increment this as lines get longer
            return

        x = float(line_parts[1])
        y = float(line_parts[2])
        # 3 is odom based heading and 4 is gyro based
        theta = float(line_parts[3])  # On ArloBot odometry derived heading works best.
        alternate_theta = float(line_parts[4])

        vx = float(line_parts[5])
        omega = float(line_parts[6])

        quaternion = Quaternion()
        quaternion.x = 0.0
        quaternion.y = 0.0
        quaternion.z = sin(theta / 2.0)
        quaternion.w = cos(theta / 2.0)

        ros_now = rospy.Time.now()

        # First, we'll publish the transform from frame odom to frame base_link over tf
        # Note that sendTransform requires that 'to' is passed in before 'from' while
        # the TransformListener's lookupTransform function expects 'from' first followed by 'to'.
        # This transform conflicts with transforms built into the Turtle stack
        # http://wiki.ros.org/tf/Tutorials/Writing%20a%20tf%20broadcaster%20%28Python%29
        # This is done in/with the robot_pose_ekf because it can integrate IMU/gyro data
        # using an "extended Kalman filter"
        # REMOVE this "line" if you use robot_pose_ekf
        self._OdometryTransformBroadcaster.sendTransform(
            (x, y, 0),
            (quaternion.x, quaternion.y, quaternion.z, quaternion.w),
            ros_now,
            "base_footprint",
            "odom"
        )

        # next, we will publish the odometry message over ROS
        odometry = Odometry()
        odometry.header.frame_id = "odom"
        odometry.header.stamp = ros_now
        odometry.pose.pose.position.x = x
        odometry.pose.pose.position.y = y
        odometry.pose.pose.position.z = 0
        odometry.pose.pose.orientation = quaternion

        odometry.child_frame_id = "base_link"
        odometry.twist.twist.linear.x = vx
        odometry.twist.twist.linear.y = 0
        odometry.twist.twist.angular.z = omega

        # Save last X, Y and Heading for reuse if we have to reset:
        self.lastX = x
        self.lastY = y
        self.lastHeading = theta
        self.alternate_heading = alternate_theta

        # robot_pose_ekf needs these covariances and we may need to adjust them.
        # From: ~/turtlebot/src/turtlebot_create/create_node/src/create_node/covariances.py
        # However, this is not needed because we are not using robot_pose_ekf
        # odometry.pose.covariance = [1e-3, 0, 0, 0, 0, 0,
        # 0, 1e-3, 0, 0, 0, 0,
        #                         0, 0, 1e6, 0, 0, 0,
        #                         0, 0, 0, 1e6, 0, 0,
        #                         0, 0, 0, 0, 1e6, 0,
        #                         0, 0, 0, 0, 0, 1e3]
        #
        # odometry.twist.covariance = [1e-3, 0, 0, 0, 0, 0,
        #                          0, 1e-3, 0, 0, 0, 0,
        #                          0, 0, 1e6, 0, 0, 0,
        #                          0, 0, 0, 1e6, 0, 0,
        #                          0, 0, 0, 0, 1e6, 0,
        #                          0, 0, 0, 0, 0, 1e3]

        self._OdometryPublisher.publish(odometry)

        # Joint State for Turtlebot stack
        # Note without this transform publisher the wheels will
        # be white, stuck at 0, 0, 0 and RVIZ will tell you that
        # there is no transform from the wheel_links to the base_link.
        # However, instead of publishing a stream of pointless transforms,
        # I have made the joint static in the URDF like this:
        # create.urdf.xacro:
        # <joint name="right_wheel_joint" type="fixed">
        # NOTE This may prevent Gazebo from working with this model
        # Here is the old Joint State in case you want to restore it:
        # js = JointState(name=["left_wheel_joint", "right_wheel_joint", "front_castor_joint", "back_castor_joint"],
        # position=[0, 0, 0, 0], velocity=[0, 0, 0, 0], effort=[0, 0, 0, 0])
        # js.header.stamp = ros_now
        # self.joint_states_pub.publish(js)

        # Fake laser from "PING" Ultrasonic Sensor and IR Distance Sensor input:
        # http://wiki.ros.org/navigation/Tutorials/RobotSetup/TF
        # Use:
        # roslaunch arlobot_rviz_launchers view_robot.launch
        # to view this well for debugging and testing.

        # The purpose of this is two fold:
        # 1. It REALLY helps adjusting values in the Propeller and ROS
        # when I can visualize the sensor output in RVIZ!
        # For this purpose, a lot of the parameters are a matter of personal taste.
        #   Whatever makes it easiest to visualize is best.
        # 2. I want to allow AMCL to use this data to avoid obstacles that the Kinect/Xtion miss.
        #     For the second purpose, some of the parameters here may need to be tweaked,
        #   to adjust how large an object looks to AMCL.
        # Note that we should also adjust the distance at which AMCL takes this data
        # into account either here or elsewhere.

        # Transform: http://wiki.ros.org/tf/Tutorials/Writing%20a%20tf%20broadcaster%20%28Python%29

        # We do not need to broadcast a transform,
        # because it is static and contained within the URDF files now.
        # Here is the static transform for reference anyway:
        # self._SonarTransformBroadcaster.sendTransform(
        #     (0.1, 0.0, 0.2),
        #     (0, 0, 0, 1),
        #     ros_now,
        #     "sonar_laser",
        #     "base_link"
        #     )

        # Some help:
        # http://goo.gl/ZU9XrJ

        # TODO: I'm doing this all in degrees and then converting to Radians later.
        # Is there any way to do this in Radians?
        # I just don't know how to create and fill an array with "Radians"
        # since they are not rational numbers, but multiples of PI, thus the degrees.
        num_readings = 360  # How about 1 per degree?
        #num_reeading_multiple = 2 # We have to track this so we know where to put the readings!
        #num_readings = 360 * num_reeading_multiple
        laser_frequency = 100  # I'm not sure how to decide what to use here.
        # This is the fake distance to set all empty slots, and slots we consider "out of range"
        artificial_far_distance = 10
        #ranges = [1] * num_readings # Fill array with fake "1" readings for testing
        # Fill array with artificial_far_distance (not 0) and then overlap with real readings
        ping_ranges = [artificial_far_distance] * num_readings
        # If we use 0, then it won't clear the obstacles when we rotate away,
        # because costmap2d ignores 0's and Out of Range!
        # Fill array with artificial_far_distance (not 0) and then overlap with real readings
        ir_ranges = [artificial_far_distance] * num_readings

        # New idea here:
        # First, I do not think that this can be used for reliable for map generation.
        # If your room has objects that the Kinect
        # cannot map, then you will probably need to modify the room (cover mirrors, etc.) or try
        # other laser scanner options.
        # SO, since we only want to use it for cost planning, we should modify the data, because
        # it is easy for it to get bogged down with a lot of "stuff" everywhere.

        # From:
        # http://answers.ros.org/question/11446/costmaps-obstacle-does-not-clear-properly-under-sparse-environment/
        # "When clearing obstacles, costmap_2d only trusts laser scans returning a definite range.
        # Indoors, that makes sense. Outdoors, most scans return max range, which does not clear
        # intervening obstacles. A fake scan with slightly shorter ranges can be constructed that
        # does clear them out."
        # SO, we need to set all "hits" above the distance we want to pay attention to to a distance very far away,
        # but just within the range_max (which we can set to anything we want),
        # otherwise costmap will not clear items!
        # Also, 0 does not clear anything! So if we rotate, then it gets 0 at that point, and ignores it,
        # so we need to fill the unused slots with long distances.
        # NOTE: This does cause a "circle" to be drawn around the robot at the "artificial_far_distance",
        # but it shouldn't be a problem because we set
        # artificial_far_distance to a distance greater than the planner uses.
        # So while it clears things, it shouldn't cause a problem, and the Kinect should override it for things
        # in between.

        # Use:
        # roslaunch arlobot_rviz_launchers view_robot.launch
        # to view this well for debugging and testing.

        # Note that sensor orientation is important here!
        # If you have a different number or aim them differently this will not work!
        # TODO: Tweak this value based on real measurements!
        # TODO: Use both IR and PING sensors?
        # The offset between the pretend sensor location in the URDF
        # and real location needs to be added to these values. This may need to be tweaked.
        sensor_offset = 0.217 # Measured, Calculated: 0.22545
        # This will be the max used range, anything beyond this is set to "artificial_far_distance"
        max_range_accepted = .5

        # max_range_accepted Testing:
        # TODO: More tweaking here could be done.
        # I think it is a trade-off, so there is no end to the adjustment that could be done.
        # I did a lot of testing with gmapping while building a map.
        # Obviously this would be slightly different from using a map we do not update.
        # It seems there are so many variables here that testing is difficult.
        # We could find one number works great in one situation but is hopeless in another.
        # Having a comprehensive test course to test in multiple modes for every possible value would be great,
        # but I think it would take months! :)
        # REMEMBER, the costmap only pays attention out to a certain set
        # for obstacle_range in costmap_common_params.yaml anyway.
        # Here are my notes for different values of "max_range_accepted":
        # 1 - looks good, and works ok, but
        # I am afraid that the costmap gets confused with things popping in and out of sight all of the time,
        # causing undue wandering.
        # 2 - This producing less wandering due to things popping in and out of the field of view,
        # BUT it also shows that we get odd affects at longer distances. i.e.
        #     A doorframe almost always has a hit right in the middle of it.
        #     In a hallway there is often a hit in the middle about 1.5 meters out.
        # .5 - This works very well to have the PING data ONLY provide obstacle avoidance,
        # and immediately forget about said obstacles.
        #     This prevents the navigation stack from fighting with the Activity Board code's
        # built in safety stops, and instead navigate around obstacles before the Activity Board
        # code gets involved (other than to set speed reductions).
        #     The only down side is if you tell ArloBot to go somewhere that he cannot due to low obstacles,
        # he will try forever. He won't just bounce off of the obstacle,
        #     but he will keep trying it and then go away, turn around,
        # and try again over and over. He may even start wandering around
        # the facility trying to find another way in,
        #     but he will eventually come back and try it again.
        #     I'm not sure what the solution to this is though,
        # because avoiding low lying obstacles and adding low lying
        # features to the map are really two different things.
        #     I think if this is well tuned to avoid low lying obstacles it
        # probably will not work well for mapping features.
        #     IF we could map features with the PING sensors, we wouldn't need the 3D sensor. :)
        # TODO: One option may be more PING sensors around back.
        # Right now when the robot spins, it clears the obstacles behind it,
        # because there are fewer sensors on the back side.
        # If the obstacle was seen all of the way around the robot, in the same spot,
        # it may stop returning to the same location as soon as it turns around?

        #     NOTE: The bump sensors on Turtlebot mark but do not clear.
        # I'm not sure how that works out. It seems like every bump would
        # end up being a "blot" in the landscape never to be returned to,
        # but maybe there is something I am missing?

        # NOTE: Could this be different for PING vs. IR?
        # Currently I'm not using IR! Just PING. The IR is not being used by costmap.
        # It is here for seeing in RVIZ, and the Propeller board uses it for emergency stopping,
        # but costmap isn't watching it at the moment. I think it is too erratic for that.

        try:
            sensor_data = json.loads(line_parts[7])
        except ValueError:  # malformed JSON on the serial line; skip this message
            return
        ping = [artificial_far_distance] * 10
        ir = [artificial_far_distance] * len(ping)

        # Convert cm to meters and add offset
        for i in range(0, len(ping)):
            # ping[0] = (int(line_parts[7]) / 100.0) + sensor_offset
            ping[i] = (sensor_data.get('p' + str(i), artificial_far_distance * 100) / 100.0) + sensor_offset
            # Set to "out of range" for distances over "max_range_accepted" to clear long range obstacles
            # and use this for near range only.
            if ping[i] > max_range_accepted:
                # Be sure "ultrasonic_scan.range_max" is set higher than this or
                # costmap will ignore these and not clear the cost map!
                ping[i] = artificial_far_distance
            ir[i] = (sensor_data.get('i' + str(i), artificial_far_distance * 100) / 100.0) + sensor_offset  # Convert cm to meters and add offset

        # The sensors are 11cm from center to center at the front of the base plate.
        # The radius of the base plate is 22.545 cm
        # = 28 degree difference (http://ostermiller.org/calc/triangle.html)

        sensor_seperation = 28

        # Spread code: NO LONGER USED
        # TODO: This could make sense to return to if used properly,
        # allowing obstacles to "fill" the space and smoothly move "around"
        # the robot as it rotates and objects move across the view of the PING
        # sensors, instead of "jumping" from one point to the next.
        # # "sensor_spread" is how wide we expand the sensor "point" in the fake laser scan.
        # # For the purpose of obstacle avoidance, I think this can actually be a single point,
        # # Since the costmap inflates these anyway.
        #
        # #One issue I am having is it seems that the "ray trace" to the maximum distance
        # #may not line up with near hits, so that the global cost map is not being cleared!
        # #Switching from a "spread" to a single point may fix this?
        # #Since the costmap inflates obstacles anyway, we shouldn't need the spread should we?
        #
        # #sensor_spread = 10 # This is how wide of an arc (in degrees) to paint for each "hit"
        # #sensor_spread = 2 # Testing. I think it has to be even numbers?
        #
        # #NOTE:
        # #This assumes that things get bigger as they are further away. This is true of the PING's area,
        # #and while it may or may not be true of the object the PING sees, we have no way of knowing if
        # #the object fills the ping's entire field of view or only a small part of it, a "hit" is a "hit".
        # #However for the IR sensor, the objects are points, that are the same size regardless of distance,
        # #so we are clearly inflating them here.
        #
        # for x in range(180 - sensor_spread / 2, 180 + sensor_spread / 2):
        #     PINGranges[x] = ping[5] # Rear Sensor
        #     IRranges[x] = ir[5] # Rear Sensor
        #
        # for x in range((360 - sensor_seperation * 2) - sensor_spread / 2,
        #                (360 - sensor_seperation * 2) + sensor_spread / 2):
        #     PINGranges[x] = ping[4]
        #     IRranges[x] = ir[4]
        #
        # for x in range((360 - sensor_seperation) - sensor_spread / 2,
        #                (360 - sensor_seperation) + sensor_spread / 2):
        #     PINGranges[x] = ping[3]
        #     IRranges[x] = ir[3]
        #
        # for x in range(360 - sensor_spread / 2, 360):
        #     PINGranges[x] = ping[2]
        #     IRranges[x] = ir[2]
        # # Crosses center line
        # for x in range(0, sensor_spread /2):
        #     PINGranges[x] = ping[2]
        #     IRranges[x] = ir[2]
        #
        # for x in range(sensor_seperation - sensor_spread / 2, sensor_seperation + sensor_spread / 2):
        #     PINGranges[x] = ping[1]
        #     IRranges[x] = ir[1]
        #
        # for x in range((sensor_seperation * 2) - sensor_spread / 2, (sensor_seperation * 2) + sensor_spread / 2):
        #     PINGranges[x] = ping[0]
        #     IRranges[x] = ir[0]

        # Single Point code:
        #for x in range(180 - sensor_spread / 2, 180 + sensor_spread / 2):
        ping_ranges[180 + sensor_seperation * 2] = ping[5]
        ir_ranges[180 + sensor_seperation * 2] = ir[5]

        ping_ranges[180 + sensor_seperation] = ping[6]
        ir_ranges[180 + sensor_seperation] = ir[6]

        ping_ranges[180] = ping[7]  # Rear Sensor
        ir_ranges[180] = ir[7]  # Rear Sensor

        ping_ranges[180 - sensor_seperation] = ping[8]
        ir_ranges[180 - sensor_seperation] = ir[8]

        ping_ranges[180 - sensor_seperation * 2] = ping[9]
        ir_ranges[180 - sensor_seperation * 2] = ir[9]

        # for x in range((360 - sensor_seperation * 2) - sensor_spread / 2,
        #                (360 - sensor_seperation * 2) + sensor_spread / 2):
        ping_ranges[360 - sensor_seperation * 2] = ping[4]
        ir_ranges[360 - sensor_seperation * 2] = ir[4]

        # for x in range((360 - sensor_seperation) - sensor_spread / 2,
        #                (360 - sensor_seperation) + sensor_spread / 2):
        ping_ranges[360 - sensor_seperation] = ping[3]
        ir_ranges[360 - sensor_seperation] = ir[3]

        #for x in range(360 - sensor_spread / 2, 360):
        #PINGranges[x] = ping[2]
        #IRranges[x] = ir[2]
        # Crosses center line
        #for x in range(0, sensor_spread /2):
        ping_ranges[0] = ping[2]
        ir_ranges[0] = ir[2]

        #for x in range(sensor_seperation - sensor_spread / 2, sensor_seperation + sensor_spread / 2):
        ping_ranges[sensor_seperation] = ping[1]
        ir_ranges[sensor_seperation] = ir[1]

        #for x in range((sensor_seperation * 2) - sensor_spread / 2, (sensor_seperation * 2) + sensor_spread / 2):
        ping_ranges[sensor_seperation * 2] = ping[0]
        ir_ranges[sensor_seperation * 2] = ir[0]

        # LaserScan: http://docs.ros.org/api/sensor_msgs/html/msg/LaserScan.html
        ultrasonic_scan = LaserScan()
        infrared_scan = LaserScan()
        ultrasonic_scan.header.stamp = ros_now
        infrared_scan.header.stamp = ros_now
        ultrasonic_scan.header.frame_id = "ping_sensor_array"
        infrared_scan.header.frame_id = "ir_sensor_array"
        # For example:
        #scan.angle_min = -45 * M_PI / 180; // -45 degree
        #scan.angle_max = 45 * M_PI / 180;   // 45 degree
        # if you want to receive a full 360 degrees scan,
        # you should try setting min_angle to -pi/2 and max_angle to 3/2 * pi.
        # Radians: http://en.wikipedia.org/wiki/Radian#Advantages_of_measuring_in_radians
        ultrasonic_scan.angle_min = 0
        infrared_scan.angle_min = 0
        #ultrasonic_scan.angle_max = 2 * 3.14159 # Full circle # Letting it use default, which I think is the same.
        #infrared_scan.angle_max = 2 * 3.14159 # Full circle # Letting it use default, which I think is the same.
        #ultrasonic_scan.scan_time = 3 # I think this is only really applied for 3D scanning
        #infrared_scan.scan_time = 3 # I think this is only really applied for 3D scanning
        # Make sure the part you divide by num_readings is the same as your angle_max!
        # Might even make sense to use a variable here?
        ultrasonic_scan.angle_increment = (2 * 3.14) / num_readings
        infrared_scan.angle_increment = (2 * 3.14) / num_readings
        ultrasonic_scan.time_increment = (1.0 / laser_frequency) / num_readings
        infrared_scan.time_increment = (1.0 / laser_frequency) / num_readings
        # From: http://www.parallax.com/product/28015
        # Range: approximately 1 inch to 10 feet (2 cm to 3 m)
        # This should be adjusted based on the imaginary distance between the actual laser
        # and the laser location in the URDF file.
        # in Meters Distances below this number will be ignored REMEMBER the offset!
        ultrasonic_scan.range_min = 0.02
        # in Meters Distances below this number will be ignored REMEMBER the offset!
        infrared_scan.range_min = 0.02
        # This has to be above our "artificial_far_distance",
        # otherwise "hits" at artificial_far_distance will be ignored,
        # which means they will not be used to clear the cost map!
        # in Meters Distances above this will be ignored
        ultrasonic_scan.range_max = artificial_far_distance + 1
        # in Meters Distances above this will be ignored
        infrared_scan.range_max = artificial_far_distance + 1
        ultrasonic_scan.ranges = ping_ranges
        infrared_scan.ranges = ir_ranges
        # "intensity" is a value specific to each laser scanner model.
        # It can safely be ignored

        self._UltraSonicPublisher.publish(ultrasonic_scan)
        self._InfraredPublisher.publish(infrared_scan)
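To summarize the single-point mapping above: each sonar reading lands in one angular slot of a 360-entry ranges array pre-filled with artificial_far_distance, so stale hits get cleared as the robot turns. A condensed, ROS-free sketch of the idea (readings are hypothetical):

import math

num_readings = 360
artificial_far_distance = 10.0
sensor_separation = 28  # degrees between adjacent front sensors
front_pings = [0.3, 0.4, 0.5, 0.4, 0.3]  # hypothetical readings, meters

ranges = [artificial_far_distance] * num_readings
# slots at +2, +1, 0, -1, -2 sensor separations around straight ahead (index 0)
for offset, reading in zip((2, 1, 0, -1, -2), front_pings):
    ranges[(offset * sensor_separation) % num_readings] = reading

angle_increment = 2 * math.pi / num_readings  # radians per slot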
Exemple #54
0
if is_eye_on_hand:
    calibration_origin_frame = robot_effector_frame
    marker_placement_origin_frame = robot_base_frame
else:
    calibration_origin_frame = robot_base_frame
    marker_placement_origin_frame = robot_effector_frame

# set the marker placement in the buffer, so that we can measure the tracking output
# but the transform doesn't show up in tf (more realistic to the viewer, and no loops in the DAG)
arbitrary_transform_msg_stmpd = TransformStamped(
    header=Header(frame_id=marker_placement_origin_frame,
                  stamp=rospy.Time.now()),
    child_frame_id=MARKER_DUMMY_FRAME,
    transform=Transform(
        translation=Vector3(*arbitrary_marker_placement_transformation[0]),
        rotation=Quaternion(*arbitrary_marker_placement_transformation[1])))
tfBuffer.set_transform_static(arbitrary_transform_msg_stmpd,
                              'default_authority')

if is_calibration:
    # publish a dummy calibration for visualization purposes
    calib_gt_msg_stmpd = TransformStamped(
        header=Header(frame_id=calibration_origin_frame),
        child_frame_id=tracking_base_frame,
        transform=Transform(
            translation=Vector3(*ground_truth_calibration_transformation[0]),
            rotation=Quaternion(*ground_truth_calibration_transformation[1])))
    tfStaticBroadcaster.sendTransform(calib_gt_msg_stmpd)

# set the ground truth calibration; during demo this allows us to compute the correct tracking output even if the calibration failed
calib_gt_msg_stmpd = TransformStamped(
Exemple #55
0
 def reset_held_piece(self):
     print('Resetting held piece')
     quat = Quaternion(*quaternion_from_euler(1.57971, 0.002477, 3.11933))
     pose = Pose(Point(0.841529, 0.209424, 0.501394), quat)
     self.set_pose('held_piece', pose)
    def __init__(self):
        rospy.init_node("waypoint_mission", anonymous=True)
        self.rate = rospy.Rate(ROS_RATE)
        self.idx_wp = 0
        self.frame_id = "odom"
        self.is_2d_mode = True

        self.home_wp = PoseStamped(header=Header(stamp=rospy.Time.now(),
                                                 frame_id=self.frame_id),
                                   pose=Pose(position=Point(0, 0, 1),
                                             orientation=Quaternion(
                                                 0, 0, 0, 1)))
        self.wps = []

        self.current_pose = copy.deepcopy(self.home_wp)
        self.height = 0

        # Information from gate detection algorithm
        self.object_count = 0
        self.zero_object_count = 0
        self.cur_target_bbox = None

        self.pos_ctrl_cmd = Twist()
        self.pos_ctrl_cmd_ekf = Twist()
        self.vision_control_command = Twist()
        self.zero_control_command = Twist()
        self.slam_path_msg = Path()

        # PUBLISHER
        self.pub_take_off = rospy.Publisher('/tello/takeoff',
                                            Empty,
                                            queue_size=1)
        self.pub_land = rospy.Publisher('/tello/land', Empty, queue_size=1)
        self.pub_ctrl_cmd = rospy.Publisher('/tello/cmd_vel',
                                            Twist,
                                            queue_size=1)
        self.pub_ctrl_cmd_to_ekf = rospy.Publisher('/cmd_vel',
                                                   Twist,
                                                   queue_size=1)
        self.pub_fast_mode = rospy.Publisher('/tello/fast_mode',
                                             Empty,
                                             queue_size=1)
        self.pub_wps_path = rospy.Publisher("/wps_path", Path, queue_size=1)
        self.pub_err_x_img = rospy.Publisher("/err_x_img",
                                             Float64,
                                             queue_size=1)
        self.pub_err_y_img = rospy.Publisher("/err_y_img",
                                             Float64,
                                             queue_size=1)

        # rospy.loginfo("Waiting for /set_ref_pose from drone_controller node")
        # rospy.wait_for_service('/set_ref_pose')

        self.update_target_call = rospy.ServiceProxy('/set_ref_pose',
                                                     SetRefPose)
        self.move_drone_call = rospy.ServiceProxy('/move_drone_w', MoveDroneW)
        self.srv_cli_up = rospy.ServiceProxy('/tello/up', MoveUp)
        self.srv_cli_down = rospy.ServiceProxy('/tello/down', MoveDown)

        # SUBSCRIBER
        # rospy.loginfo("Waiting for openvslam pose")
        # rospy.wait_for_message('/openvslam/camera_pose', PoseStamped)

        # self.sub_pose   = rospy.Subscriber('/openvslam/camera_pose', PoseWithCovarianceStamped, self.cb_pose)
        self.sub_ekf_pose = rospy.Subscriber('/tf', TFMessage,
                                             self.cb_ekf_pose)
        self.sub_pos_ux = rospy.Subscriber('/pid_roll/control_effort', Float64,
                                           self.cb_pos_ux)
        self.sub_pos_uy = rospy.Subscriber('/pid_pitch/control_effort',
                                           Float64, self.cb_pos_uy)
        self.sub_pos_uz = rospy.Subscriber('/pid_thrust/control_effort',
                                           Float64, self.cb_pos_uz)
        self.sub_pos_uyaw = rospy.Subscriber('/pid_yaw/control_effort',
                                             Float64, self.cb_pos_uyaw)

        self.tello_status_sub = rospy.Subscriber('/tello/status', TelloStatus,
                                                 self.cb_tello_status)

        rospy.sleep(1)
from geometry_msgs.msg import Quaternion
import rospy
from sensor_msgs.msg import Image
from std_msgs.msg import Empty
from cv_bridge import CvBridge

import numpy as np
import cv2
import imutils

hardcoded_yaw = -90
pause_maxIters = 200
#current_yaw = 0
pause_iter = 0
yaw_done = 0

flag = 0
velocity = Quaternion()
bridge = CvBridge()
mask_pub = rospy.Publisher('/mask', Image, queue_size=1)
vel_pub = rospy.Publisher('/moveto_cmd_body', Quaternion, queue_size=1)
pub_land = rospy.Publisher('bebop/land', Empty, queue_size=1)
def find_circles(my_img):
    global flag, pause_iter, hardcoded_yaw, pause_maxIters,yaw_done
    img = bridge.imgmsg_to_cv2(my_img, "bgr8")
    
    img_orig=cv2.cvtColor(img,cv2.COLOR_BGR2GRAY)
    clahe = cv2.createCLAHE(clipLimit=2.0, tileGridSize=(8,8))

    img_orig = clahe.apply(img_orig)
    img = cv2.medianBlur(img_orig,3)
    ret,thresh_binary = cv2.threshold(img,210,255,cv2.THRESH_BINARY)
    dilate = cv2.dilate(thresh_binary, np.ones((3,3), np.uint8), iterations=1)
    def create_grasp(self, pose, grasp_id):
        """
        :type pose: Pose
            pose of the gripper for the grasp
        :type grasp_id: str
            name for the grasp
        :rtype: Grasp
        """
        g = Grasp()
        g.id = grasp_id

        pre_grasp_posture = JointTrajectory()
        pre_grasp_posture.header.frame_id = self._grasp_postures_frame_id
        pre_grasp_posture.joint_names = [
            name for name in self._gripper_joint_names.split()
        ]
        jtpoint = JointTrajectoryPoint()
        jtpoint.positions = [
            float(pos) for pos in self._gripper_pre_grasp_positions.split()
        ]
        jtpoint.time_from_start = rospy.Duration(self._time_pre_grasp_posture)
        pre_grasp_posture.points.append(jtpoint)

        grasp_posture = copy.deepcopy(pre_grasp_posture)
        grasp_posture.points[0].time_from_start = rospy.Duration(
            self._time_pre_grasp_posture + self._time_grasp_posture)
        jtpoint2 = JointTrajectoryPoint()
        jtpoint2.positions = [
            float(pos) for pos in self._gripper_grasp_positions.split()
        ]
        jtpoint2.time_from_start = rospy.Duration(
            self._time_pre_grasp_posture + self._time_grasp_posture +
            self._time_grasp_posture_final)
        jtpoint2.effort = [-2.5] * len(jtpoint2.positions)
        grasp_posture.points.append(jtpoint2)

        g.pre_grasp_posture = pre_grasp_posture
        g.grasp_posture = grasp_posture

        header = Header()
        header.frame_id = self._grasp_pose_frame_id  # base_footprint
        q = [
            pose.orientation.x, pose.orientation.y, pose.orientation.z,
            pose.orientation.w
        ]
        # Fix orientation from gripper_link to parent_link (tool_link)
        fix_tool_to_gripper_rotation_q = quaternion_from_euler(
            math.radians(self._fix_tool_frame_to_grasping_frame_roll),
            math.radians(self._fix_tool_frame_to_grasping_frame_pitch),
            math.radians(self._fix_tool_frame_to_grasping_frame_yaw))
        q = quaternion_multiply(q, fix_tool_to_gripper_rotation_q)
        fixed_pose = copy.deepcopy(pose)
        fixed_pose.orientation = Quaternion(*q)

        g.grasp_pose = PoseStamped(header, fixed_pose)
        g.grasp_quality = self._grasp_quality

        g.pre_grasp_approach = GripperTranslation()
        g.pre_grasp_approach.direction.vector.x = self._pre_grasp_direction_x  # NOQA
        g.pre_grasp_approach.direction.vector.y = self._pre_grasp_direction_y  # NOQA
        g.pre_grasp_approach.direction.vector.z = self._pre_grasp_direction_z  # NOQA
        g.pre_grasp_approach.direction.header.frame_id = self._grasp_postures_frame_id  # NOQA
        g.pre_grasp_approach.desired_distance = self._grasp_desired_distance  # NOQA
        g.pre_grasp_approach.min_distance = self._grasp_min_distance
        g.post_grasp_retreat = GripperTranslation()
        g.post_grasp_retreat.direction.vector.x = self._post_grasp_direction_x  # NOQA
        g.post_grasp_retreat.direction.vector.y = self._post_grasp_direction_y  # NOQA
        g.post_grasp_retreat.direction.vector.z = self._post_grasp_direction_z  # NOQA
        g.post_grasp_retreat.direction.header.frame_id = self._grasp_postures_frame_id  # NOQA
        g.post_grasp_retreat.desired_distance = self._grasp_desired_distance  # NOQA
        g.post_grasp_retreat.min_distance = self._grasp_min_distance

        g.max_contact_force = self._max_contact_force
        g.allowed_touch_objects = self._allowed_touch_objects

        return g
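A minimal usage sketch, assuming the enclosing class has been instantiated as sg and that a target gripper pose is available (the pose values and grasp id below are placeholders):

    from geometry_msgs.msg import Pose, Point, Quaternion

    target = Pose(Point(0.5, 0.0, 0.9), Quaternion(0.0, 0.0, 0.0, 1.0))
    grasp = sg.create_grasp(target, "front_grasp")  # sg: hypothetical instance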
Exemple #59
0
    def __init__(self):
        rospy.init_node('nav_test', anonymous=True)

        rospy.on_shutdown(self.shutdown)

        # How long in seconds should the robot pause at each location?
        self.rest_time = rospy.get_param("~rest_time", 10)

        # Are we running in the fake simulator?
        self.fake_test = rospy.get_param("~fake_test", False)

        # Goal state return values
        goal_states = [
            'PENDING', 'ACTIVE', 'PREEMPTED', 'SUCCEEDED', 'ABORTED',
            'REJECTED', 'PREEMPTING', 'RECALLING', 'RECALLED', 'LOST'
        ]

        # Set up the goal locations. Poses are defined in the map frame.
        # An easy way to find the pose coordinates is to point-and-click
        # Nav Goals in RViz when running in the simulator.
        # Pose coordinates are then displayed in the terminal
        # that was used to launch RViz.

        locations = dict()

        locations['hall_foyer'] = Pose(Point(4.535, -0.426, 0.036),
                                       Quaternion(0.000, 0.000, 0.223, 0.975))
        locations['hall_kitchen'] = Pose(
            Point(10.035, -0.452, 3.127),
            Quaternion(0.000, 0.000, -0.670, 0.743))
        #locations['hall_bedroom'] = Pose(Point(-4.305, -12.303, 0.0), Quaternion(0.000, 0.000, 0.733, 0.680))
        #locations['living_room_1'] = Pose(Point(-10.951, -3.197, 0.0), Quaternion(0.000, 0.000, 0.786, 0.618))
        #locations['living_room_2'] = Pose(Point(-9.984, -11.494, 0.0), Quaternion(0.000, 0.000, 0.480, 0.877))
        #locations['dining_room_1'] = Pose(Point(-15.937, -10.352, 0.0), Quaternion(0.000, 0.000, 0.892, -0.451))

        # Publisher to manually control the robot (e.g. to stop it)
        self.cmd_vel_pub = rospy.Publisher('cmd_vel', Twist, queue_size=5)

        # Subscribe to the move_base action server
        self.move_base = actionlib.SimpleActionClient("move_base",
                                                      MoveBaseAction)

        rospy.loginfo("Waiting for move_base action server...")

        # Wait 60 seconds for the action server to become available
        self.move_base.wait_for_server(rospy.Duration(60))

        rospy.loginfo("Connected to move base server")

        # A variable to hold the initial pose of the robot to be set by
        # the user in RViz
        initial_pose = PoseWithCovarianceStamped()

        # Variables to keep track of success rate, running time,
        # and distance traveled
        n_locations = len(locations)
        n_goals = 0
        n_successes = 0
        i = n_locations
        distance_traveled = 0
        start_time = rospy.Time.now()
        running_time = 0
        location = ""
        last_location = ""

        # Get the initial pose from the user
        rospy.loginfo(
            "*** Click the 2D Pose Estimate button in RViz to set the robot's initial pose..."
        )
        rospy.wait_for_message('initialpose', PoseWithCovarianceStamped)
        self.last_location = Pose()
        rospy.Subscriber('initialpose', PoseWithCovarianceStamped,
                         self.update_initial_pose)

        # Make sure we have the initial pose; an empty string is used
        # below as a sentinel meaning "no fresh pose update available"
        while initial_pose.header.stamp == "":
            rospy.sleep(1)

        rospy.loginfo("Starting navigation test")

        # Begin the main loop and run through a sequence of locations
        while not rospy.is_shutdown():
            # If we've gone through the current sequence,
            # start with a new random sequence
            if i == n_locations:
                i = 0
                # Sample the location names; list() keeps this valid in Python 3
                sequence = sample(list(locations), n_locations)
                # Skip over first location if it is the same as
                # the last location
                if sequence[0] == last_location:
                    i = 1

            # Get the next location in the current sequence
            location = sequence[i]

            # Keep track of the distance traveled.
            # Use updated initial pose if available.
            if initial_pose.header.stamp == "":
                distance = sqrt(
                    pow(
                        locations[location].position.x -
                        locations[last_location].position.x, 2) + pow(
                            locations[location].position.y -
                            locations[last_location].position.y, 2))
            else:
                rospy.loginfo("Updating current pose.")
                distance = sqrt(
                    pow(
                        locations[location].position.x -
                        initial_pose.pose.pose.position.x, 2) + pow(
                            locations[location].position.y -
                            initial_pose.pose.pose.position.y, 2))
                initial_pose.header.stamp = ""

            # Store the last location for distance calculations
            last_location = location

            # Increment the counters
            i += 1
            n_goals += 1

            # Set up the next goal location
            self.goal = MoveBaseGoal()
            self.goal.target_pose.pose = locations[location]
            self.goal.target_pose.header.frame_id = 'map'
            self.goal.target_pose.header.stamp = rospy.Time.now()

            # Let the user know where the robot is going next
            rospy.loginfo("Going to: " + str(location))

            # Start the robot toward the next location
            self.move_base.send_goal(self.goal)

            # Allow 5 minutes to get there
            finished_within_time = self.move_base.wait_for_result(
                rospy.Duration(300))

            # Check for success or failure
            if not finished_within_time:
                self.move_base.cancel_goal()
                rospy.loginfo("Timed out achieving goal")
            else:
                state = self.move_base.get_state()
                if state == GoalStatus.SUCCEEDED:
                    rospy.loginfo("Goal succeeded!")
                    n_successes += 1
                    distance_traveled += distance
                    rospy.loginfo("State:" + str(state))
                else:
                    rospy.loginfo("Goal failed with error code: " +
                                  str(goal_states[state]))

            # How long have we been running?
            running_time = rospy.Time.now() - start_time
            running_time = running_time.secs / 60.0

            # Print a summary success/failure, distance traveled and time elapsed
            rospy.loginfo("Success so far: " + str(n_successes) + "/" +
                          str(n_goals) + " = " +
                          str(100 * n_successes / n_goals) + "%")
            rospy.loginfo("Running time: " + str(trunc(running_time, 1)) +
                          " min Distance: " +
                          str(trunc(distance_traveled, 1)) + " m")
            rospy.sleep(self.rest_time)
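The update_initial_pose and shutdown callbacks registered in __init__ are not included in this snippet. A plausible sketch, assuming the usual pattern of caching the RViz pose and stopping the robot on exit:

    def update_initial_pose(self, initial_pose):
        """Cache the initial pose received from RViz."""
        self.initial_pose = initial_pose

    def shutdown(self):
        rospy.loginfo("Stopping the robot...")
        self.move_base.cancel_goal()
        rospy.sleep(2)
        self.cmd_vel_pub.publish(Twist())
        rospy.sleep(1)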
Exemple #60
0
def hppPoseToSotTransRPY(pose):
    """Convert an HPP pose (translation followed by a quaternion) to a
    translation plus roll/pitch/yaw list for the Stack of Tasks (SoT)."""
    from hpp import Quaternion
    q = Quaternion(pose[3:7])
    return pose[0:3] + q.toRPY().tolist()
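A quick usage sketch (placeholder input; the component order of the quaternion slice pose[3:7] is assumed to match whatever hpp's Quaternion constructor expects, with an identity rotation used here to sidestep the ambiguity):

    # Translation (0.1, 0.2, 0.3) plus an identity quaternion (assumed w-first).
    pose = [0.1, 0.2, 0.3, 1.0, 0.0, 0.0, 0.0]
    print(hppPoseToSotTransRPY(pose))  # -> [0.1, 0.2, 0.3, 0.0, 0.0, 0.0]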