def bayes2spencer(self, bayes_msg):
    spencer_msg = TrackedPersons()
    spencer_msg.header = bayes_msg.header

    for i, pose in enumerate(bayes_msg.poses):
        track = TrackedPerson()
        track.track_id = self.string2uint64(bayes_msg.uuids[i])
        # PoseWithCovariance
        track.pose.pose.position = pose.position
        track.pose.pose.orientation = pose.orientation
        # TwistWithCovariance
        track.twist.twist.linear = bayes_msg.velocities[i]
        # Data not in bayes. Not sure about these ...
        track.pose.covariance = (np.random.normal(0.3, 0.1) *
                                 np.identity(6)).flatten().tolist()
        track.twist.covariance = (np.random.normal(0.3, 0.1) *
                                  np.identity(6)).flatten().tolist()
        # we assume 0 here
        # track.twist.twist.angular
        track.is_occluded = False
        track.is_matched = False
        track.detection_id = self.string2uint64(bayes_msg.uuids[i])
        track.age = 0
        spencer_msg.tracks.append(track)

    return spencer_msg
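string2uint64 is referenced above but not defined in this excerpt. Below is a minimal sketch of one possible implementation, assuming the Bayes tracker publishes string UUIDs that have to be folded into the uint64 track_id/detection_id fields; the hashing scheme is an assumption, not the original mapping.

import hashlib

def string2uint64(self, uuid_string):
    # Hypothetical helper: hash the UUID string and keep the low 64 bits,
    # so the same UUID always maps to the same numeric id.
    return int(hashlib.md5(uuid_string.encode('utf-8')).hexdigest(), 16) & 0xFFFFFFFFFFFFFFFF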
def detectedPersonsCallback(trackAssociation, detectedPersons, syncronized_person_pub):
    age = rospy.Time.now() - detectedPersons.header.stamp
    output = "New detections with track association available (age of detections = %.2f sec)! Detection to track association: " % age.to_sec()

    syncronizedPersons = TrackedPersons()
    syncronizedPersons.header = detectedPersons.header

    if detectedPersons.detections:
        for detectedPerson in detectedPersons.detections:
            # The TrackSynchronizer invoking this callback guarantees that the detectedPersons message is buffered until a
            # track association is available for these detections (by comparing message timestamps of tracks and detections).
            detectionId = detectedPerson.detection_id
            trackId = trackAssociation.lookupTrackId(detectionId)  # <-- this is what this is all about

            output += "\n[det %d --> track %s]" % (detectionId, str(trackId))

            if trackId is not None:
                syncronizedPerson = TrackedPerson()
                syncronizedPerson.track_id = trackId
                syncronizedPerson.detection_id = detectionId
                syncronizedPersons.tracks.append(syncronizedPerson)

        rospy.loginfo(output)
        if len(syncronizedPersons.tracks) > 0:
            syncronized_person_pub.publish(syncronizedPersons)
    else:
        output += "Empty set of detections!"
def main():
    # Main code
    trackPublisher = rospy.Publisher('/spencer/perception/tracked_persons',
                                     TrackedPersons, queue_size=1)

    rospy.init_node('mock_tracks')
    rate = rospy.Rate(10)

    seqCounter = 0
    while not rospy.is_shutdown():
        trackedPersons = TrackedPersons()
        trackedPersons.header.seq = seqCounter
        trackedPersons.header.frame_id = "odom"
        trackedPersons.header.stamp = rospy.Time.now()

        # trackedPersons.tracks.append(
        #     createTrackedPerson( trackId, x, y, theta ) )
        trackedPersons.tracks.append(createTrackedPerson(1, 6.1, 4, 181))
        trackedPersons.tracks.append(createTrackedPerson(2, 6, 5.1, 168.7))
        trackedPersons.tracks.append(createTrackedPerson(3, 7, 7, 182))
        trackedPersons.tracks.append(createTrackedPerson(4, 6.8, 8.0, -168.7))
        trackedPersons.tracks.append(createTrackedPerson(5, 6.2, 8.7, -168.3))

        # some two loners
        trackedPersons.tracks.append(createTrackedPerson(6, 12.2, 11.7, 68.3))
        trackedPersons.tracks.append(createTrackedPerson(7, 12.1, 8.7, -45))

        trackPublisher.publish(trackedPersons)

        seqCounter += 1
        rate.sleep()
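createTrackedPerson is called by the mock publishers in this section but is not defined here. A minimal sketch, assuming theta is a heading in degrees in the publishing frame and that only the pose is filled in (field names follow the spencer_tracking_msgs/TrackedPerson message used above); treat it as an illustration, not the original helper.

import math
import tf
from geometry_msgs.msg import Quaternion
from spencer_tracking_msgs.msg import TrackedPerson

def createTrackedPerson(trackId, x, y, theta):
    # Hypothetical helper: place a person at (x, y) facing the given heading (degrees).
    trackedPerson = TrackedPerson()
    trackedPerson.track_id = trackId
    trackedPerson.pose.pose.position.x = x
    trackedPerson.pose.pose.position.y = y
    quaternion = tf.transformations.quaternion_from_euler(0, 0, math.radians(theta))
    trackedPerson.pose.pose.orientation = Quaternion(
        x=quaternion[0], y=quaternion[1], z=quaternion[2], w=quaternion[3])
    return trackedPerson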
def main():
    # Main code
    trackPublisher = rospy.Publisher('/spencer/perception/tracked_persons',
                                     TrackedPersons)
    #obstaclesPublisher = rospy.Publisher('/pedsim/static_obstacles', GridCells )

    rospy.init_node('mock_tracked_persons')
    rate = rospy.Rate(10)

    #obstacles = yaml.load(OBSTACLE_YAML)
    #obstacles = [ d for d in obstacles]

    seqCounter = 0
    while not rospy.is_shutdown():
        trackedPersons = TrackedPersons()
        trackedPersons.header.seq = seqCounter
        trackedPersons.header.frame_id = "odom"
        trackedPersons.header.stamp = rospy.Time.now()

        #trackedPersons.tracks.append( createTrackedPerson( trackId, x, y, theta ) )
        trackedPersons.tracks.append(createTrackedPerson(1, 2, 5, 270))
        trackedPersons.tracks.append(createTrackedPerson(2, 5, 3.5, 109))
        trackedPersons.tracks.append(createTrackedPerson(3, 6, 5, 90))
        trackedPersons.tracks.append(createTrackedPerson(4, 5, 7.2, 109))
        trackedPersons.tracks.append(createTrackedPerson(5, 9.2, 1.2, 71.56 - 90))
        trackedPersons.tracks.append(createTrackedPerson(6, 7.1, 2.5, 80.9097 - 90))
        trackedPersons.tracks.append(createTrackedPerson(7, 8.2, 7.6, 8))
        trackedPersons.tracks.append(createTrackedPerson(8, 7.1, 6.5, 10))
        trackedPersons.tracks.append(createTrackedPerson(9, 2.2, 1.8, 85.2364 - 90))
        trackedPersons.tracks.append(createTrackedPerson(10, 4.1, 1.9, 93.8141 - 90))
        trackedPersons.tracks.append(createTrackedPerson(11, 1.7, 9.3, 78.6901 - 90))
        trackedPersons.tracks.append(createTrackedPerson(12, 2.2, 7.5, 63.4349 - 90))

        trackPublisher.publish(trackedPersons)

        #obstacles['header'] = trackedPersons.header
        #obstaclesPublisher.publish( obstacles )

        seqCounter += 1
        rate.sleep()
def main():
    # Main code
    trackPublisher = rospy.Publisher('/spencer/perception/tracked_persons',
                                     TrackedPersons)

    rospy.init_node('mocktracks_info_screen')
    rate = rospy.Rate(10)

    seqCounter = 0
    while not rospy.is_shutdown():
        trackedPersons = TrackedPersons()
        trackedPersons.header.seq = seqCounter
        trackedPersons.header.frame_id = "odom"
        trackedPersons.header.stamp = rospy.Time.now()

        # trackedPersons.tracks.append(
        #     createTrackedPerson( trackId, x, y, theta ) )
        trackedPersons.tracks.append(createTrackedPerson(1, 3, 7, 270))
        trackedPersons.tracks.append(createTrackedPerson(2, 7, 5.5, 109))
        trackedPersons.tracks.append(createTrackedPerson(3, 8, 6.5, 90))
        trackedPersons.tracks.append(createTrackedPerson(4, 7, 9.2, 109))
        trackedPersons.tracks.append(createTrackedPerson(5, 7.5, 8.0, 109))

        # trackedPersons.tracks.append(
        #     createTrackedPerson(5, 9.2, 1.2, 71.56 - 90))
        # trackedPersons.tracks.append(
        #     createTrackedPerson(6, 7.1, 2.5, 80.9097 - 90))
        # trackedPersons.tracks.append(createTrackedPerson(7, 8.2, 7.6, 8))
        # trackedPersons.tracks.append(createTrackedPerson(8, 7.1, 6.5, 10))
        # trackedPersons.tracks.append(
        #     createTrackedPerson(9, 2.2, 1.8, 85.2364 - 90))
        # trackedPersons.tracks.append(
        #     createTrackedPerson(10, 4.1, 1.9, 93.8141 - 90))
        # trackedPersons.tracks.append(
        #     createTrackedPerson(11, 1.7, 9.3, 78.6901 - 90))
        # trackedPersons.tracks.append(
        #     createTrackedPerson(12, 2.2, 7.5, 63.4349 - 90))

        trackPublisher.publish(trackedPersons)

        seqCounter += 1
        rate.sleep()
def publish(self, trackedPersons, humanAttributes):
    humanAttributes.header = trackedPersons.header

    self.trackedPersonPublisher.publish(trackedPersons)
    self.humanAttributePublisher.publish(humanAttributes)

    # Separately publish the currently active track in the editor, if any
    if self.trackedPersonWithoutActivePublisher.get_num_connections() > 0 \
            or self.activeTrackedPersonPublisher.get_num_connections() > 0:
        activeTrackedPersons = TrackedPersons()
        activeTrackedPersons.header = trackedPersons.header

        trackedPersonsWithoutActive = TrackedPersons()
        trackedPersonsWithoutActive.header = trackedPersons.header

        for trackedPerson in trackedPersons.tracks:
            if self.editor is None or trackedPerson.track_id != self.editor.activeTrackId:
                trackedPersonsWithoutActive.tracks.append(trackedPerson)
            else:
                activeTrackedPersons.tracks.append(trackedPerson)

        self.activeTrackedPersonPublisher.publish(activeTrackedPersons)
        self.trackedPersonWithoutActivePublisher.publish(trackedPersonsWithoutActive)
def main():
    # Main code
    trackPublisher = rospy.Publisher('/spencer/perception/tracked_persons',
                                     TrackedPersons)
    #obstaclesPublisher = rospy.Publisher('/pedsim/static_obstacles', GridCells )

    rospy.init_node('mock_tracked_persons')
    rate = rospy.Rate(10)

    #obstacles = yaml.load(OBSTACLE_YAML)
    #obstacles = [ d for d in obstacles]

    seqCounter = 0
    while not rospy.is_shutdown():
        trackedPersons = TrackedPersons()
        trackedPersons.header.seq = seqCounter
        trackedPersons.header.frame_id = "odom"
        trackedPersons.header.stamp = rospy.Time.now()

        #trackedPersons.tracks.append( createTrackedPerson( trackId, x, y, theta ) )
        trackedPersons.tracks.append(createTrackedPerson(1, 5, 4, 90))
        trackedPersons.tracks.append(createTrackedPerson(2, 6, 5.45878, 90))
        trackedPersons.tracks.append(createTrackedPerson(3, 7.22, 5.70, 90))
        trackedPersons.tracks.append(createTrackedPerson(4, 2 + 7.22, 7.33, 90))
        trackedPersons.tracks.append(createTrackedPerson(5, 2 + 8.92, 8.42, 90))
        trackedPersons.tracks.append(createTrackedPerson(6, 2 + 7.92, 10.41, 90))
        trackedPersons.tracks.append(createTrackedPerson(7, 2 + 7.2, 9.44, 90))
        trackedPersons.tracks.append(createTrackedPerson(8, 2 + 7, 14 - 2, 90))
        trackedPersons.tracks.append(createTrackedPerson(9, 2 + 6, 15.4123 - 2, 90))
        trackedPersons.tracks.append(createTrackedPerson(10, 5 - 1, 18.595 - 5, 280))
        trackedPersons.tracks.append(createTrackedPerson(11, 5 - 1, 20 - 5, 270))
        trackedPersons.tracks.append(createTrackedPerson(12, 6 - 1, 21.5491 - 5, 240))
        trackedPersons.tracks.append(createTrackedPerson(13, 7.48044 - 1, 19 - 5, 90))
        trackedPersons.tracks.append(createTrackedPerson(14, 6, 24.5463, 45))
        trackedPersons.tracks.append(createTrackedPerson(15, 8, 28, 90))
        trackedPersons.tracks.append(createTrackedPerson(16, 10.4458, 23, 68))
        trackedPersons.tracks.append(createTrackedPerson(17, 11.5004, 27, 88))
        trackedPersons.tracks.append(createTrackedPerson(18, 14, 25.4389, 20))
        trackedPersons.tracks.append(createTrackedPerson(19, 15, 21, 90))
        trackedPersons.tracks.append(createTrackedPerson(20, 15, 22.4308, 92))
        trackedPersons.tracks.append(createTrackedPerson(21, 15.4676, 24, 91))
        trackedPersons.tracks.append(createTrackedPerson(22, 16.5423, 25.4178, 90))
        trackedPersons.tracks.append(createTrackedPerson(23, 18, 20, 90))
        trackedPersons.tracks.append(createTrackedPerson(24, 18.5532, 21.5011, 90))
        trackedPersons.tracks.append(createTrackedPerson(25, 15.4739, 16.5314, 45))
        trackedPersons.tracks.append(createTrackedPerson(26, 20, 25.5746, 90))
        trackedPersons.tracks.append(createTrackedPerson(27, 21.5327, 24, 90))
        trackedPersons.tracks.append(createTrackedPerson(28, 22, 26.4632, 90))
        trackedPersons.tracks.append(createTrackedPerson(29, 21, 18, 45))
        trackedPersons.tracks.append(createTrackedPerson(30, 23, 20.4335, 90))
        trackedPersons.tracks.append(createTrackedPerson(31, 23.4972, 21.4055, 90))
        trackedPersons.tracks.append(createTrackedPerson(32, 23.4025, 22.4749, 90))
        trackedPersons.tracks.append(createTrackedPerson(33, 24.5281, 18.5868, 54))
        trackedPersons.tracks.append(createTrackedPerson(34, 16.554, 3.40568 - 2, 94))
        trackedPersons.tracks.append(createTrackedPerson(35, 16, 6 - 1, 94))
        trackedPersons.tracks.append(createTrackedPerson(36, 20, 4, 0))
        trackedPersons.tracks.append(createTrackedPerson(37, 19, 12, 25))
        trackedPersons.tracks.append(createTrackedPerson(38, 23, 8, 50))
        trackedPersons.tracks.append(createTrackedPerson(39, 24, 10, 90))
        trackedPersons.tracks.append(createTrackedPerson(40, 25, 12, 120))
        trackedPersons.tracks.append(createTrackedPerson(41, 7.51, 22.41, 80))
        trackedPersons.tracks.append(createTrackedPerson(42, 8.21, 25.7, 81))
        trackedPersons.tracks.append(createTrackedPerson(43, 3.31, 27.7, 81))
        trackedPersons.tracks.append(createTrackedPerson(44, 11.421, 18.7, 75))
        trackedPersons.tracks.append(createTrackedPerson(45, 25.21, 27.0, 85))
        trackedPersons.tracks.append(createTrackedPerson(46, 18.23, 6.87, -91))
        trackedPersons.tracks.append(createTrackedPerson(47, 18.6, 8.90, -90))
        trackedPersons.tracks.append(createTrackedPerson(48, 20.4, 7.87, 85))
        trackedPersons.tracks.append(createTrackedPerson(49, 15.684, 10.74, 75))
        trackedPersons.tracks.append(createTrackedPerson(50, 15.72, 14.51, 70))

        trackPublisher.publish(trackedPersons)

        #obstacles['header'] = trackedPersons.header
        #obstaclesPublisher.publish( obstacles )

        seqCounter += 1
        rate.sleep()
def do_3d_tracking(self, detections_2d, detections_3d):
    start = time.time()
    #rospy.loginfo("Tracking frame")

    # convert_detections
    boxes_2d = []
    boxes_3d = []
    valid_3d = []
    features_2d = []
    features_3d = []

    dets_2d = sorted(detections_2d.detection2d_with_features,
                     key=lambda x: x.frame_det_id)
    dets_3d = sorted(detections_3d.detection3d_with_features,
                     key=lambda x: x.frame_det_id)

    i, j = 0, 0
    while i < len(dets_2d) and j < len(dets_3d):
        det_2d = dets_2d[i]
        det_3d = dets_3d[j]
        if det_2d.frame_det_id == det_3d.frame_det_id:
            i += 1
            j += 1
            valid_3d.append(det_3d.valid)
            boxes_2d.append(
                np.array([det_2d.x1, det_2d.y1, det_2d.x2, det_2d.y2, 1, -1, -1]))
            features_2d.append(torch.Tensor(det_2d.feature).to('cuda:0'))
            if det_3d.valid:
                boxes_3d.append(
                    np.array([
                        det_3d.x, det_3d.y, det_3d.z, det_3d.l, det_3d.h,
                        det_3d.w, det_3d.theta
                    ]))
                features_3d.append(torch.Tensor(det_3d.feature).to('cuda:0'))
            else:
                boxes_3d.append(None)
                features_3d.append(None)
        elif det_2d.frame_det_id < det_3d.frame_det_id:
            i += 1
        else:
            j += 1

    if not boxes_3d:
        boxes_3d = None

    features_3d, features_2d = combine_features(
        features_2d, features_3d, valid_3d, self.combination_model,
        depth_weight=self.depth_weight)
    detections = convert_detections(boxes_2d, features_3d, features_2d, boxes_3d)

    self.tracker.predict()
    self.tracker.update(None, detections)

    tracked_array = TrackedPersons()
    tracked_array.header.stamp = detections_3d.header.stamp
    tracked_array.header.frame_id = 'occam'

    for track in self.tracker.tracks:
        if not track.is_confirmed():
            continue
        #print('Confirmed track!')
        pose_msg = Pose()
        tracked_person_msg = TrackedPerson()
        tracked_person_msg.header.stamp = detections_3d.header.stamp
        tracked_person_msg.header.frame_id = 'occam'
        tracked_person_msg.track_id = track.track_id
        if track.time_since_update < 2:
            tracked_person_msg.is_matched = True
        else:
            tracked_person_msg.is_matched = False
        bbox = track.to_tlwh3d()
        covariance = track.get_cov().reshape(-1).tolist()
        pose_msg.position.x = bbox[0]
        pose_msg.position.y = bbox[1] - bbox[4] / 2
        pose_msg.position.z = bbox[2]
        pose_msg = PoseWithCovariance(pose=pose_msg, covariance=covariance)
        tracked_person_msg.pose = pose_msg
        tracked_array.tracks.append(tracked_person_msg)

    self.tracker_output_pub.publish(tracked_array)
def newSegmentationReceived(self, laserscan, laserscanSegmentation):
    currentStamp = laserscanSegmentation.header.stamp
    pointCount = len(laserscan.ranges)
    cartesianCoordinates = []

    # Required for velocity calculations
    if self._lastDataStamp is None:
        self._lastDataStamp = laserscanSegmentation.header.stamp

    # Build lookup of cartesian coordinates per laser point
    for pointIndex in xrange(0, pointCount):
        cartesianCoordinates.append(
            self.calculateCartesianCoordinates(laserscan, pointIndex))

    # For each labelled segment, create and append one TrackedPerson and DetectedPerson message
    trackedPersons = TrackedPersons(header=laserscanSegmentation.header)
    detectedPersons = DetectedPersons(header=laserscanSegmentation.header)

    for segment in laserscanSegmentation.segments:
        # Calculate centroid of tracked person
        centroid = numpy.array([0.0, 0.0, 0.0])
        for pointIndex in segment.measurement_indices:
            centroid += cartesianCoordinates[pointIndex]
        centroid /= float(len(segment.measurement_indices))

        # Lookup previous centroid (for velocity/twist calculation), assume zero velocity at track initialization
        if not segment.label in self._previousCentroidLookup:
            self._previousCentroidLookup[segment.label] = collections.deque()

        # Maintain centroid history
        centroidHistory = self._previousCentroidLookup[segment.label]
        while len(centroidHistory) > 20:
            centroidHistory.popleft()

        # Calculate average velocity over past few frames
        dt = 0
        velocity = accumulatedVelocity = numpy.array([0.0, 0.0, 0.0])
        if centroidHistory:
            previousCentroid = centroid
            previousStamp = currentStamp
            for historyStamp, historyCentroid in reversed(centroidHistory):
                accumulatedVelocity += previousCentroid - historyCentroid
                dt += abs((previousStamp - historyStamp).to_sec())
                previousCentroid = historyCentroid
                previousStamp = historyStamp
            velocity = accumulatedVelocity / dt

        centroidHistory.append((currentStamp, centroid))

        # Remember age of track
        if not segment.label in self._firstTrackEncounterLookup:
            self._firstTrackEncounterLookup[segment.label] = currentStamp

        # Initialize TrackedPerson message
        trackedPerson = TrackedPerson()
        trackedPerson.track_id = segment.label
        trackedPerson.age = currentStamp - self._firstTrackEncounterLookup[segment.label]
        trackedPerson.detection_id = self._detectionIdCounter
        trackedPerson.is_occluded = False

        # Set position
        LARGE_VARIANCE = 99999999
        trackedPerson.pose.pose.position.x = centroid[0]
        trackedPerson.pose.pose.position.y = centroid[1]
        trackedPerson.pose.pose.position.z = centroid[2]

        # Set orientation
        if dt > 0:
            yaw = math.atan2(velocity[1], velocity[0])
            quaternion = tf.transformations.quaternion_from_euler(0, 0, yaw)
            trackedPerson.pose.pose.orientation = Quaternion(
                x=quaternion[0], y=quaternion[1], z=quaternion[2], w=quaternion[3])

        trackedPerson.pose.covariance[2 * 6 + 2] = trackedPerson.pose.covariance[
            3 * 6 + 3] = trackedPerson.pose.covariance[
                4 * 6 + 4] = LARGE_VARIANCE  # z pos, roll, pitch

        # Set velocity
        if dt > 0:
            trackedPerson.twist.twist.linear.x = velocity[0]
            trackedPerson.twist.twist.linear.y = velocity[1]
            trackedPerson.twist.twist.linear.z = velocity[2]

        trackedPerson.twist.covariance[2 * 6 + 2] = trackedPerson.twist.covariance[
            3 * 6 + 3] = trackedPerson.twist.covariance[
                4 * 6 + 4] = trackedPerson.twist.covariance[
                    5 * 6 + 5] = LARGE_VARIANCE  # linear z, angular x, y, z

        # Append to list of tracked persons
        trackedPersons.tracks.append(trackedPerson)

        # Initialize DetectedPerson message by copying data from TrackedPerson
        detectedPerson = DetectedPerson()
        detectedPerson.detection_id = trackedPerson.detection_id
        detectedPerson.confidence = 1.0
        detectedPerson.pose = copy.deepcopy(trackedPerson.pose)
        detectedPerson.pose.pose.orientation = Quaternion()
        for i in xrange(0, 2):
            detectedPerson.pose.covariance[i * 6 + i] = 0.17 * 0.17
        detectedPerson.pose.covariance[5 * 6 + 5] = LARGE_VARIANCE  # yaw
        detectedPersons.detections.append(detectedPerson)

        self._detectionIdCounter += 1

    # Publish tracked persons
    self.trackedPersonsPublisher.publish(trackedPersons)
    self.detectedPersonsPublisher.publish(detectedPersons)

    self._lastDataStamp = laserscanSegmentation.header.stamp
def main():
    # Main code
    trackPublisher = rospy.Publisher('/spencer/perception/tracked_persons',
                                     TrackedPersons)

    rospy.init_node('mock_tracked_persons')
    rate = rospy.Rate(10)

    # create speeds
    speeds = np.random.normal(1.34, 0.26, size=50)

    while not rospy.is_shutdown():
        index = 0
        counter = 0
        tracked_persons = TrackedPersons()
        tracked_persons.header.seq = counter
        tracked_persons.header.frame_id = "odom"
        tracked_persons.header.stamp = rospy.Time.now()

        # (createTrackedPerson( trackId, x, y, theta, speed ) )
        tracked_persons.tracks.append(createTrackedPerson(1, 5, 4, 90, speeds[index]))
        tracked_persons.tracks.append(createTrackedPerson(2, 6, 5.45878, 270, speeds[index]))
        tracked_persons.tracks.append(createTrackedPerson(3, 1.3 + 7.22, 5.70, 90, speeds[index]))
        tracked_persons.tracks.append(createTrackedPerson(4, 7.22, 7.33, 290, speeds[index]))
        tracked_persons.tracks.append(createTrackedPerson(5, 2 + 8.92, 8.42, 90, speeds[index]))
        tracked_persons.tracks.append(createTrackedPerson(6, 2 + 7.92, 10.41, 90, speeds[index]))
        tracked_persons.tracks.append(createTrackedPerson(7, 2 + 7.2, 9.44, 90, speeds[index]))
        tracked_persons.tracks.append(createTrackedPerson(8, 2 + 7, 14 - 2, 90, speeds[index]))
        tracked_persons.tracks.append(createTrackedPerson(9, 2 + 5.5, 14.4123 - 2, 90, speeds[index]))
        tracked_persons.tracks.append(createTrackedPerson(10, 5 - 1, 18.595 - 5, 280, speeds[index]))
        tracked_persons.tracks.append(createTrackedPerson(11, 5 - 1, 20 - 5, 270, speeds[index]))
        tracked_persons.tracks.append(createTrackedPerson(12, 6 - 1, 21.5491 - 5, 240, speeds[index]))
        tracked_persons.tracks.append(createTrackedPerson(13, 7.48044 - 1, 19 - 5.7, 90, speeds[index]))
        tracked_persons.tracks.append(createTrackedPerson(14, 6, 24.5463, 45, speeds[index]))
        tracked_persons.tracks.append(createTrackedPerson(15, 8, 28, 90, speeds[index]))
        tracked_persons.tracks.append(createTrackedPerson(16, 10.4458, 23, 68, speeds[index]))
        tracked_persons.tracks.append(createTrackedPerson(17, 11.5004, 27, 88, speeds[index]))
        tracked_persons.tracks.append(createTrackedPerson(18, 14, 25.4389, 20, speeds[index]))
        tracked_persons.tracks.append(createTrackedPerson(19, 15, 21, 90, speeds[index]))
        tracked_persons.tracks.append(createTrackedPerson(20, 15, 22.4308, 92, speeds[index]))
        tracked_persons.tracks.append(createTrackedPerson(21, 15.4676, 24, 91, speeds[index]))
        tracked_persons.tracks.append(createTrackedPerson(22, 16.5423, 25.4178, 90, speeds[index]))
        tracked_persons.tracks.append(createTrackedPerson(23, 18, 20, 90, speeds[index]))
        tracked_persons.tracks.append(createTrackedPerson(24, 18.5532, 21.5011, 90, speeds[index]))
        tracked_persons.tracks.append(createTrackedPerson(25, 15.4739, 16.5314, 45, speeds[index]))
        tracked_persons.tracks.append(createTrackedPerson(26, 20, 25.5746, 90, speeds[index]))
        tracked_persons.tracks.append(createTrackedPerson(27, 21.5327, 24, 90, speeds[index]))
        tracked_persons.tracks.append(createTrackedPerson(28, 22, 26.4632, 90, speeds[index]))
        tracked_persons.tracks.append(createTrackedPerson(29, 21, 18, 45, speeds[index]))
        tracked_persons.tracks.append(createTrackedPerson(30, 23, 20.4335, 90, speeds[index]))
        tracked_persons.tracks.append(createTrackedPerson(31, 23.4972, 21.4055, 90, speeds[index]))
        tracked_persons.tracks.append(createTrackedPerson(32, 23.4025, 22.4749, 90, speeds[index]))
        tracked_persons.tracks.append(createTrackedPerson(33, 24.5281, 18.5868, 54, speeds[index]))
        tracked_persons.tracks.append(createTrackedPerson(34, 16.554, 3.40568 - 2, 94, speeds[index]))
        tracked_persons.tracks.append(createTrackedPerson(35, 14.8, 6 - 3, 94, speeds[index]))
        tracked_persons.tracks.append(createTrackedPerson(36, 20, 4, 0, speeds[index]))
        tracked_persons.tracks.append(createTrackedPerson(37, 19, 12, 25, speeds[index]))
        tracked_persons.tracks.append(createTrackedPerson(38, 23, 8, 50, speeds[index]))
        tracked_persons.tracks.append(createTrackedPerson(39, 24, 10, 90, speeds[index]))
        tracked_persons.tracks.append(createTrackedPerson(40, 25, 12, 120, speeds[index]))
        tracked_persons.tracks.append(createTrackedPerson(41, 7.51, 22.41, 80, speeds[index]))
        tracked_persons.tracks.append(createTrackedPerson(42, 8.21, 25.7, 81, speeds[index]))
        tracked_persons.tracks.append(createTrackedPerson(43, 3.31, 27.7, 81, speeds[index]))
        tracked_persons.tracks.append(createTrackedPerson(44, 11.421, 18.7, 75, speeds[index]))
        tracked_persons.tracks.append(createTrackedPerson(45, 25.21, 27.0, 85, speeds[index]))
        tracked_persons.tracks.append(createTrackedPerson(46, 18.23, 6.87, -91, speeds[index]))
        tracked_persons.tracks.append(createTrackedPerson(47, 18.6, 8.90, -90, speeds[index]))
        tracked_persons.tracks.append(createTrackedPerson(48, 20.4, 7.87, 85, speeds[index]))
        tracked_persons.tracks.append(createTrackedPerson(49, 15.684, 10.74, 75, speeds[index]))
        tracked_persons.tracks.append(createTrackedPerson(50, 15.72, 13.51, 70, speeds[index]))

        trackPublisher.publish(tracked_persons)

        counter += 1
        index += 1
        rate.sleep()
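The publisher above uses a five-argument variant, createTrackedPerson(trackId, x, y, theta, speed), which is also not defined in this section. A minimal sketch under the same assumptions as before (theta in degrees), additionally mapping the sampled walking speed onto the linear twist along the heading; it is an illustration, not the original helper.

def createTrackedPerson(trackId, x, y, theta, speed):
    # Hypothetical helper: pose at (x, y) with heading theta (degrees),
    # twist set to the given walking speed along that heading.
    trackedPerson = TrackedPerson()
    trackedPerson.track_id = trackId
    yaw = math.radians(theta)
    trackedPerson.pose.pose.position.x = x
    trackedPerson.pose.pose.position.y = y
    q = tf.transformations.quaternion_from_euler(0, 0, yaw)
    trackedPerson.pose.pose.orientation = Quaternion(x=q[0], y=q[1], z=q[2], w=q[3])
    trackedPerson.twist.twist.linear.x = speed * math.cos(yaw)
    trackedPerson.twist.twist.linear.y = speed * math.sin(yaw)
    return trackedPerson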
currentCycle = 0
currentAngle = 0.0
angleStep = 4.5 * pi / 180.
idShift = 0
updateRateHz = 10

# Test coordinate frame for checking if mapping into the fixed frame works correctly
frameOffset = (0, 0, 0)
frameOrientation = tf.transformations.quaternion_from_euler(0, 0, 0)  # 90.0 / 180.0 * pi

print("Sending test messages on " + observationTopic + " and " + trackTopic)

rate = rospy.Rate(updateRateHz)
while not rospy.is_shutdown():
    br.sendTransform(frameOffset, frameOrientation, rospy.Time.now(),
                     "test_tf_frame", "odom")

    trackedPersons = TrackedPersons()
    trackedPersons.header.frame_id = "/test_tf_frame"
    trackedPersons.header.stamp = rospy.Time.now()

    detectedPersons = DetectedPersons()
    detectedPersons.header = trackedPersons.header

    tracks = trackedPersons.tracks
    detections = detectedPersons.detections

    createTrackAndDetection(tracks, detections, idShift + 0, 3, currentAngle, 2.0)
    createTrackAndDetection(tracks, detections, idShift + 1, 7, currentAngle + pi / 2, 2.5)
    createTrackAndDetection(tracks, detections, idShift + 2, -1, currentAngle + pi / 1, 3.0)
    createTrackAndDetection(tracks, detections, idShift + 3, -1, currentAngle + pi * 1.5,
                            cos(currentAngle) * 3.5 + 7.0)
    createTrackAndDetection(tracks, detections, idShift + 4, 88, 0.0, 0.0)
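createTrackAndDetection is not defined in this excerpt either. The sketch below is only a guess at its behaviour, assuming the last two arguments are a polar angle and a radius around the test frame origin, and that a detection_id of -1 marks a track without an associated detection; every detail of this helper is an assumption.

import copy
import tf
from math import cos, sin, pi
from geometry_msgs.msg import Quaternion
from spencer_tracking_msgs.msg import TrackedPerson, DetectedPerson

def createTrackAndDetection(tracks, detections, track_id, detection_id, angle, radius):
    # Hypothetical helper: place one track on a circle around the origin and,
    # if detection_id >= 0, add a matching detection at the same pose.
    trackedPerson = TrackedPerson()
    trackedPerson.track_id = track_id
    trackedPerson.pose.pose.position.x = radius * cos(angle)
    trackedPerson.pose.pose.position.y = radius * sin(angle)
    q = tf.transformations.quaternion_from_euler(0, 0, angle + pi / 2)
    trackedPerson.pose.pose.orientation = Quaternion(x=q[0], y=q[1], z=q[2], w=q[3])
    if detection_id >= 0:
        trackedPerson.detection_id = detection_id
        detectedPerson = DetectedPerson()
        detectedPerson.detection_id = detection_id
        detectedPerson.confidence = 1.0
        detectedPerson.pose = copy.deepcopy(trackedPerson.pose)
        detections.append(detectedPerson)
    else:
        trackedPerson.is_occluded = True
    tracks.append(trackedPerson)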
def createTrackedPersons(self, timestamp):
    trackedPersons = TrackedPersons()
    trackedPersons.header.stamp = timestamp
    trackedPersons.header.frame_id = self.trackingFrame

    for trackKey, track in self.database.getTracks().iteritems():
        waypoints = self.database.getWaypoints(track)
        if waypoints:
            firstWaypoint = waypoints[0]
            lastWaypoint = waypoints[-1]

            # Check if we are before the first or past the last waypoint
            beforeFirstWaypoint = timestamp.to_sec() < firstWaypoint["timestamp"]
            behindLastWaypoint = timestamp.to_sec() > lastWaypoint["timestamp"]

            if not beforeFirstWaypoint and not behindLastWaypoint:
                trackedPerson = TrackedPerson()
                trackedPerson.track_id = self.database.getTrackId(trackKey)
                trackedPerson.is_occluded = False
                trackedPerson.age = rospy.Time(firstWaypoint["timestamp"])

                # Find adjacent waypoints for linear interpolation of tracked person's position
                previousWaypoint = nextWaypoint = None
                for waypoint in waypoints:
                    previousWaypoint = nextWaypoint
                    nextWaypoint = waypoint
                    if nextWaypoint["timestamp"] > timestamp.to_sec():
                        break

                try:
                    # If there is only one waypoint for this track, previousWaypoint will be None
                    nextPosition = self.transformer.toCommonFrame(
                        nextWaypoint, self.trackingFrame)
                    nextTimestamp = nextWaypoint["timestamp"]

                    # Determine waypoint segment duration and distance
                    if previousWaypoint is not None:
                        previousPosition = self.transformer.toCommonFrame(
                            previousWaypoint, self.trackingFrame)
                        previousTimestamp = previousWaypoint["timestamp"]

                        segmentDuration = nextTimestamp - previousTimestamp
                        alpha = (timestamp.to_sec() - previousTimestamp) / segmentDuration
                        diffVector = nextPosition - previousPosition
                    else:
                        previousPosition = nextPosition
                        segmentDuration = 1.0
                        alpha = 0.0
                        diffVector = numpy.array([0, 0])
                except tf.Exception:
                    continue

                # Linear interpolation
                interpolatedPosition = previousPosition + diffVector * alpha
                velocity = diffVector / segmentDuration

                trackedPerson.pose.pose.position.x, trackedPerson.pose.pose.position.y = interpolatedPosition
                trackedPerson.pose.covariance[2 * 6 + 2] = trackedPerson.pose.covariance[
                    3 * 6 + 3] = trackedPerson.pose.covariance[4 * 6 + 4] = 99999999

                yaw = math.atan2(velocity[1], velocity[0])
                quaternion = tf.transformations.quaternion_from_euler(0, 0, yaw)
                trackedPerson.pose.pose.orientation = Quaternion(
                    x=quaternion[0], y=quaternion[1], z=quaternion[2], w=quaternion[3])

                trackedPerson.twist.twist.linear.x, trackedPerson.twist.twist.linear.y = velocity

                trackedPersons.tracks.append(trackedPerson)

    return trackedPersons
def publish_obstacles(self):
    with self.lock:
        if self.transport_data is None:
            return
        # non-leg obstacles
        timestamp, xx, yy, clusters, is_legs, cogs, radii, tf_rob_in_fix = self.transport_data

        # tracks
        track_ids = []
        tracks_latest_pos, tracks_color = [], []
        tracks_in_frame, tracks_velocities = [], []
        tracks_radii = []
        for trackid in self.tracker.active_tracks:
            track = self.tracker.active_tracks[trackid]
            xy = np.array(track.pos_history[-1])
            is_track_in_frame = True
            if trackid in self.tracker.latest_matches:
                color = (0., 1., 0., 1.)  # green
            elif trackid in self.tracker.new_tracks:
                color = (0., 0., 1., 1.)  # blue
            else:
                color = (0.7, 0.7, 0.7, 1.)
                is_track_in_frame = False
            track_ids.append(trackid)
            tracks_latest_pos.append(xy)
            tracks_color.append(color)
            tracks_in_frame.append(is_track_in_frame)
            tracks_velocities.append(track.estimate_velocity())
            tracks_radii.append(track.avg_radius())

        # publish trackedpersons
        from spencer_tracking_msgs.msg import TrackedPersons, TrackedPerson
        pub = rospy.Publisher("/tracked_persons", TrackedPersons, queue_size=1)
        tp_msg = TrackedPersons()
        tp_msg.header.frame_id = self.kFixedFrame
        tp_msg.header.stamp = timestamp
        for trackid, xy, in_frame, vel, radius in zip(track_ids, tracks_latest_pos,
                                                      tracks_in_frame, tracks_velocities,
                                                      tracks_radii):
            # if not in_frame:
            #     continue
            tp = TrackedPerson()
            tp.track_id = trackid
            tp.is_occluded = False
            tp.is_matched = in_frame
            tp.detection_id = trackid
            tp.pose.pose.position.x = xy[0]
            tp.pose.pose.position.y = xy[1]
            heading_angle = np.arctan2(vel[1], vel[0])  # guess heading from velocity
            from geometry_msgs.msg import Quaternion
            tp.pose.pose.orientation = Quaternion(
                *tf.transformations.quaternion_from_euler(0, 0, heading_angle))
            tp.twist.twist.linear.x = vel[0]
            tp.twist.twist.linear.y = vel[1]
            tp.twist.twist.angular.z = 0  # unknown
            tp_msg.tracks.append(tp)
        pub.publish(tp_msg)

        pub = rospy.Publisher('/obstacles', ObstacleArrayMsg, queue_size=1)
        obstacles_msg = ObstacleArrayMsg()
        obstacles_msg.header.stamp = timestamp
        obstacles_msg.header.frame_id = self.kFixedFrame
        for trackid, xy, in_frame, vel, radius in zip(track_ids, tracks_latest_pos,
                                                      tracks_in_frame, tracks_velocities,
                                                      tracks_radii):
            if not in_frame:
                continue
            # Add point obstacle
            obst = ObstacleMsg()
            obst.id = trackid
            obst.polygon.points = [Point32()]
            obst.polygon.points[0].x = xy[0]
            obst.polygon.points[0].y = xy[1]
            obst.polygon.points[0].z = 0
            obst.radius = radius
            yaw = np.arctan2(vel[1], vel[0])
            q = tf.transformations.quaternion_from_euler(0, 0, yaw)
            obst.orientation = Quaternion(*q)
            obst.velocities.twist.linear.x = vel[0]
            obst.velocities.twist.linear.y = vel[1]
            obst.velocities.twist.linear.z = 0
            obst.velocities.twist.angular.x = 0
            obst.velocities.twist.angular.y = 0
            obst.velocities.twist.angular.z = 0
            obstacles_msg.obstacles.append(obst)
        pub.publish(obstacles_msg)

        pub = rospy.Publisher('/close_nonleg_obstacles', ObstacleArrayMsg, queue_size=1)
        MAX_DIST_STATIC_CLUSTERS_M = 3.
        cogs_in_fix = []
        for i in range(len(cogs)):
            cogs_in_fix.append(apply_tf(cogs[i], Pose2D(tf_rob_in_fix)))
        obstacles_msg = ObstacleArrayMsg()
        obstacles_msg.header.stamp = timestamp
        obstacles_msg.header.frame_id = self.kFixedFrame
        for c, cog_in_fix, cog_in_rob, r, is_leg in zip(clusters, cogs_in_fix, cogs,
                                                        radii, is_legs):
            if np.linalg.norm(cog_in_rob) < self.kRobotRadius:
                continue
            if len(c) < self.kMinClusterSize:
                continue
            # leg obstacles are already published in the tracked obstacles topic
            if is_leg:
                continue
            # close obstacles only
            if np.linalg.norm(cog_in_rob) > MAX_DIST_STATIC_CLUSTERS_M:
                continue
            # Add point obstacle
            obst = ObstacleMsg()
            obst.id = 0
            obst.polygon.points = [Point32()]
            obst.polygon.points[0].x = cog_in_fix[0]
            obst.polygon.points[0].y = cog_in_fix[1]
            obst.polygon.points[0].z = 0
            obst.radius = r
            yaw = 0
            q = tf.transformations.quaternion_from_euler(0, 0, yaw)
            from geometry_msgs.msg import Quaternion
            obst.orientation = Quaternion(*q)
            obst.velocities.twist.linear.x = 0
            obst.velocities.twist.linear.y = 0
            obst.velocities.twist.linear.z = 0
            obst.velocities.twist.angular.x = 0
            obst.velocities.twist.angular.y = 0
            obst.velocities.twist.angular.z = 0
            obstacles_msg.obstacles.append(obst)
        pub.publish(obstacles_msg)

        pub = rospy.Publisher('/obstacle_markers', MarkerArray, queue_size=1)
        # delete all markers
        ma = MarkerArray()
        mk = Marker()
        mk.header.frame_id = self.kFixedFrame
        mk.ns = "obstacles"
        mk.id = 0
        mk.type = 0  # ARROW
        mk.action = 3  # deleteall
        ma.markers.append(mk)
        pub.publish(ma)

        # publish tracks
        ma = MarkerArray()
        id_ = 0
        # track endpoint
        for trackid, xy, in_frame, vel in zip(track_ids, tracks_latest_pos,
                                              tracks_in_frame, tracks_velocities):
            if not in_frame:
                continue
            normvel = np.linalg.norm(vel)
            if normvel == 0:
                continue
            mk = Marker()
            mk.header.frame_id = self.kFixedFrame
            mk.ns = "tracks"
            mk.id = trackid
            mk.type = 0  # ARROW
            mk.action = 0
            mk.scale.x = np.linalg.norm(vel)
            mk.scale.y = 0.1
            mk.scale.z = 0.1
            mk.color.r = color[0]
            mk.color.g = color[1]
            mk.color.b = color[2]
            mk.color.a = color[3]
            mk.frame_locked = True
            mk.pose.position.x = xy[0]
            mk.pose.position.y = xy[1]
            mk.pose.position.z = 0.03
            yaw = np.arctan2(vel[1], vel[0])
            q = tf.transformations.quaternion_from_euler(0, 0, yaw)
            mk.pose.orientation = Quaternion(*q)
            ma.markers.append(mk)
        pub.publish(ma)