def gen_blog_entry(self, roi_id, obj_id, objs):
    """Create a Robblog 'change detection' entry for a region.

    Builds a markdown report for the region identified by *roi_id*,
    draws the contour of each changed object's mask onto the full scene
    image, stores the annotated images in the blog collection and
    finally inserts the RobblogEntry itself.

    :param roi_id: id of the region of interest the changes occurred in
    :param obj_id: id of the detected object (currently unused here)
    :param objs: detected change objects; each must provide
        ``image_mask`` and ``image_full`` sensor_msgs/Image fields
    """
    print('Region: ' + self.get_roi_name(roi_id))
    time = dt.fromtimestamp(int(rospy.get_time()))
    body = '### CHANGE DETECTION REPORT\n\n'
    body += '- **Region:** ' + self.get_roi_name(roi_id) + '\n\n'
    body += '- **Time:** ' + str(time) + '\n\n'

    # Create some blog entries
    msg_store = MessageStoreProxy(collection=self.blog_collection)
    robblog_path = roslib.packages.get_pkg_dir('soma_utils')

    # One CvBridge instance is enough for all conversions below
    # (previously a second one was constructed inside the loop).
    bridge = CvBridge()
    for idx, obj in enumerate(objs):
        # Extract the contour of the object mask ...
        im = bridge.imgmsg_to_cv2(obj.image_mask, desired_encoding="bgr8")
        imgray = cv2.cvtColor(im, cv2.COLOR_BGR2GRAY)
        ret, thresh = cv2.threshold(imgray, 1, 1, 1)
        contours, hierarchy = cv2.findContours(thresh, cv2.RETR_TREE,
                                               cv2.CHAIN_APPROX_SIMPLE)
        # ... and draw it in red on the full scene image.
        cv_image = bridge.imgmsg_to_cv2(obj.image_full, desired_encoding="bgr8")
        cv2.drawContours(cv_image, contours, -1, (0, 0, 255), 2)
        full_scene_contour = bridge.cv2_to_imgmsg(cv_image)
        img_id = msg_store.insert(full_scene_contour)

        body += '<font color="red">Detected change (' + str(idx+1) + '/' + str(len(objs)) + '):</font>\n\n![My helpful screenshot](ObjectID(%s))\n\n' % img_id

    # Use get_roi_name() for the title as well, consistent with the body
    # above (previously the title indexed self.roi_name directly,
    # bypassing the accessor used everywhere else).
    e = RobblogEntry(title=str(time) + " Change Detection - " +
                     self.get_roi_name(roi_id), body=body)
    msg_store.insert(e)
def add_topological_node(self, node_name, node_pose, add_close_nodes, dist=8.0):
    """Create a topological node at *node_pose* and store it in mongodb.

    :param node_name: name for the new node; if falsy a fresh name is generated
    :param node_pose: geometry_msgs Pose of the node
    :param add_close_nodes: when True, bidirectional move_base edges are
        created to every existing node closer than *dist*
    :param dist: connection radius in metres (default 8.0)
    :return: True on success, False if a node with that name already exists
    """
    # Get new node name
    if node_name:
        name = node_name
    else:
        name = self.get_new_name()
    rospy.loginfo('Creating Node: '+name)

    if name in self.names:
        rospy.logerr("Node already exists, try another name")
        return False

    # Create Message store
    msg_store = MessageStoreProxy(collection='topological_maps')

    meta = {}
    meta["map"] = self.nodes.map
    meta["pointset"] = self.nodes.name
    meta["node"] = name

    node = strands_navigation_msgs.msg.TopologicalNode()
    node.name = name
    node.map = self.nodes.map
    # NOTE(review): meta uses self.nodes.name while the node itself uses
    # self.name -- presumably both hold the same pointset name; confirm.
    node.pointset = self.name
    node.pose = node_pose
    node.yaw_goal_tolerance = self.yaw_goal_tolerance
    node.xy_goal_tolerance = self.xy_goal_tolerance
    node.localise_by_topic = ''
    # Influence-zone polygon surrounding the node.
    vertices=self.generate_circle_vertices()
    for j in vertices :
        v = strands_navigation_msgs.msg.Vertex()
        v.x = float(j[0])
        v.y = float(j[1])
        node.verts.append(v)

    if add_close_nodes:
        # Collect all nearby nodes (except the charging point) ...
        close_nodes = []
        for i in self.nodes.nodes:
            ndist = node_dist(node, i)
            if ndist < dist :
                if i.name != 'ChargingPoint':
                    close_nodes.append(i.name)
        # ... attach outgoing move_base edges on the new node ...
        for i in close_nodes:
            e = strands_navigation_msgs.msg.Edge()
            e.node = i
            e.action = 'move_base'
            eid = '%s_%s' %(node.name, i)
            e.edge_id = eid
            e.top_vel =0.55
            e.map_2d = node.map
            node.edges.append(e)
        # ... and the reverse edges on the existing nodes.
        for i in close_nodes:
            self.add_edge(i, node.name, 'move_base', '')

    msg_store.insert(node,meta)
    return True
def gen_blog_entry(self, roi_id, uuid): print 'Region: ' + self.get_roi_name(roi_id) time = dt.fromtimestamp(int(rospy.get_time())) body = '### INTRUSION DETECTION REPORT\n\n' body += '- **Region:** ' + self.get_roi_name(roi_id) + '\n\n' body += '- **Person UUID:** ' + str(uuid) + '\n\n' body += '- **Time:** ' + str(time) + '\n\n' #body += '- **Summary**: <font color="green">ALLOWED ITEMS (' + str(len(pos_objs)) + ')</font>, <font color="red">NOT-ALLOWED ITEMS (' + str(len(neg_objs)) + ')</font>\n\n' # # Create some blog entries msg_store = MessageStoreProxy(collection=self.blog_collection) robblog_path = roslib.packages.get_pkg_dir('soma_utils') img = rospy.wait_for_message('/upper_body_detector/image', Image, 5) bridge = CvBridge() cv_image = bridge.imgmsg_to_cv2(img, desired_encoding="bgr8") ros_img = bridge.cv2_to_imgmsg(cv_image) img_id = msg_store.insert(ros_img) body += '<font color="red">Detected person:</font>\n\n![My helpful screenshot](ObjectID(%s))\n\n' % img_id e = RobblogEntry(title=str(time) + " Intrusion Detection - " + self.get_roi_name(roi_id), body= body ) msg_store.insert(e)
def gen_blog_entry(self, roi_id, uuid): print 'Region: ' + self.get_roi_name(roi_id) time = dt.fromtimestamp(int(rospy.get_time())) body = '### INTRUSION DETECTION REPORT\n\n' body += '- **Region:** ' + self.get_roi_name(roi_id) + '\n\n' body += '- **Person UUID:** ' + str(uuid) + '\n\n' body += '- **Time:** ' + str(time) + '\n\n' #body += '- **Summary**: <font color="green">ALLOWED ITEMS (' + str(len(pos_objs)) + ')</font>, <font color="red">NOT-ALLOWED ITEMS (' + str(len(neg_objs)) + ')</font>\n\n' # # Create some blog entries msg_store = MessageStoreProxy(collection=self.blog_collection) robblog_path = roslib.packages.get_pkg_dir('soma_utils') img = rospy.wait_for_message('/upper_body_detector/image', Image, 5) bridge = CvBridge() cv_image = bridge.imgmsg_to_cv2(img, desired_encoding="bgr8") ros_img = bridge.cv2_to_imgmsg(cv_image) img_id = msg_store.insert(ros_img) body += '<font color="red">Detected person:</font>\n\n![My helpful screenshot](ObjectID(%s))\n\n' % img_id e = RobblogEntry(title=str(time) + " Intrusion Detection - " + self.get_roi_name(roi_id), body=body) msg_store.insert(e)
def publish_stats(self):
    """Publish navigation statistics for the last traversal and archive a
    copy, with indexing metadata, in the nav_stats collection."""
    date_str = self.stat.date_at_node.strftime('%A, %B %d %Y, at %H:%M:%S hours')

    stat_msg = NavStatistics()
    stat_msg.edge_id = self.stat.edge_id
    stat_msg.status = self.stat.status
    stat_msg.origin = self.stat.origin
    stat_msg.target = self.stat.target
    stat_msg.topological_map = self.stat.topological_map
    stat_msg.final_node = self.stat.final_node
    stat_msg.time_to_waypoint = self.stat.time_to_wp
    stat_msg.operation_time = self.stat.operation_time
    stat_msg.date_started = self.stat.get_start_time_str()
    stat_msg.date_at_node = date_str
    stat_msg.date_finished = self.stat.get_finish_time_str()
    self.stats_pub.publish(stat_msg)

    # Metadata used to index the stat in the message store.
    meta = {
        "type": "Topological Navigation Stat",
        "epoch": calendar.timegm(self.stat.date_at_node.timetuple()),
        "date": date_str,
        "pointset": self.stat.topological_map,
    }
    MessageStoreProxy(collection='nav_stats').insert(stat_msg, meta)
def publish_topological_map():
    """Insert a generated cross-shaped test map into mongodb, publish it,
    and return the dict of generated nodes keyed by node name."""
    # Build a small cross-shaped test map.
    grid_width = 5
    grid_height = 5
    separation = 10.0
    test_nodes = topological_navigation.testing.create_cross_map(
        width=grid_width, height=grid_height, nodeSeparation=separation)

    # Store every generated node in the topological_maps collection.
    map_name = 'test_top_map'
    msg_store = MessageStoreProxy(collection='topological_maps')
    meta = {'map': map_name, 'pointset': map_name}
    for (nodeName, node) in test_nodes.iteritems():
        meta["node"] = nodeName
        node.map = map_name
        node.pointset = map_name
        msg_store.insert(node, meta)

    # and publish the map
    ps = map_publisher(map_name)
    return test_nodes
class logTweets(object):
    """Logs every tweet published on /strands_tweets/tweet to mongodb,
    together with a snapshot from the head camera (rgb + depth)."""

    def __init__(self, name):
        rospy.loginfo(" ...starting")
        self.msg_sub = rospy.Subscriber('/strands_tweets/tweet', strands_tweets.msg.Tweeted, self.tweet_callback, queue_size=1)
        rospy.loginfo(" ...done")
        rospy.spin()

    def tweet_callback(self, msg):
        """Capture camera images, attach the tweet text and store the result."""
        self.msg_store = MessageStoreProxy(collection='twitter_log')
        to_save = strands_tweets.msg.Tweeted()
        # Best effort: a missing image is logged but does not abort the insert.
        try:
            to_save.depth = rospy.wait_for_message('/head_xtion/depth/image_rect_meters', sensor_msgs.msg.Image, timeout=1.0)
        except rospy.ROSException:
            rospy.logwarn("Failed to get camera depth Image")
        try:
            to_save.photo = rospy.wait_for_message('/head_xtion/rgb/image_color', sensor_msgs.msg.Image, timeout=1.0)
        except rospy.ROSException:
            rospy.logwarn("Failed to get camera rgb Image")
        to_save.text = msg.text
        meta = {}
        meta["Description"] = "copy of tweeted images"
        # Bug fix: store the message assembled above (with the captured
        # images); previously the incoming msg was stored and to_save was
        # silently discarded.
        self.msg_store.insert(to_save, meta)
def add_topological_node(self, node_name, node_pose): dist = 8.0 #Get New Node Name if node_name: name = node_name else: name = self.get_new_name() rospy.loginfo('Creating Node: '+name) if name in self.names: rospy.logerr("Node already exists, try another name") return False #Create Message store msg_store = MessageStoreProxy(collection='topological_maps') meta = {} meta["map"] = self.nodes.map meta["pointset"] = self.nodes.name meta["node"] = name node = strands_navigation_msgs.msg.TopologicalNode() node.name = name node.map = self.nodes.map node.pointset = self.name node.pose = node_pose node.yaw_goal_tolerance = 0.1 node.xy_goal_tolerance = 0.3 node.localise_by_topic = '' vertices=[(0.69, 0.287), (0.287, 0.69), (-0.287, 0.69), (-0.69, 0.287), (-0.69, -0.287), (-0.287, -0.69), (0.287, -0.69), (0.69, -0.287)] for j in vertices : v = strands_navigation_msgs.msg.Vertex() v.x = float(j[0]) v.y = float(j[1]) node.verts.append(v) close_nodes = [] for i in self.nodes.nodes : ndist = node_dist(node, i) if ndist < dist : if i.name != 'ChargingPoint' : close_nodes.append(i.name) for i in close_nodes: e = strands_navigation_msgs.msg.Edge() e.node = i e.action = 'move_base' eid = '%s_%s' %(node.name, i) e.edge_id = eid e.top_vel =0.55 e.map_2d = node.map node.edges.append(e) for i in close_nodes: self.add_edge(i, node.name, 'move_base', '') msg_store.insert(node,meta) return True
class PeriodicReplicatorClient(Thread):
    """Background thread that periodically replicates (moves) the configured
    mongodb collections to a remote datacentre via the
    move_mongodb_entries action server, recording each run in the
    'replication' collection."""

    def __init__(self):
        Thread.__init__(self)
        self.dead = Event()
        # Seconds between replication runs (default: 1 day).
        self.interval = rospy.get_param("mongodb_replication_interval", 60 * 60 * 24)
        self.delete_after_move = rospy.get_param("mongodb_replication_delete_after_move", False)
        self.replicate_interval = rospy.Duration(self.interval)
        self.database = rospy.get_param("robot/database")
        # Collections to replicate: command-line args plus the robot name.
        self.collections = sys.argv[2:]
        try:
            self.collections.append(rospy.get_param("robot/name"))
        except KeyError as e:
            rospy.logerr("specify param \"robot/name\" (e.g. pr1012, olive)")
            exit(1)
        self.periodic = rospy.get_param("~periodic", True)
        self.date_msg_store = MessageStoreProxy(database=self.database, collection="replication")

    def run(self):
        # Loop until cancel() sets the event; wait() doubles as the sleep.
        while not self.dead.wait(self.interval):
            move_before = self.time_after_last_replicate_date()
            self.move_entries(move_before)
            self.insert_replicate_date()

    def time_after_last_replicate_date(self):
        """Return the age (rospy.Duration) of the newest replication record,
        plus a one-minute margin; falls back to 1 day on any error."""
        delta = 60 * 60 * 24  # 1 day
        try:
            last_replicated = self.date_msg_store.query(StringList._type, single=True, sort_query=[("$natural", -1)])
            date = last_replicated[1]["inserted_at"]
            rospy.loginfo("last replicated at %s", date)
            # Bug fix: use total_seconds(); timedelta.seconds ignores whole
            # days, so the age was under-reported whenever more than a day
            # had passed since the last replication.
            delta = int((dt.datetime.now() - date).total_seconds()) + 60
        except Exception as e:
            rospy.logwarn("failed to search last replicated date from database: %s", e)
        finally:
            return rospy.Duration(delta)

    def insert_replicate_date(self):
        """Record 'now' as the latest replication run."""
        try:
            self.date_msg_store.insert(StringList(self.collections))
        except Exception as e:
            rospy.logwarn("failed to insert last replicate date to database: %s", e)

    def move_entries(self, move_before):
        """Ask the move_mongodb_entries action server to replicate all
        configured collections older than *move_before*."""
        client = actionlib.SimpleActionClient('move_mongodb_entries', MoveEntriesAction)
        client.wait_for_server()
        goal = MoveEntriesGoal(database=self.database,
                               collections=StringList(self.collections),
                               move_before=move_before,
                               delete_after_move=self.delete_after_move)
        client.send_goal(goal, feedback_cb=self.feedback_cb)
        client.wait_for_result()

    def feedback_cb(self, feedback):
        rospy.loginfo(feedback)

    def cancel(self):
        # Wakes up run() and makes it exit.
        self.dead.set()
def save_grid_cb(self, req): print req print req.grid meta = {} msg_store = MessageStoreProxy(collection='fremengrids') #change this msg_store.insert(req.grid, meta) return True
def add_node(self, name, dist, pos, std_action):
    """Add topological node *name* at pose *pos*, connect it with
    *std_action* edges to every node within *dist*, then reload the map."""
    rospy.loginfo('Creating Node: ' + name)
    msg_store = MessageStoreProxy(collection='topological_maps')

    if any(existing.name == name for existing in self.nodes):
        rospy.logerr("Node already exists, try another name")
    else:
        rospy.loginfo('Adding node: ' + name)
        meta = {"map": self.map, "pointset": self.name, "node": name}

        node = TopologicalNode()
        node.name = name
        node.map = self.map
        node.pointset = self.name
        node.pose = pos
        # Octagonal influence zone around the node.
        for vx, vy in [(0.69, 0.287), (0.287, 0.69), (-0.287, 0.69),
                       (-0.69, 0.287), (-0.69, -0.287), (-0.287, -0.69),
                       (0.287, -0.69), (0.69, -0.287)]:
            v = Vertex()
            v.x = float(vx)
            v.y = float(vy)
            node.verts.append(v)

        cx = node.pose.position.x
        cy = node.pose.position.y
        # Every node within range (except the charging point) gets linked.
        close_nodes = [other.name for other in self.nodes
                       if other._get_distance(cx, cy) < dist
                       and other.name != 'ChargingPoint']

        # Outgoing edges on the new node ...
        for neighbour in close_nodes:
            edge = Edge()
            edge.node = neighbour
            edge.action = std_action
            node.edges.append(edge)

        msg_store.insert(node, meta)

        # ... and the reverse edges on the existing nodes.
        for neighbour in close_nodes:
            self.add_edge(neighbour, name, std_action)

        # need to reload the map when a node is added, for consistency
        self.loadMap(self.name)
def test_add_message_no_wait(self):
    """Inserts issued with wait=False must eventually show up in queries."""
    proxy = MessageStoreProxy()
    before = len(proxy.query(Pose._type, meta_query={"no_wait": True}))
    pose = Pose(Point(0, 1, 2), Quaternion(0, 0, 0, 1))
    for _ in range(10):
        proxy.insert(pose, meta={"no_wait": True}, wait=False)
    # Give the fire-and-forget inserts time to land.
    rospy.sleep(2)
    after = len(proxy.query(Pose._type, meta_query={"no_wait": True}))
    self.assertTrue(after > before)
def test_add_message(self): msg_store = MessageStoreProxy() POSE_NAME = "__test__pose__" p = Pose(Point(0, 1, 2), Quaternion(0, 0, 0, 1)) # insert a pose object with a name msg_store.insert_named(POSE_NAME, p) # get it back with a name stored, meta = msg_store.query_named(POSE_NAME, Pose._type) self.assertIsInstance(stored, Pose) self.assertEqual(stored.position.x, p.position.x) self.assertEqual(stored.position.y, p.position.y) self.assertEqual(stored.position.z, p.position.z) self.assertEqual(stored.orientation.x, p.orientation.x) self.assertEqual(stored.orientation.y, p.orientation.y) self.assertEqual(stored.orientation.z, p.orientation.z) self.assertEqual(stored.orientation.w, p.orientation.w) p.position.x = 666 msg_store.update_named(POSE_NAME, p) # get it back with a name updated = msg_store.query_named(POSE_NAME, Pose._type)[0] self.assertEqual(updated.position.x, p.position.x) # # try to get it back with an incorrect name wrong_name = "thid name does not exist in the datacentre" none_item = msg_store.query_named(wrong_name, Pose._type)[0] self.assertIsNone(none_item) # # get all non-existant typed objects, so get an empty list back none_query = msg_store.query( "not my type") self.assertEqual(len(none_query), 0) # add 100 query and sort by date inserted. for i in range(100): p = Pose(Point(0, 0, 0), Quaternion(i, 0, 100, 1)) msg_store.insert(p) result = msg_store.query(Pose._type, message_query={ 'orientation.z': {'$gt': 10} }, sort_query=[("$natural", -1)]) self.assertEqual(len(result), 100) self.assertEqual(result[0][0].orientation.x, 99) # must remove the item or unittest only really valid once print meta["_id"] print str(meta["_id"]) deleted = msg_store.delete(str(meta["_id"])) self.assertTrue(deleted)
def add_node(self, name, dist, pos, std_action) :
    """Add topological node *name* at pose *pos* and connect it (with
    *std_action* edges) to every existing node closer than *dist*.

    Only logs an error if the name is already taken; otherwise the node
    is stored in the topological_maps collection.
    """
    rospy.loginfo('Creating Node: '+name)
    msg_store = MessageStoreProxy(collection='topological_maps')

    # Check whether the name is already taken.
    found = False
    for i in self.nodes :
        if i.name == name :
            found = True

    if found :
        rospy.logerr("Node already exists, try another name")
    else :
        rospy.loginfo('Adding node: '+name)
        meta = {}
        meta["map"] = self.map
        meta["pointset"] = self.name
        meta["node"] = name

        node = TopologicalNode()
        node.name = name
        node.map = self.map
        node.pointset = self.name
        node.pose = pos
        # Octagonal influence zone around the node.
        vertices=[(0.69, 0.287), (0.287, 0.69), (-0.287, 0.69), (-0.69, 0.287), (-0.69, -0.287), (-0.287, -0.69), (0.287, -0.69), (0.69, -0.287)]
        for j in vertices :
            v = Vertex()
            v.x = float(j[0])
            v.y = float(j[1])
            node.verts.append(v)

        cx = node.pose.position.x
        cy = node.pose.position.y
        # Nodes within *dist* (except the charging point) get linked.
        close_nodes = []
        for i in self.nodes :
            ndist = i._get_distance(cx, cy)
            if ndist < dist :
                if i.name != 'ChargingPoint' :
                    close_nodes.append(i.name)

        # Outgoing edges on the new node ...
        for i in close_nodes :
            edge = Edge()
            edge.node = i
            edge.action = std_action
            node.edges.append(edge)

        msg_store.insert(node,meta)

        # ... and the reverse edges on the existing nodes.
        for i in close_nodes :
            self.add_edge(i, name, std_action)
def statsCallback(self, msg):
    """Archive an incoming NavStatistics message in the nav_stats
    collection, indexed by epoch, date and pointset."""
    # Parse the human-readable timestamp back into an epoch value.
    when = datetime.strptime(msg.date_at_node, '%A, %B %d %Y, at %H:%M:%S hours').timetuple()
    meta = {
        "type": "Topological Navigation Stat",
        "epoch": calendar.timegm(when),
        "date": msg.date_at_node,
        "pointset": msg.topological_map,
    }
    MessageStoreProxy(collection='nav_stats').insert(msg, meta)
class MongoImage: def __init__(self): rospy.Service('shot', Empty, self.shotCallback) rospy.Service('delete', Empty, self.deleteCallback) rospy.Service('request', StringStampList, self.requestCallback) rospy.Subscriber("/image_raw", Image, self.imageCallback) self.last_image = Image() self.scale = 0.5 self.bridge = CvBridge() self.last_base64 = None self.msg_store = MessageStoreProxy(database="srs", collection="image_stamp") def imageCallback(self, msg): self.last_image = msg def shotCallback(self, msg): frame = self.bridge.imgmsg_to_cv2(self.last_image, "bgr8") height = frame.shape[0] width = frame.shape[1] frame2 = cv2.resize( frame, (int(width * self.scale), int(height * self.scale))) encode_param = [int(cv2.IMWRITE_JPEG_QUALITY), 50] result, frame3 = cv2.imencode(".jpg", frame2, encode_param) mongo_data = StringStamp() mongo_data.stamp = rospy.get_rostime() mongo_data.data = base64.b64encode(frame3) try: p_id = self.msg_store.insert(mongo_data) except rospy.ServiceException, e: print "Service call failed: %s" % e return EmptyResponse()
class LoggerBase(object):
    """Base class for lifelog loggers: connects to the robot's message
    store and tags every insert with the currently running task id."""

    def __init__(self, db_name='jsk_robot_lifelog', col_name=None):
        self.db_name = rospy.get_param('robot/database', 'jsk_robot_lifelog')
        try:
            if col_name is None:
                self.col_name = rospy.get_param('robot/name')
            else:
                self.col_name = col_name
        except KeyError as e:
            rospy.logerr(
                "please specify param \"robot/name\" (e.g. pr1012, olive)")
            exit(1)
        self.update_cycle = rospy.get_param("update_cycle", 1)
        self.task_id = None
        self.msg_store = MessageStoreProxy(database=self.db_name,
                                           collection=self.col_name)
        rospy.loginfo("connected to %s.%s" % (self.db_name, self.col_name))

    def insert(self, msg, meta=None, wait=False):
        """Insert *msg* with metadata *meta*, tagged with the current task id.

        Bug fix: the previous mutable default ``meta={}`` was mutated by
        ``meta.update``, so a stale task_id leaked into every later call
        that relied on the default. The dict is now copied, which also
        avoids mutating a caller-supplied meta dict.
        """
        meta = {} if meta is None else dict(meta)
        if self.task_id is not None:
            meta.update({"task_id": self.task_id})
        return self.msg_store.insert(msg, meta, wait=wait)

    def spinOnce(self):
        # Poll the active task id once per update cycle.
        rospy.sleep(self.update_cycle)
        self.task_id = rospy.get_param("/task_id", None)
class LoggerBase(object):
    """Base class for lifelog loggers: connects to the robot's message
    store and tags every inserted document with the active task id."""

    def __init__(self, db_name='jsk_robot_lifelog', col_name=None):
        self.db_name = rospy.get_param('robot/database','jsk_robot_lifelog')
        try:
            if col_name is None:
                self.col_name = rospy.get_param('robot/name')
            else:
                self.col_name = col_name
        except KeyError as e:
            rospy.logerr("please specify param \"robot/name\" (e.g. pr1012, olive)")
            exit(1)
        self.update_cycle = rospy.get_param("update_cycle", 1)
        self.task_id = None
        self.msg_store = MessageStoreProxy(database=self.db_name, collection=self.col_name)
        rospy.loginfo("connected to %s.%s" % (self.db_name, self.col_name))

    def insert(self, msg, meta=None, wait=False):
        """Insert *msg* with metadata *meta*, tagged with the current task id.

        Bug fix: the previous mutable default ``meta={}`` was mutated by
        ``meta.update``, so a stale task_id leaked into every later call
        that used the default. The dict is now copied, which also avoids
        mutating a caller-supplied meta dict.
        """
        meta = {} if meta is None else dict(meta)
        if self.task_id is not None:
            meta.update({ "task_id": self.task_id })
        return self.msg_store.insert(msg, meta, wait=wait)

    def spinOnce(self):
        # Poll the active task id once per update cycle.
        rospy.sleep(self.update_cycle)
        self.task_id = rospy.get_param("/task_id", None)
def test_travel_time_estimator(self):
    """The travel time estimate between two map nodes must be at least the
    straight-line distance travelled at 1 m/s."""
    # Build a cross-shaped test map.
    map_width = 5
    map_height = 5
    separation = 10.0
    test_nodes = topological_navigation.testing.create_cross_map(
        width=map_width, height=map_height, nodeSeparation=separation)
    self.assertEqual(len(test_nodes), map_width + map_height - 1)

    # Pick the two horizontal extremes of the cross.
    start_node = test_nodes['h_%s' % -(map_width / 2)]
    self.assertIsNotNone(start_node)
    end_node = test_nodes['h_%s' % (map_width / 2)]
    self.assertIsNotNone(end_node)

    # Sanity-check the straight-line distance between them.
    dist = math.hypot(
        (start_node.pose.position.x - end_node.pose.position.x),
        (start_node.pose.position.y - end_node.pose.position.y))
    self.assertEqual(dist, (map_width - 1) * separation)

    # Push the map into the datacentre and publish it.
    proxy = MessageStoreProxy(collection='topological_maps')
    meta = {'map': 'test_travel_time_estimator_map',
            'pointset': 'test_travel_time_estimator_map'}
    for (node_name, node) in test_nodes.iteritems():
        meta["node"] = node_name
        node.map = meta['map']
        node.pointset = meta['pointset']
        proxy.insert(node, meta)
    ps = map_publisher('test_travel_time_estimator_map')

    # Query the travel time estimation service.
    time_srv_name = 'topological_navigation/travel_time_estimator'
    rospy.wait_for_service(time_srv_name, timeout=10)
    time_srv = rospy.ServiceProxy(time_srv_name, EstimateTravelTime)
    time_estimate = time_srv(start_node.name, end_node.name).travel_time

    # the time should be (at least for now) at least as long as the
    # straight-line distance at 1m/s
    self.assertGreaterEqual(time_estimate.to_sec(), (map_width - 1) * separation)
def update_maps(to_update, client): db=client.message_store collection=db["topological_maps"] # available = collection.find().distinct("_meta.pointset") msg_store = MessageStoreProxy(collection='topological_maps') del_ids = [] for pointset in to_update: #pointsetb='%s_b'%pointset #print pointsetb search = {"_meta.pointset": pointset} av =collection.find(search) #lnodes=[] eids=[] #list of known edge id's for a in av: #print a bc = update_node(a, pointset) nna = a['name'] nma = a['map'] vt = a['verts'] if a['edges']: es = a['edges'] for i in es: ed, eids = update_edge(i, nna, nma, eids) bc.edges.append(ed) for i in vt: v = update_vert(i) bc.verts.append(v) meta = update_meta(a['_meta'], pointset) #print bc #print meta del_ids.append(a['_id']) #lnodes.append(bc) msg_store.insert(bc,meta) #print lnodes print "Deleting updated nodes" for i in del_ids: msg_store.delete(str(i)) print "done"
def update_maps(to_update, client): db = client.message_store collection = db["topological_maps"] # available = collection.find().distinct("_meta.pointset") msg_store = MessageStoreProxy(collection='topological_maps') del_ids = [] for pointset in to_update: #pointsetb='%s_b'%pointset #print pointsetb search = {"_meta.pointset": pointset} av = collection.find(search) #lnodes=[] eids = [] #list of known edge id's for a in av: #print a bc = update_node(a, pointset) nna = a['name'] nma = a['map'] vt = a['verts'] if a['edges']: es = a['edges'] for i in es: ed, eids = update_edge(i, nna, nma, eids) bc.edges.append(ed) for i in vt: v = update_vert(i) bc.verts.append(v) meta = update_meta(a['_meta'], pointset) #print bc #print meta del_ids.append(a['_id']) #lnodes.append(bc) msg_store.insert(bc, meta) #print lnodes print "Deleting updated nodes" for i in del_ids: msg_store.delete(str(i)) print "done"
def test_non_ascii(self):
    """A message containing non-ascii text must round-trip intact."""
    proxy = MessageStoreProxy()
    original = String(data="こんにちは")  # non ascii string
    doc_id = proxy.insert(original)
    try:
        fetched, _ = proxy.query_id(doc_id, String._type)
        self.assertEqual(original.data, fetched.data)
    except rospy.service.ServiceException:
        self.fail("non ascii unicode string cannot be queried")
class People_Tracker_Log():
    """Logs PeopleTracker detections from a MORSE simulation together with
    the robot pose, both to mongodb and to a ROS topic."""

    def __init__(self, people_tracker_topic, dataset_name):
        rospy.loginfo("Initialising Logging " + people_tracker_topic)
        self.pt = PeopleTracker()
        self.rp = Pose()
        self.dataset_name = dataset_name
        self.topic = people_tracker_topic
        self.msg_store = MessageStoreProxy(collection="people_perception_morse")
        # Subscribing to global poses of human avatars
        rospy.Subscriber(people_tracker_topic, PeopleTracker, self.pt_callback)
        # Subscribing to robot pose
        rospy.Subscriber('/robot_pose', PoseStamped, self.rp_callback)
        # Publishing the log
        self.pub = rospy.Publisher('morse_people_tracker_log', Logging, queue_size=10)
        self.seq = 0

    def pt_callback(self, data):
        # Keep only the latest people tracker message.
        self.pt = data

    def rp_callback(self, data):
        # Keep only the latest robot pose.
        self.rp = data.pose

    def log(self):
        """Store and publish the latest detection, if anyone is visible."""
        if len(self.pt.distances) == 0:
            return
        rospy.logdebug("Person detected for " + self.topic)
        entry = Logging()
        entry.header.seq = self.seq
        entry.header.stamp = rospy.Time.now()
        entry.header.frame_id = '/map'
        entry.uuids = self.pt.uuids
        entry.people = self.pt.poses
        entry.people_tracker = self.pt
        entry.robot = self.rp
        self.msg_store.insert(entry, {'perspective': self.dataset_name})
        self.pub.publish(entry)
        self.seq += 1
def test_single_task_timing(self):
    """Schedule a single 'check_task' task with a tight execution window
    and assert that the execution framework runs it successfully.

    The window is derived from the estimated travel time between the
    robot's current node and the task's start node.
    """
    rospy.logwarn('test_single_task_timing')
    msg_store = MessageStoreProxy()

    # now wait for the travel time estimation service
    time_srv_name = 'topological_navigation/travel_time_estimator'
    rospy.wait_for_service(time_srv_name, timeout=10)
    time_srv = rospy.ServiceProxy(time_srv_name, EstimateTravelTime)

    # Results reported by the task action server are collected here.
    checks = []

    def checked(bool):
        checks.append(bool)

    task_checker = CheckTaskActionServer(result_fn=checked)
    task_checker.start()

    now = rospy.get_rostime()
    delay = rospy.Duration(5)
    current_node = self.current_node

    task = Task()
    task.action = 'check_task'
    task.start_node_id = 'v_1'

    # make sure action duration fits inside the execution window
    travel_time = time_srv(current_node, task.start_node_id).travel_time
    task.max_duration = rospy.Duration(travel_time.to_sec() / 4)
    task.start_after = now + delay + travel_time
    task.end_before = task.start_after + rospy.Duration(
        travel_time.to_sec() / 2)

    # add the task argument which is a reference to a copy of itself in the db
    object_id = msg_store.insert(task)
    task_utils.add_object_id_argument(task, object_id, Task)

    client = actionlib.SimpleActionClient('check_task', TaskTestAction)
    client.wait_for_server()

    # get services to call into execution framework
    add_tasks, set_execution_status = get_services()
    add_tasks([task])
    set_execution_status(True)

    # Wait until the action server reports at least one result.
    while len(checks) < 1 and not rospy.is_shutdown():
        rospy.sleep(0.1)

    for result in checks:
        self.assertTrue(result)
class logTweets(object):
    """Logs every tweet published on /strands_tweets/tweet to mongodb,
    together with a snapshot from the head camera (rgb + depth)."""

    def __init__(self, name):
        rospy.loginfo(" ...starting")
        self.msg_sub = rospy.Subscriber('/strands_tweets/tweet', strands_tweets.msg.Tweeted, self.tweet_callback, queue_size=1)
        rospy.loginfo(" ...done")
        rospy.spin()

    def tweet_callback(self, msg):
        """Capture camera images, attach the tweet text and store the result."""
        self.msg_store = MessageStoreProxy(collection='twitter_log')
        to_save = strands_tweets.msg.Tweeted()
        # Best effort: a missing image is logged but does not abort the insert.
        try:
            to_save.depth = rospy.wait_for_message(
                '/head_xtion/depth/image_rect_meters',
                sensor_msgs.msg.Image, timeout=1.0)
        except rospy.ROSException:
            rospy.logwarn("Failed to get camera depth Image")
        try:
            to_save.photo = rospy.wait_for_message(
                '/head_xtion/rgb/image_color',
                sensor_msgs.msg.Image, timeout=1.0)
        except rospy.ROSException:
            rospy.logwarn("Failed to get camera rgb Image")
        to_save.text = msg.text
        meta = {}
        meta["Description"] = "copy of tweeted images"
        # Bug fix: store the message assembled above (with the captured
        # images); previously the incoming msg was stored and to_save was
        # silently discarded.
        self.msg_store.insert(to_save, meta)
def test_replication_with_query(self):
    """Only entries matching the replication query (Pose messages) must be
    moved to the destination datacentre."""
    replication_db = "replication_test_with_query"
    replication_col = "replication_test_with_query"

    # connect to destination for replication
    try:
        self.assertTrue(wait_for_mongo(ns="/datacentre2"), "wait for mongodb server")
        dst_client = import_MongoClient()("localhost", 49163)
        self.assertEqual(dst_client[replication_db][replication_col].count(),
                         0, "No entry in destination")
    except pymongo.errors.ConnectionFailure:
        self.fail("Failed to connect to destination for replication")

    # insert entries to move: 5 Wrench and 5 Pose messages
    self.assertTrue(wait_for_mongo(), "wait for mongodb server")
    proxy = MessageStoreProxy(database=replication_db, collection=replication_col)
    for i in range(5):
        wrench = Wrench()
        wrench.force.x = i
        proxy.insert(wrench)
        pose = Pose()
        pose.position.x = i
        proxy.insert(pose)

    # move entries with query
    rospy.sleep(3)
    query = {'_meta.stored_type': Pose._type}
    retcode = subprocess.check_call([
        get_script_path(), '--move-before', '0', '--query',
        json_util.dumps(query), replication_db, replication_col])
    self.assertEqual(retcode, 0, "replicator_client returns code 0")

    # check if replication was succeeded
    rospy.sleep(3)
    self.assertEqual(dst_client[replication_db][replication_col].count(),
                     5, "replicated entry exists in destination")
def publish_stats(self):
    """Publish navigation statistics for the last edge traversal and
    archive a copy in the nav_stats mongodb collection."""
    msg = NavStatistics()
    msg.edge_id = self.stat.edge_id
    msg.status = self.stat.status
    msg.origin = self.stat.origin
    msg.target = self.stat.target
    msg.topological_map = self.stat.topological_map
    msg.final_node = self.stat.final_node
    msg.time_to_waypoint = self.stat.time_to_wp
    msg.operation_time = self.stat.operation_time
    msg.date_started = self.stat.get_start_time_str()
    msg.date_at_node = self.stat.date_at_node.strftime('%A, %B %d %Y, at %H:%M:%S hours')
    msg.date_finished = self.stat.get_finish_time_str()
    self.stats_pub.publish(msg)

    # Metadata used to index the stat in the message store.
    meta = {}
    meta["type"] = "Topological Navigation Stat"
    meta["epoch"] = calendar.timegm(self.stat.date_at_node.timetuple())
    meta["date"] = self.stat.date_at_node.strftime('%A, %B %d %Y, at %H:%M:%S hours')
    meta["pointset"] = self.stat.topological_map

    store = MessageStoreProxy(collection='nav_stats')
    store.insert(msg, meta)
class LoggerBase(object):
    """Base class for lifelog loggers: connects to the robot's message
    store, optionally ensures mongodb indices, and tags every insert with
    the currently active task id."""

    def __init__(self, db_name='jsk_robot_lifelog', col_name=None, ensure_index=True):
        super(LoggerBase, self).__init__()
        self.db_name = rospy.get_param('/robot/database', 'jsk_robot_lifelog')
        try:
            if col_name is None:
                self.col_name = rospy.get_param('/robot/name')
            else:
                self.col_name = col_name
        except KeyError as e:
            rospy.logerr(
                "please specify param \"/robot/name\" (e.g. pr1012, olive)")
            exit(1)
        self.task_id = None
        self.msg_store = MessageStoreProxy(database=self.db_name,
                                           collection=self.col_name)
        rospy.loginfo("connected to %s.%s" % (self.db_name, self.col_name))
        if ensure_index:
            # Best effort: create indices on the standard meta fields so
            # queries stay fast; failure is logged but non-fatal.
            try:
                MongoClient = MU.import_MongoClient()
                host = rospy.get_param("/mongodb_host")
                port = rospy.get_param("/mongodb_port")
                client = MongoClient(host, port)
                c = client[self.db_name][self.col_name]
                indices = [
                    i['key'][0][0] for i in c.index_information().values()
                ]
                keys = ["_meta.stored_type", "_meta.inserted_at"]
                for key in keys:
                    if key not in indices:
                        rospy.loginfo("Creating index for key '%s'" % key)
                        c.ensure_index(key)
                        rospy.loginfo("Created index for key '%s'" % key)
                client.close()
            except Exception as e:
                rospy.logerr("Failed to ensure index: %s" % e)

    def insert(self, msg, meta=None, wait=False):
        """Insert *msg* with metadata *meta*, tagged with the current task id.

        Bug fix: the previous mutable default ``meta={}`` was mutated by
        ``meta.update``, so a stale task_id leaked into every later call
        that used the default. The dict is now copied, which also avoids
        mutating a caller-supplied meta dict.
        """
        meta = {} if meta is None else dict(meta)
        if self.task_id is not None:
            meta.update({"task_id": self.task_id})
        return self.msg_store.insert(msg, meta, wait=wait)

    def spinOnce(self):
        # Refresh the active task id.
        self.task_id = rospy.get_param("/task_id", None)
def create_datacentre_task(to_replicate, delete_after_move=True):
    """Build a scheduler Task that runs the move_mongodb_entries action for
    the given collections.

    :param to_replicate: list of collection names to replicate
    :param delete_after_move: whether entries are removed after moving
    :return: a populated Task ready to be handed to the scheduler
    """
    task = Task()
    # no idea, let's say 2 hours for now -- this action server can't be
    # preempted though, so this is cheating
    task.max_duration = rospy.Duration(2 * 60 * 60)
    task.action = 'move_mongodb_entries'

    # argument 1: the collections to replicate, stored in the datacentre
    collections = StringList(to_replicate)
    oid = MessageStoreProxy().insert(collections)
    task_utils.add_object_id_argument(task, oid, StringList)

    # argument 2: move entries over 24 hours old
    task_utils.add_duration_argument(task, rospy.Duration(24 * 60 * 60))

    # argument 3: whether to delete entries after moving them
    task_utils.add_bool_argument(task, delete_after_move)
    return task
class Listener: def __init__(self): rospy.Subscriber("/face_detection/faces", FaceArrayStamped, self.faceCallback) rospy.Subscriber("/image_raw_throttle", Image, self.imageCallback) self.bridge = CvBridge() self.last_base64 = None self.active = False self.counter = 0 self.threshold = 2 self.msg_store = MessageStoreProxy(database="srs", collection="face_detect") def faceCallback(self, msg): if self.active: if len(msg.faces) == 0: self.active = False print("inactivating") else: if len(msg.faces) > 0: if len(msg.faces[0].eyes) >= 2: self.active = True print("activating") self.activating(msg) def imageCallback(self, msg): frame = self.bridge.imgmsg_to_cv2(msg, "bgr8") height = frame.shape[0] width = frame.shape[1] frame2 = cv2.resize(frame, (int(width * 0.25), int(height * 0.25))) encode_param = [int(cv2.IMWRITE_JPEG_QUALITY), 50] result, frame3 = cv2.imencode(".jpg", frame2, encode_param) self.last_base64 = base64.b64encode(frame3) def activating(self, msg): try: data = MongoData() data.stamp = rospy.get_rostime() data.x = msg.faces[0].face.x data.y = msg.faces[0].face.y data.image = self.last_base64 p_id = self.msg_store.insert(data) except rospy.ServiceException, e: print "Service call failed: %s" % e
class RegionObservationTimeManager(object):
    # Tracks how long the robot has observed each SOMA region, binned per
    # day -> region -> hour -> minute-interval, backed by the
    # "region_observation_time" mongodb collection and the roslog robot_pose
    # collection.
    def __init__(self, soma_map, soma_config):
        self.soma_map = soma_map
        self.soma_config = soma_config
        self.ms = MessageStoreProxy(collection="region_observation_time")
        self.gs = GeoSpatialStoreProxy('geospatial_store', 'soma')
        # Direct pymongo handle to the logged robot poses.
        self.roslog = pymongo.MongoClient(
            rospy.get_param("mongodb_host", "localhost"),
            rospy.get_param("mongodb_port", 62345)
        ).roslog.robot_pose
        self.reinit()

    def reinit(self):
        # day -> {region -> [24 x {minute_bin -> seconds}]}
        self.region_observation_duration = dict()
        # day -> (month, year), needed to rebuild absolute dates later
        self._month_year_observation_taken = dict()

    def load_from_mongo(self, days, minute_interval=60):
        """
        Load robot-region observation time from database (mongodb) and
        store them in self.region_observation_duration.
        Returning (region observation time, total duration of observation)
        """
        roi_region_daily = dict()
        total_duration = 0
        self.minute_interval = minute_interval
        for i in days:
            end_date = i + datetime.timedelta(hours=24)
            rospy.loginfo(
                "Load region observation time from %s to %s..." % (str(i), str(end_date))
            )
            query = {
                "soma": self.soma_map, "soma_config": self.soma_config,
                "start_from.secs": {
                    "$gte": time.mktime(i.timetuple()),
                    "$lt": time.mktime(end_date.timetuple())
                }
            }
            roi_reg_hourly = dict()
            for log in self.ms.query(RegionObservationTime._type, message_query=query):
                end = datetime.datetime.fromtimestamp(log[0].until.secs)
                if log[0].region_id not in roi_reg_hourly:
                    # Lazily create 24 hourly dicts of minute bins for this
                    # region; skip records whose bin width does not match the
                    # requested minute_interval.
                    temp = list()
                    start = datetime.datetime.fromtimestamp(log[0].start_from.secs)
                    interval = (end.minute + 1) - start.minute
                    if interval != minute_interval:
                        continue
                    for j in range(24):
                        # Py2 integer division intended here.
                        group_mins = {k*interval: 0 for k in range(1, (60/interval)+1)}
                        temp.append(group_mins)
                    roi_reg_hourly.update({log[0].region_id: temp})
                # Duration in (fractional) seconds for this hour/minute bin.
                roi_reg_hourly[log[0].region_id][end.hour][end.minute+1] = log[0].duration.secs
                roi_reg_hourly[log[0].region_id][end.hour][end.minute+1] += 0.000000001 * log[0].duration.nsecs
                total_duration += log[0].duration.secs
            roi_region_daily.update({i.day: roi_reg_hourly})
        self.region_observation_duration = roi_region_daily
        return roi_region_daily, total_duration

    def store_to_mongo(self):
        """
        Store region observation time from self.region_observation_duration
        to mongodb. It will store soma map, soma configuration, region_id, the
        starting and end time where robot sees a region in some interval, and
        the duration of how long the robot during the interval.
        """
        rospy.loginfo("Storing region observation time to region_observation_time collection...")
        data = self.region_observation_duration
        try:
            minute_interval = self.minute_interval
        except:
            # minute_interval only exists after load_from_mongo or
            # calculate_region_observation_duration has been called.
            rospy.logwarn("Minute interval is not defined, please call either load_from_mongo or calculate_region_observation_duration first")
            return
        for day, daily_data in data.iteritems():
            for reg, reg_data in daily_data.iteritems():
                for hour, hourly_data in enumerate(reg_data):
                    for minute, duration in hourly_data.iteritems():
                        # Rebuild the absolute end time of this bin from the
                        # recorded (month, year) for this day.
                        date_until = datetime.datetime(
                            self._month_year_observation_taken[day][1],
                            self._month_year_observation_taken[day][0],
                            day, hour, minute-1, 59
                        )
                        until = time.mktime(date_until.timetuple())
                        start_from = until - (60 * minute_interval) + 1
                        msg = RegionObservationTime(
                            self.soma_map, self.soma_config, reg,
                            rospy.Time(start_from), rospy.Time(until),
                            rospy.Duration(duration)
                        )
                        self._store_to_mongo(msg)

    def _store_to_mongo(self, msg):
        # Upsert by (map, config, region, time window); zero-duration
        # entries are not stored at all.
        query = {
            "soma": msg.soma, "soma_config": msg.soma_config,
            "region_id": msg.region_id, "start_from.secs": msg.start_from.secs,
            "until.secs": msg.until.secs
        }
        if msg.duration.nsecs > 0:
            if len(self.ms.query(RegionObservationTime._type, message_query=query)) > 0:
                self.ms.update(msg, message_query=query)
            else:
                self.ms.insert(msg)

    def calculate_region_observation_duration(self, days, minute_interval=60):
        """
        Calculating the region observation duration for particular days,
        splitted by minute_interval.
        Returns the ROIs the robot has monitored at each logged robot pose
        for particular days specified up to minutes interval.
        """
        rospy.loginfo('Calculation region observation duration...')
        roi_region_daily = dict()
        self.minute_interval = minute_interval
        for i in days:
            # Prefer precomputed values from mongo; fall back to computing
            # from the raw robot-pose log when nothing is stored for day i.
            loaded_roi_reg_day = self.load_from_mongo([i], minute_interval)
            if loaded_roi_reg_day[1] == 0:
                end_date = i + datetime.timedelta(hours=24)
                roi_region_hourly = self._get_robot_region_stay_duration(i, end_date, minute_interval)
                roi_region_daily.update({i.day: roi_region_hourly})
            else:
                roi_region_daily.update({i.day: loaded_roi_reg_day[0][i.day]})
            self._month_year_observation_taken.update({i.day: (i.month, i.year)})
        self.region_observation_duration = roi_region_daily
        return roi_region_daily

    # hidden function for get_robot_region_stay_duration
    def _get_robot_region_stay_duration(self, start_date, end_date, minute_interval=60):
        # Sample every 10th logged pose; each sampled pose contributes one
        # "observation count" to every region covered by the view cone.
        sampling_rate = 10
        roi_temp_list = dict()
        rospy.loginfo("Querying from " + str(start_date) + " to " + str(end_date))
        query = {
            "_id": {"$exists": "true"},
            "_meta.inserted_at": {"$gte": start_date, "$lt": end_date}
        }
        for i, pose in enumerate(self.roslog.find(query)):
            if i % sampling_rate == 0:
                _, _, yaw = euler_from_quaternion(
                    [0, 0, pose['orientation']['z'], pose['orientation']['w']]
                )
                coords = robot_view_cone(pose['position']['x'], pose['position']['y'], yaw)
                # coords = robot_view_area(pose['position']['x'], pose['position']['y'], yaw)
                # Close the polygon by repeating the first vertex.
                langitude_latitude = list()
                for pt in coords:
                    langitude_latitude.append(self.gs.coords_to_lnglat(pt[0], pt[1]))
                langitude_latitude.append(self.gs.coords_to_lnglat(coords[0][0], coords[0][1]))
                for j in self.gs.observed_roi(langitude_latitude, self.soma_map, self.soma_config):
                    region = str(j['soma_roi_id'])
                    hour = pose['_meta']['inserted_at'].time().hour
                    minute = pose['_meta']['inserted_at'].time().minute
                    # Region Knowledge per hour. Bin them by hour and
                    # minute_interval.
                    # NOTE(review): the inner loops shadow the outer loop
                    # variable `i`; harmless because `for` rebinds it each
                    # iteration, but worth renaming.
                    if region not in roi_temp_list:
                        temp = list()
                        for i in range(24):
                            group_mins = {
                                i*minute_interval: 0 for i in range(1, (60/minute_interval)+1)
                            }
                            temp.append(group_mins)
                        roi_temp_list[region] = temp
                    index = ((minute / minute_interval) + 1) * minute_interval
                    roi_temp_list[region][hour][index] += 1
        roi_temp_list = self._normalizing(roi_temp_list, sampling_rate)
        rospy.loginfo("Region observation duration for the query is " + str(roi_temp_list))
        return roi_temp_list

    def _normalizing(self, roi_temp_list, sampling_rate):
        """
        Checking if all robot region relation based on its stay duration is
        capped by minute_interval * 60 (total seconds). If it is not, then
        normalization is applied
        """
        regions = roi_temp_list.keys()
        _hourly_poses = [0] * 24
        for i in range(24):
            for region in regions:
                _hourly_poses[i] += sum(roi_temp_list[region][i].values())
        # Normalize only if some hour exceeds one hour's worth of seconds.
        normalizing = sum([1 for i in _hourly_poses if i > 3601]) > 0
        max_hourly_poses = max(_hourly_poses)
        for reg, hourly_poses in roi_temp_list.iteritems():
            if normalizing:
                for ind, val in enumerate(hourly_poses):
                    for minute, seconds in val.iteritems():
                        # Scale so the busiest hour maps to 3600 seconds.
                        roi_temp_list[reg][ind][minute] = 3600 / float(max_hourly_poses) * seconds
        return roi_temp_list
msg_store = MessageStoreProxy() msg_store_maps = MessageStoreProxy(collection='topological_maps') query_meta = {} query_meta["stored_type"] = "topological_navigation_msgs/TopologicalNode" available = len(msg_store.query(TopologicalNode._type, {}, query_meta)) > 0 #print available if available <= 0: #rospy.logerr("Desired pointset '"+point_set+"' not in datacentre") #rospy.logerr("Available pointsets: "+str(available)) raise Exception("Can't find waypoints.") else: message_list = msg_store.query(TopologicalNode._type, {}, query_meta) for i in message_list: #print i meta = {} meta["node"] = i[0].name meta["map"] = i[0].map meta["pointset"] = i[0].pointset available = len( msg_store_maps.query(TopologicalNode._type, {}, meta)) if available == 0: msg_store_maps.insert(i[0], meta) else: rospy.logerr("this point is already in datacentre:") print meta
from mongodb_store.message_store import MessageStoreProxy import yaml import json import pprint import argparse def loadDialogue(inputfile, dataset_name): print "openning %s" % inputfile with open(inputfile) as f: content = f.readlines() print "Done" return content if __name__ == '__main__': parser = argparse.ArgumentParser() parser.add_argument("dataset_name", help="The name of the dataset. Saved in meta information using 'meta_name'", type=str) parser.add_argument("-i", "--input", help="Input yaml file", type=str, required=True) parser.add_argument("--collection_name", help="The collection name. Default: aaf_walking_group", type=str, default="aaf_walking_group") parser.add_argument("--meta_name", help="The name of the meta filed to store 'dataset_name' in. Default: waypoint_set", type=str, default="waypoint_set") args = parser.parse_args() msg_store = MessageStoreProxy(collection=args.collection_name) data = yaml.load(open(args.input)) meta = {} meta[args.meta_name] = args.dataset_name pprint.pprint(meta) pprint.pprint(data) msg_store.insert(std_msgs.msg.String(json.dumps(data)), meta)
class TRAN(object):
    # Trajectory annotator: shows upper-body detection images in a Tk window
    # and lets a human label the matching trajectories as human / non-human,
    # storing the labels in the "trajectory_types" collection.
    def __init__(self, name):
        # NOTE(review): the `name` parameter is unused in the visible code.
        self.stored_uuids = list()          # uuids annotated in this session
        self._uncertain_uuids = list()      # uuids labelled while ambiguous
        self.have_stored_uuids = list()     # uuids already in the database
        self.trajectories = OfflineTrajectories()
        # All logged upper-body detections, in insertion order.
        self.upbods = MessageStoreProxy(collection='upper_bodies').query(
            LoggingUBD._type, sort_query=[("$natural", 1)]
        )
        self._store_client = MessageStoreProxy(collection="trajectory_types")
        self.bridge = CvBridge()
        self.window = None
        self._traj_type = -1  # -1: no vote, 0: non-human, 1: human

    # create window for the image to show, and the button to click
    def _create_window(self, img):
        # main window (blocks in mainloop until a button destroys it)
        self.window = Tkinter.Tk()
        self.window.title("Trajectory Annotator")
        # image frame in the top part of the main window
        imgtk = ImageTk.PhotoImage(image=Image.fromarray(img))
        Tkinter.LabelFrame(self.window).pack()
        Tkinter.Label(self.window, image=imgtk).pack()
        # button frame in the bottom part of the main window
        # NOTE(review): .pack() returns None, so buttons_frame is None and
        # the buttons end up parented to the default root — confirm intent.
        buttons_frame = Tkinter.LabelFrame(self.window).pack(side=Tkinter.BOTTOM, expand="yes")
        # human
        human_button = Tkinter.Button(
            buttons_frame, text="Human", fg="black", command=self._human_button_cb
        )
        human_button.pack(side=Tkinter.LEFT)
        # non-human
        human_button = Tkinter.Button(
            buttons_frame, text="Non-Human", fg="black", command=self._nonhuman_button_cb
        )
        human_button.pack(side=Tkinter.RIGHT)
        # window main loop
        self.window.mainloop()

    # call back for human button
    def _human_button_cb(self):
        self.window.destroy()
        self._traj_type = 1

    # call back for non-human button
    def _nonhuman_button_cb(self):
        self.window.destroy()
        self._traj_type = 0

    # check in database whether the uuid exists
    def _check_mongo_for_uuid(self, uuids):
        for uuid in uuids:
            logs = self._store_client.query(
                TrajectoryType._type, message_query={"uuid": uuid}
            )
            if len(logs) and uuid not in self.have_stored_uuids:
                self.have_stored_uuids.append(uuid)

    # hidden annotation function for presenting image and get the vote
    def _annotate(self, ubd, index, uuids, annotation):
        # Returns (stop, annotation): stop is True when the user closed the
        # window without voting; annotation maps uuid -> 0/1 label.
        stop = False
        self._check_mongo_for_uuid(uuids)
        if len(uuids) > 0 and not (set(uuids).issubset(self.stored_uuids) or set(uuids).issubset(self.have_stored_uuids)):
            if len(ubd[0].ubd_rgb) == len(ubd[0].ubd_pos):
                # Convert BGR -> RGB for display.
                b, g, r = cv2.split(
                    self.bridge.imgmsg_to_cv2(ubd[0].ubd_rgb[index])
                )
                self._create_window(cv2.merge((r, g, b)))
                if int(self._traj_type) in [0, 1]:
                    for uuid in [i for i in uuids if i not in self.stored_uuids]:
                        rospy.loginfo("%s is stored..." % uuid)
                        annotation.update({uuid: int(self._traj_type)})
                        self.stored_uuids.append(uuid)
                        # More than one candidate trajectory: label applied
                        # to all of them, so flag them as uncertain.
                        if len(uuids) > 1:
                            self._uncertain_uuids.append(uuid)
                    self._uncertain_uuids = [
                        i for i in self._uncertain_uuids if i not in uuids
                    ]
                else:
                    # No vote was cast: treat as a request to stop.
                    stop = True
            else:
                rospy.logwarn("UBD_RGB has %d data, but UBD_POS has %d data" % (len(ubd[0].ubd_rgb), len(ubd[0].ubd_pos)))
        # if all uuids have been classified before, then this removes
        # all doubts about those uuids
        elif set(uuids).issubset(self.stored_uuids):
            self._uncertain_uuids = [
                i for i in self._uncertain_uuids if i not in uuids
            ]
        return stop, annotation

    # annotation function
    def annotate(self):
        stop = False
        annotation = dict()
        for i in self.upbods:
            for j in range(len(i[0].ubd_pos)):
                uuids = self._find_traj_frm_pos(
                    i[0].header, i[0].ubd_pos[j], i[0].robot
                )
                stop, annotation = self._annotate(i, j, uuids, annotation)
                self._traj_type = -1
                # NOTE(review): this break only leaves the inner loop; the
                # outer loop over detections continues — confirm intent.
                if stop:
                    break
        # storing the data
        counter = 1
        for uuid, value in annotation.iteritems():
            header = Header(counter, rospy.Time.now(), '')
            traj_type = 'human'
            if not value:
                traj_type = 'non-human'
            anno_msg = TrajectoryType(header, uuid, traj_type)
            if uuid in self.have_stored_uuids:
                # NOTE(review): message_query is passed as a JSON string
                # here but as a dict elsewhere — verify the proxy accepts
                # both forms.
                self._store_client.update(
                    anno_msg, message_query='{"uuid":"%s"}' % uuid
                )
            else:
                self._store_client.insert(anno_msg)
            counter += 1

    # function to provide corresponding uuids based on time, human position, and
    # robot's position from UBD
    def _find_traj_frm_pos(self, header, point, robot):
        uuids = list()
        for uuid, traj in self.trajectories.traj.iteritems():
            # Successively narrow candidate indices: by timestamp, then by
            # robot position, then by human position.
            stamps = [i[0].header.stamp for i in traj.humrobpose]
            index = self._header_index(header.stamp, stamps)
            points = [i[1].position for i in traj.humrobpose]
            index = self._point_index(robot.position, points, index)
            points = [i[0].pose.position for i in traj.humrobpose]
            index = self._point_index(point, points, index)
            if len(index) != 0:
                uuids.append(uuid.encode('ascii', 'ignore'))
        return uuids

    # function that returns indexes of time stamps whenever time stamp from UBD
    # matches time stamps from trajectories
    def _header_index(self, stamp, stamps):
        index = list()
        for i in range(len(stamps)):
            # NOTE(review): `stamps[0]` looks like it should be `stamps[i]`;
            # as written the loop either never breaks or breaks immediately.
            if stamps[0] > stamp:
                break
            if (stamps[i] - stamp).secs in [0, -1]:
                index.append(i)
        return index

    # function that returns indexes of human positions whenever the position
    # provided by UBD matches the positions from trajectories
    def _point_index(self, point, points, index=list()):
        # NOTE(review): mutable default argument; harmless here since index
        # is never mutated, but a None default would be safer.
        index2 = list()
        dist = 0.1
        for i in index:
            if abs(point.x-points[i].x) < dist and abs(point.y-points[i].y) < dist:
                index2.append(i)
        return index2
class ActionResultDB(object):
    # Discovers actionlib goal/result topics at runtime, subscribes to them,
    # and logs every goal/result message (plus the nearest joint state) into
    # mongodb_store.
    # NOTE(review): these are class-level mutable attributes, shared by all
    # instances — confirm only one instance is ever created.
    loaded_types = []
    useless_types = ['std_msgs/Header'] # message types not but action (goal|result)
    subscribers = {} # topicname:subscriber

    def __init__(self): # TODO
        self.db_name = rospy.get_param('robot/database','jsk_robot_lifelog')
        try:
            self.col_name = rospy.get_param('robot/name')
        except KeyError as e:
            rospy.logerr("please specify param \"robot/name\" (e.g. pr1012, olive)")
            exit(1)
        self.update_cycle = rospy.get_param('~update_cycle', 1.0)
        self.joint_tolerance = 1.0      # seconds: window of joint states kept
        self.joint_states = []          # recent JointState messages
        self.joint_states_inserted = [] # list of time stamp
        self.joint_sub = rospy.Subscriber('/joint_states', JointState, self._joint_states_cb)
        self._load_params()
        self.msg_store = MessageStoreProxy(database=self.db_name, collection=self.col_name)
        rospy.loginfo("connected to %s.%s" % (self.db_name, self.col_name))

    def _load_params(self):
        # Optional white/black lists of action names and types; a missing
        # param simply disables that filter (hence the broad excepts).
        try:
            self.action_name_white_list = rospy.get_param('~white_list')['name']
            rospy.loginfo("whitelist_name: %s", self.action_name_white_list)
        except:
            self.action_name_white_list = None
        try:
            self.action_name_black_list = rospy.get_param('~black_list')['name']
            rospy.loginfo("blacklist_name: %s", self.action_name_black_list)
        except:
            self.action_name_black_list = None
        try:
            self.action_type_white_list = rospy.get_param('~white_list')['type']
            rospy.loginfo("whitelist_type: %s", self.action_type_white_list)
        except:
            self.action_type_white_list = None
        try:
            self.action_type_black_list = rospy.get_param('~black_list')['type']
            rospy.loginfo("blacklist_name: %s", self.action_type_black_list)
        except:
            self.action_type_black_list = None

    # callback functions
    def _action_goal_cb(self, topic, type_name, msg):
        self._insert_action_goal(topic,type_name,msg)

    def _action_result_cb(self, topic, type_name, msg):
        self._insert_action_result(topic,type_name,msg)
        # Also store the joint state closest in time to the result.
        # NOTE(review): min() raises ValueError if joint_states is empty
        # (no /joint_states received yet) — confirm this cannot happen.
        key_func = (lambda js: abs(js.header.stamp-msg.header.stamp))
        nearest_state = min(self.joint_states,key=key_func)
        self._insert_joint_states(nearest_state)

    def _joint_states_cb(self, msg):
        # Keep a sliding window of joint states within joint_tolerance secs.
        self.joint_states = [m for m in self.joint_states if (msg.header.stamp - m.header.stamp).to_sec() < self.joint_tolerance] + [msg]
        self.joint_states_inserted = [s for s in self.joint_states_inserted if (msg.header.stamp - s).to_sec() < self.joint_tolerance]

    # insert functions
    def _insert_action_goal(self,topic,type_name,msg):
        try:
            res = self.msg_store.insert(msg)
            rospy.loginfo("inserted action_goal message: %s (%s) -> %s", topic, type_name, res)
        except Exception as e:
            rospy.logerr("failed to insert action goal: %s (%s) -> %s", topic, type_name, e)

    def _insert_action_result(self,topic,type_name,msg):
        try:
            res = self.msg_store.insert(msg)
            rospy.loginfo("inserted action_result message: %s (%s) -> %s", topic, type_name, res)
        except Exception as e:
            rospy.logerr("failed to insert action result: %s (%s) -> %s", topic, type_name, e)

    def _insert_joint_states(self, msg):
        # Deduplicate by timestamp so one state isn't stored twice.
        try:
            if msg.header.stamp in self.joint_states_inserted:
                return
            res = self.msg_store.insert(msg)
            rospy.loginfo("inserted joint_states message: %s", res)
        except Exception as e:
            rospy.logerr("failed to insert joint states: %s", e)

    # if the message type is goal or result, return the callback
    def _message_callback_type(self,name,type_name,type_obj):
        if not hasattr(type_obj,'header'): return None # no header message
        if type(type_obj.header) != std_msgs.msg.Header: return None # custom header message
        # Duck-typed detection of ActionGoal / ActionResult messages.
        if hasattr(type_obj,'goal_id') and hasattr(type_obj,'goal') and type(type_obj.goal_id) == actionlib_msgs.msg.GoalID:
            return (lambda msg: self._action_goal_cb(name,type_name,msg))
        if hasattr(type_obj,'status') and hasattr(type_obj,'result') and type(type_obj.status) == actionlib_msgs.msg.GoalStatus:
            return (lambda msg: self._action_result_cb(name,type_name,msg))
        else:
            return None

    def sleep_one_cycle(self):
        rospy.sleep(self.update_cycle)

    # subscriber updater
    def update_subscribers(self):
        # check new publishers
        topics = rospy.client.get_published_topics()
        if self.action_name_white_list:
            topics = [t for t in topics if t[0] in self.action_name_white_list]
        if self.action_type_white_list:
            topics = [t for t in topics if t[1].split('/')[1] in self.action_type_white_list]
        if self.action_name_black_list:
            topics = [t for t in topics if not t[0] in self.action_name_black_list]
        if self.action_type_black_list:
            topics = [t for t in topics if not t[1].split('/')[1] in self.action_type_black_list]
        for topic_name, topic_type in topics:
            (pkg_name, msg_type) = topic_type.split('/')
            py_topic_class = '%s.msg.%s' % (pkg_name, msg_type)
            if py_topic_class in self.useless_types: continue
            if topic_name in self.subscribers.keys(): continue
            # Import and Check: dynamically import the message package so
            # arbitrary action types can be subscribed without compile-time
            # dependencies.  Failures blacklist the type permanently.
            try:
                pypkg = __import__('%s.msg' % pkg_name)
                rospy.loginfo('imported %s.msg', pkg_name)
            except Exception as e:
                rospy.logerr('failed to import %s.msg: %s', pkg_name, e)
                rospy.logerr('please catkin_make %s', pkg_name)
                self.useless_types += [py_topic_class]
                continue
            try:
                type_class = getattr(getattr(pypkg, 'msg'), msg_type)
                type_instance = type_class()
            except Exception as e:
                rospy.logerr('failed to instantiate %s.msg.%s: %s', pkg_name, msg_type, e)
                self.useless_types += [py_topic_class]
                continue
            try:
                cb_obj = self._message_callback_type(topic_name, msg_type, type_instance)
                # NOTE(review): `cb_obj is None` would be the idiomatic test.
                if cb_obj == None:
                    self.useless_types += [py_topic_class]
                    continue
                self.subscribers[topic_name] = rospy.Subscriber(topic_name, type_class, cb_obj)
                rospy.loginfo("start subscribe (topic=%s type=%s)", topic_name, msg_type)
            except Exception as e:
                self.useless_types += [py_topic_class]
                rospy.logerr('error registering subscriber: %s', e)
                continue
class MoveBaseDB(object):
    """Periodically stores the robot's map->base transform in mongodb_store,
    and republishes the last stored pose as /initialpose on startup so
    localization can resume where the robot was left."""

    def __init__(self):
        self.db_name = rospy.get_param('robot/database','jsk_robot_lifelog')
        try:
            self.col_name = rospy.get_param('robot/name')
        except KeyError as e:
            rospy.logerr("please specify param \"robot/name\" (e.g. pr1012, olive)")
            exit(1)
        self.update_cycle = rospy.get_param('~update_cycle', 1.0)
        self.map_frame = rospy.get_param('~map_frame','map')
        self.robot_frame = rospy.get_param('~robot_frame','base_link')
        self.tf_listener = tf2_ros.BufferClient("tf2_buffer_server")
        self.initialpose_pub = rospy.Publisher('/initialpose', geometry_msgs.msg.PoseWithCovarianceStamped)
        self.msg_store = MessageStoreProxy(database=self.db_name, collection=self.col_name)
        rospy.loginfo("connected to %s.%s" % (self.db_name, self.col_name))
        rospy.loginfo("map->robot: %s -> %s" % (self.map_frame, self.robot_frame))
        self.current_pose = None        # last pose written to the db
        self.latest_pose = None         # last pose loaded from the db (to republish)
        self.latest_stamp = rospy.Time(0)
        self._load_latest_pose()
        self.pub_latest_pose()
        self.latest_exception = None    # last tf error string, for log de-duplication

    def _insert_pose_to_db(self, map_to_robot):
        """Store one TransformStamped; errors are logged, not raised."""
        try:
            res = self.msg_store.insert(map_to_robot)
            rospy.loginfo("inserted %s : %s" % (res, map_to_robot))
        except Exception as e:
            rospy.logerr('failed to insert current robot pose to db: %s', e)

    def _load_latest_pose(self):
        """Load the newest stored pose; retry once when the mongo replica
        master changed mid-query."""
        updated = None
        try:
            updated = self.msg_store.query('geometry_msgs/TransformStamped', single=True, sort_query=[("$natural", -1)])
        except Exception as e:
            rospy.logerr('failed to load latest pose from db: %s' % e)
            if 'master has changed' in str(e):
                self._load_latest_pose()
            else:
                return
        if updated is not None:
            rospy.loginfo('found latest pose %s' % updated)
            self.current_pose = updated[0]
            self.latest_pose = updated[0]

    def _need_update_db(self, t):
        """Return True when transform t has moved far enough from the last
        stored pose to be worth logging.  The distance threshold shrinks the
        longer we go without an update (time-dependent diffthre)."""
        if self.current_pose is None:
            # No reference pose: store only if nothing was loaded from db.
            if self.latest_pose is None:
                return True
            else:
                return False
        diffthre = 0.1 + 1.0 / (rospy.Time.now() - self.latest_stamp).to_sec()
        # Euclidean distance between translations.
        diffpos = sum([(t.transform.translation.x - self.current_pose.transform.translation.x) ** 2,
                       (t.transform.translation.y - self.current_pose.transform.translation.y) ** 2,
                       (t.transform.translation.z - self.current_pose.transform.translation.z) ** 2]) ** 0.5
        # Euclidean distance between quaternions (cheap rotation-change proxy).
        diffrot = sum([(t.transform.rotation.x - self.current_pose.transform.rotation.x) ** 2,
                       (t.transform.rotation.y - self.current_pose.transform.rotation.y) ** 2,
                       (t.transform.rotation.z - self.current_pose.transform.rotation.z) ** 2,
                       (t.transform.rotation.w - self.current_pose.transform.rotation.w) ** 2]) ** 0.5
        rospy.loginfo("diffthre: %f, diffpos: %f, diffrot: %f" % (diffthre, diffpos, diffrot))
        if diffthre < diffpos or diffthre / 2.0 < diffrot:
            return True
        else:
            return False

    def insert_current_pose(self):
        """Look up the current map->robot transform and store it when the
        robot moved far enough since the previously stored pose."""
        try:
            transform = self.tf_listener.lookup_transform(self.map_frame,self.robot_frame,rospy.Time(0))
            # rospy.loginfo("current pr2 location: %s" % transform)
            if self._need_update_db(transform):
                self._insert_pose_to_db(transform)
                self.current_pose = transform
        except Exception as e:
            # BUGFIX: was `str(e) is self.latest_exception` — identity
            # comparison of two distinct string objects is effectively
            # always False, so only the very first tf error was ever
            # warned about.  Use inequality so each *new* error message
            # is logged once and identical repeats stay suppressed.
            if not self.latest_exception or str(e) != self.latest_exception:
                rospy.logwarn('failed to get current robot pose from tf: %s' % e)
            self.latest_exception = str(e)

    def sleep_one_cycle(self):
        rospy.sleep(self.update_cycle)

    def pub_latest_pose(self):
        """Publish the pose loaded from the db as /initialpose (once), when
        at least one subscriber is connected."""
        if(self.latest_pose is not None and
           self.initialpose_pub.get_num_connections() > 0):
            pub_msg = geometry_msgs.msg.PoseWithCovarianceStamped()
            pub_msg.header.stamp = rospy.Time(0)
            pub_msg.header.frame_id = '/map'
            # Standard amcl-style initial covariance (0.5m sigma xy, ~15deg yaw).
            pub_msg.pose.covariance = [0.25, 0.0, 0.0, 0.0, 0.0, 0.0,
                                       0.0, 0.25, 0.0, 0.0, 0.0, 0.0,
                                       0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
                                       0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
                                       0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
                                       0.0, 0.0, 0.0, 0.0, 0.0, 0.06853891945200942]
            pub_msg.pose.pose.position.x = self.latest_pose.transform.translation.x
            pub_msg.pose.pose.position.y = self.latest_pose.transform.translation.y
            pub_msg.pose.pose.position.z = self.latest_pose.transform.translation.z
            pub_msg.pose.pose.orientation = self.latest_pose.transform.rotation
            try:
                self.initialpose_pub.publish(pub_msg)
                # Clear so we only publish the restored pose once.
                self.latest_pose = None
                rospy.loginfo('published latest pose %s' % pub_msg)
            except Exception as e:
                rospy.logerr('failed to publish initial robot pose: %s' % e)
class PeriodicReplicatorClient(object):
    # Periodically replicates the robot's mongodb collections to a remote
    # datacentre via the move_mongodb_entries action, optionally gated on a
    # wired-network-connected signal, with a manual ~replicate service.
    def __init__(self):
        super(PeriodicReplicatorClient, self).__init__()
        # parameters
        self.interval = rospy.get_param("~interval", 60 * 60 * 24) # default: 1 day
        self.delete_after_move = rospy.get_param("~delete_after_move", False)
        self.monitor_network = rospy.get_param("~monitor_network", False)
        self.poll_rate = min(300, self.interval)  # timer period, capped at 5 min
        self.replicating = False
        self.lock = Lock()  # serializes do_replicate() runs
        # database to be replicated
        self.database = rospy.get_param("robot/database")
        self.collections = rospy.get_param("~extra_collections", list())
        try:
            self.collections.append(rospy.get_param("robot/name"))
        except KeyError as e:
            rospy.logerr("specify param \"robot/name\" (e.g. pr1012, olive)")
            exit(1)
        # network monitoring
        self.network_connected = False
        if self.monitor_network:
            self.net_sub = rospy.Subscriber("/network/connected", Bool, self.network_connected_cb)
        # database interface
        self.date_msg_store = MessageStoreProxy(database=self.database, collection="replication")
        self.replicate_ac = actionlib.SimpleActionClient('move_mongodb_entries', MoveEntriesAction)
        # Wait up to ~30s for the action server, staying responsive to shutdown.
        for i in range(30):
            rospy.loginfo("waiting for advertise action /move_mongodb_entries ...[%d/30]" % i)
            if rospy.is_shutdown() or self.replicate_ac.wait_for_server(timeout=rospy.Duration(1)):
                break
        if not self.replicate_ac.wait_for_server(timeout=rospy.Duration(1)):
            rospy.logerr("Gave up waiting for advertise action /move_mongodb_entries")
            return
        # advertise service
        self.srv = rospy.Service("~replicate", Empty, self.replicate_srv_cb)
        self.poll_timer = rospy.Timer(rospy.Duration(self.poll_rate), self.timer_cb)
        rospy.loginfo("replication enabled: db: %s, collection: %s, interval: %d [sec]",
                      self.database, self.collections, self.interval)

    def network_ok(self):
        # Network is always "ok" unless monitoring is enabled.
        if self.monitor_network:
            return self.network_connected
        else:
            return True

    def get_last_replicated(self):
        # Returns the insertion time of the newest replication marker as a
        # rospy.Time, or Time(0) when none exists / on any error.
        try:
            last_replicated = self.date_msg_store.query(
                StringList._type, single=True, sort_query=[("$natural",-1)])
            date = last_replicated[1]["inserted_at"]
            # Convert the timezone-aware mongo timestamp to epoch seconds.
            utcdate = date.astimezone(pytz.utc).replace(tzinfo=None)
            epoch = datetime(1970, 1, 1)
            seconds = (utcdate - epoch).total_seconds()
            return rospy.Time.from_seconds(seconds)
        except Exception as e:
            rospy.logerr(e)
            return rospy.Time(0)

    def insert_replicate_date(self):
        # Insert a marker document whose _meta.inserted_at records when the
        # last successful replication finished.
        try:
            self.date_msg_store.insert(StringList(self.collections))
        except Exception as e:
            rospy.logwarn("failed to insert last replicate date to database: %s", e)

    def move_entries(self, move_before=None, delete_after_move=False, query=None):
        # Send one MoveEntries goal and block until it finishes, cancelling
        # if the wired network drops while monitoring is enabled.
        move_before = move_before or rospy.Duration(self.interval)
        if query is None:
            query = {}
        # The query is shipped as a JSON string pair, as the action expects.
        query = StringPairList(
            pairs=[StringPair(first=MongoQueryMsgRequest.JSON_QUERY,
                              second=json_util.dumps(query))])
        goal = MoveEntriesGoal(database=self.database,
                               collections=StringList(self.collections),
                               move_before=move_before,
                               delete_after_move=delete_after_move,
                               query=query)
        self.replicate_ac.send_goal(goal, done_cb=self.done_cb, active_cb=self.active_cb, feedback_cb=self.feedback_cb)
        while not self.replicate_ac.wait_for_result(timeout=rospy.Duration(5.0)):
            if rospy.is_shutdown():
                break
            elif self.monitor_network and not self.network_connected:
                rospy.loginfo("disconnected wired network connection. canceling replication...")
                self.replicate_ac.cancel_all_goals()

    def do_replicate(self):
        # Two passes: first copy persistent data (never deleted), then move
        # the remaining data, honoring delete_after_move.
        with self.lock:
            try:
                self.replicating = True
                rospy.loginfo("Starting to replicate persistent data")
                # first replicate persistent data without removal
                move_before = rospy.Duration(self.interval)
                self.move_entries(move_before=move_before,
                                  delete_after_move=False,
                                  query={"_meta.persistent": True})
                # then replicate all data with removal
                rospy.loginfo("Starting to replicate target data")
                self.move_entries(move_before=move_before,
                                  delete_after_move=self.delete_after_move,
                                  query={"_meta.persistent": {"$exists": False}})
            except Exception as e:
                rospy.logerr("Replication failed: %s" % e)
            finally:
                self.replicating = False

    def timer_cb(self, event=None):
        # Periodic entry point: skip when already running, offline, or the
        # replication interval has not yet elapsed.
        if self.replicate_ac.get_state() == GoalStatus.ACTIVE:
            return
        if not self.network_ok():
            rospy.loginfo("No network connection. Skipping replication.")
            return
        try:
            now = event.current_real
        except:
            # Called manually (event is None): fall back to wall clock.
            now = rospy.Time.now()
        last_replicated = self.get_last_replicated()
        if (now - last_replicated).to_sec() < self.interval:
            rospy.loginfo("Interval %d < %d. skipping replication.",
                          (now - last_replicated).to_sec(), self.interval)
            return
        if not self.replicating:
            self.do_replicate()

    def replicate_srv_cb(self, req):
        # Manual trigger via the ~replicate service.
        if self.replicating:
            rospy.loginfo("Replication is already started")
        else:
            self.do_replicate()
        return EmptyResponse()

    def done_cb(self, status, result):
        # Only record the replication timestamp on success.
        if status == GoalStatus.SUCCEEDED:
            rospy.loginfo("Replication suceeded")
            self.insert_replicate_date()
        else:
            rospy.logwarn("Replication finished with status %s" % status)

    def active_cb(self):
        # Abort immediately if the network dropped before the goal started.
        if not self.network_ok():
            rospy.loginfo("disconnected wired network connection. canceling replication...")
            self.replicate_ac.cancel_all_goals()

    def feedback_cb(self, feedback):
        rospy.loginfo(feedback)

    def network_connected_cb(self, msg):
        self.network_connected = msg.data
class ActionServer(object):
    # Action server that points the PTU at a location, grabs a pointcloud,
    # runs object recognition, reports whether the requested object was
    # found, and writes a robblog entry with an image of the scene.
    # create messages that are used to publish feedback/result
    _feedback = CheckObjectPresenceFeedback()
    _result = CheckObjectPresenceResult()

    def __init__(self, name):
        self._action_name = name
        self._as = actionlib.SimpleActionServer(self._action_name, CheckObjectPresenceAction, execute_cb=self.execute_cb, auto_start = False)
        self._ptu_pub = rospy.Publisher("/ptu/cmd", JointState)
        self._ptu_angles = JointState()
        self._ptu_angles.name = ['pan', 'tilt']
        self._ptu_angles.velocity = [0.6, 0.6]
        rospy.loginfo("Waiting for service '/recognition_service/mp_recognition'")
        rospy.wait_for_service('/recognition_service/mp_recognition')
        self.id_service = rospy.ServiceProxy("/recognition_service/mp_recognition", recognize)
        self.blog_msg_store = MessageStoreProxy(collection='robblog')
        self._as.start()
        rospy.loginfo("Action server up: %s"%self._action_name)

    def command_ptu(self, pan, tilt):
        # pan/tilt arrive in degrees; the PTU expects radians.  The fixed
        # sleep gives the unit time to reach the commanded position.
        self._ptu_angles.position = [pan / 180.0 * math.pi, tilt / 180.0 * math.pi]
        self._ptu_pub.publish(self._ptu_angles)
        rospy.sleep(5)

    def send_feedback(self, txt):
        # Publish action feedback and mirror it to the ROS log.
        self._feedback.status = txt
        self._as.publish_feedback(self._feedback)
        rospy.loginfo(txt)

    def execute_cb(self, goal):
        # move the ptu
        self.send_feedback("Moving PTU to %f, %f"%(goal.ptu_pan, goal.ptu_tilt))
        self.command_ptu(goal.ptu_pan, goal.ptu_tilt)
        # Snapshot all relevant topics at once.
        observation = Observation.make_observation(topics=[("/amcl_pose", PoseWithCovarianceStamped),
                                                           ("/head_xtion/rgb/image_color", Image),
                                                           ("/head_xtion/rgb/camera_info", CameraInfo),
                                                           ("/head_xtion/depth_registered/points", PointCloud2),
                                                           ("/head_xtion/depth/camera_info", CameraInfo),
                                                           ("/ptu/state", JointState)])
        self.send_feedback("Getting pointcloud")
        try:
            # cloud = rospy.wait_for_message("/head_xtion/depth_registered/points", PointCloud2, 5)
            cloud = observation.get_message('/head_xtion/depth_registered/points')
        except:
            # No pointcloud: report "not found" and finish.
            self.send_feedback("Failed to get a pointcloud")
            self._result.found = 0
            self._as.set_succeeded(self._result)
            return
        self.send_feedback("Returning PTU home")
        self.command_ptu(0, 0)
        object_searched = goal.object_id
        rospy.loginfo("Looking for object with id: %s"%object_searched)
        self.send_feedback("Calling object identification service.")
        try:
            result = self.id_service(cloud)
        except:
            # Recognition service failure: report "not found".
            self._result.found = 0
            self._as.set_succeeded(self._result)
            return
        rospy.loginfo("Number of objects found: %s"%len(result.ids))
        found = False
        world = World()
        objects=""
        for i in result.ids:
            rospy.loginfo(" - found %s"%i.data)
            # i.data[:-4] strips a 4-char suffix (presumably a file
            # extension like ".pcd") — TODO confirm.
            objects+="* %s\n"%i.data[:-4]
            # TODO: Should project all other objects into observation and "kill" if not visible
            new_object = world.create_object()
            # TODO: store object pose etc
            # TODO: the classification should include class info
            classification = {}
            identification = {i.data[:-4]:1}
            new_object.add_identification("ObjectInstanceRecognition",
                                          ObjectIdentification(classification, identification))
            if i.data == object_searched:
                found = True
                break
        if found:
            rospy.loginfo("Found %s"%object_searched)
            self._result.found = 1
            self._as.set_succeeded(self._result)
        else:
            rospy.loginfo("Did not find %s"%object_searched)
            self._result.found = 0
            self._as.set_succeeded(self._result)
        # Compose a robblog entry describing the check result.
        try:
            waypoint=rospy.wait_for_message("/current_node",String,5)
        except:
            waypoint=String("-")
        title = 'Fire Extinguisher Check'
        body = 'I checked for `%s` at %s and it was '%(object_searched[:-4],waypoint.data)
        if found:
            body += '*present*'
        else:
            body += '*not in place*'
        body += '.\n\nHere is an image:\n\n'
        # NOTE(review): a fresh proxy to the same 'robblog' collection as
        # self.blog_msg_store — could reuse the existing one.
        msg_store_blog = MessageStoreProxy(collection='robblog')
        img = observation.get_message('/head_xtion/rgb/image_color')
        img_id = msg_store_blog.insert(img)
        body += '![Image of the location](ObjectID(%s))' % img_id
        if len(objects)>0:
            body+='\n\nI found:\n\n'
            body+=objects
        e = rb_utils.create_timed_entry(title=title, body=body)
        self.blog_msg_store.insert(e)
class PeriodicReplicatorClient(Thread):
    """Background thread that periodically asks the 'move_mongodb_entries'
    action server to replicate (and optionally delete) message-store
    collections, recording each completed pass in the 'replication'
    collection.
    """

    def __init__(self):
        Thread.__init__(self)
        self.dead = Event()  # set by exit() to stop the run() loop
        self.replicate_interval = rospy.get_param("replication/interval", 60 * 60 * 24) # default: 1 day
        self.delete_after_move = rospy.get_param("replication/delete_after_move", False)
        self.database = rospy.get_param("robot/database")
        # Collections to replicate: command-line arguments plus the robot name.
        self.collections = rospy.myargv()[1:]
        try:
            self.collections.append(rospy.get_param("robot/name"))
        except KeyError as e:
            rospy.logerr("specify param \"robot/name\" (e.g. pr1012, olive)")
            exit(1)
        self.periodic = rospy.get_param("replication/periodic", True)
        if self.periodic:
            rospy.loginfo("periodic replication interval: %d [sec]", self.replicate_interval)
            self.disable_on_wireless_network = rospy.get_param("replication/disable_on_wireless_network", False)
            if self.disable_on_wireless_network:
                self.network_connected = False
                self.net_sub = rospy.Subscriber("/network/connected", Bool, self.network_connected_cb)
        else:
            # One-shot mode: fire almost immediately, exit after one pass.
            self.replicate_interval = 1
        self.date_msg_store = MessageStoreProxy(database=self.database, collection="replication")
        self.replicate_ac = actionlib.SimpleActionClient('move_mongodb_entries', MoveEntriesAction)
        rospy.loginfo("waiting for service advertise /move_mongodb_entries ...")
        self.replicate_ac.wait_for_server()
        rospy.loginfo("replication enabled: db: %s, collection: %s, periodic: %s", self.database, self.collections, self.periodic)

    def run(self):
        # Event.wait doubles as the inter-replication sleep; it returns True
        # (ending the loop) once exit() sets the event.
        while not self.dead.wait(self.replicate_interval):
            # NOTE(review): disable_on_wireless_network is only assigned in
            # the periodic branch of __init__; in one-shot mode this access
            # would raise AttributeError -- confirm intended usage.
            if self.disable_on_wireless_network and not self.network_connected:
                rospy.loginfo("no wired network connection. skipping replication...")
            else:
                move_before = self.time_after_last_replicate_date()
                self.move_entries(move_before)
                self.insert_replicate_date()
            if not self.periodic:
                self.exit()

    def time_after_last_replicate_date(self):
        """Return the age cut-off (rospy.Duration) for entries to move.

        Currently always Duration(0): the last replication date is fetched
        and logged but `delta` is never updated from it.
        """
        delta = 0
        try:
            last_replicated = self.date_msg_store.query(StringList._type, single=True, sort_query=[("$natural",-1)])
            date = last_replicated[1]["inserted_at"]
            rospy.loginfo("last replicated at %s", date)
        except Exception as e:
            rospy.logwarn("failed to search last replicated date from database: %s", e)
        finally:
            # NOTE(review): delta stays 0 above -- verify this cut-off is
            # intentional.
            return rospy.Duration(delta)

    def insert_replicate_date(self):
        # Record that a replication pass completed; the timestamp comes from
        # the message store's inserted_at meta field.
        try:
            self.date_msg_store.insert(StringList(self.collections))
        except Exception as e:
            rospy.logwarn("failed to insert last replicate date to database: %s", e)

    def move_entries(self, move_before):
        """Send a MoveEntries goal and poll for its result, cancelling if
        the wired network connection drops in the meantime."""
        goal = MoveEntriesGoal(database=self.database, collections=StringList(self.collections), move_before=move_before, delete_after_move=self.delete_after_move)
        self.replicate_ac.send_goal(goal, active_cb=self.active_cb, feedback_cb=self.feedback_cb)
        while not self.replicate_ac.wait_for_result(timeout=rospy.Duration(5.0)):
            if self.disable_on_wireless_network and not self.network_connected:
                rospy.loginfo("disconnected wired network connection. canceling replication...")
                self.cancel()

    def active_cb(self):
        # Abort immediately if the wired connection was lost while the goal
        # was being accepted.
        if self.disable_on_wireless_network and not self.network_connected:
            rospy.loginfo("disconnected wired network connection. canceling replication...")
            self.cancel()

    def feedback_cb(self, feedback):
        rospy.loginfo(feedback)

    def cancel(self):
        self.replicate_ac.cancel_all_goals()

    def exit(self):
        # Stop any in-flight replication and end the run() loop.
        self.cancel()
        self.dead.set()

    def network_connected_cb(self, msg):
        # /network/connected (Bool): presumably True while on the wired
        # network -- verify against the publisher.
        self.network_connected = msg.data
from geometry_msgs.msg import Pose

if __name__ == '__main__':
    # Create a topological map containing a single placeholder node so that
    # a new (empty) pointset exists in the message store.
    rospy.init_node('insert_empty_map')

    if len(sys.argv) != 2:
        rospy.logwarn("usage: insert_empty_map.py pointset_name")
        sys.exit(1)

    pointset = sys.argv[1]

    # Refuse to clobber an existing map of the same name.
    if pointset in YamlMapLoader().get_maps():
        rospy.logwarn(
            "Map with name {0} already exists. Try another one.".format(pointset))
        sys.exit(1)

    node = TopologicalNode()
    node.name = "temp_node"
    node.map = "unused"
    node.pointset = pointset

    meta = {
        'pointset': pointset,
        'map': "unused",
        'node': "temp_node",
    }
    MessageStoreProxy(collection='topological_maps').insert(node, meta)
n.map = i.map n.pointset = i.pointset p = Pose() p.position.x = float(i.waypoint[0]) p.position.y = float(i.waypoint[1]) p.position.z = float(i.waypoint[2]) p.orientation.x = float(i.waypoint[3]) p.orientation.y = float(i.waypoint[4]) p.orientation.z = float(i.waypoint[5]) p.orientation.w = float(i.waypoint[6]) n.pose = p for j in i.vertices: v = Vertex() v.x = float(j[0]) v.y = float(j[1]) n.verts.append(v) for k in i.edges: e = Edge() e.node = k['node'] e.action = k['action'] eid = get_edge_id(i.name, e.node, eids) eids.append(eid) e.edge_id = eid e.top_vel = 0.55 e.map_2d = map_name n.edges.append(e) print n msg_store.insert(n, meta) #mongodb_store.util.store_message(points_db,p,val)
import StringIO if __name__ == '__main__': rospy.init_node("example_message_store_client") msg_store = MessageStoreProxy() p = Pose(Point(0, 1, 2), Quaternion(3, 4, 5, 6)) try: # insert a pose object with a name, store the id from db p_id = msg_store.insert_named("my favourite pose", p) # you don't need a name (note that this p_id is different than one above) p_id = msg_store.insert(p) p_id = msg_store.insert(['test1', 'test2']) # get it back with a name print msg_store.query_named("my favourite pose", Pose._type) p.position.x = 666 # update it with a name msg_store.update_named("my favourite pose", p) p.position.y = 2020 # update the other inserted one using the id msg_store.update_id(p_id, p)
class SaveLocations():
    """Logs people-tracker detections together with the robot's pose and
    topological position into the 'people_perception' message-store
    collection.
    """

    def __init__(self):
        rospy.logdebug("Intialising logging")
        # Latest known robot state, updated by the subscribers below.
        self.robot_pose = Pose()
        self.current_node = "none"
        self.current_edge = "none"
        self.closest_node = "none"
        self.tfl = tf.TransformListener()
        self.dataset_name = "tracks"
        self.target_frame = "/map"
        self.msg_store = MessageStoreProxy(collection="people_perception")
        # Optional topic whose LoggingManager messages gate logging on/off.
        manager_topic = rospy.get_param("~manager_topic", "")
        rospy.Subscriber(
            "/robot_pose",
            Pose,
            callback=self.pose_callback,
            queue_size=10
        )
        rospy.Subscriber(
            "/current_node",
            String,
            callback=self.node_callback,
            queue_size=10
        )
        rospy.Subscriber(
            "/current_edge",
            String,
            callback=self.edge_callback,
            queue_size=10
        )
        rospy.Subscriber(
            "/closest_node",
            String,
            callback=self.closest_callback,
            queue_size=10
        )
        subs = [
            message_filters.Subscriber(
                "/people_tracker/positions",
                PeopleTracker
            )
        ]
        if not manager_topic == '':
            subs += [message_filters.Subscriber(manager_topic, LoggingManager)]
        # Approximate sync so the optional manager message arrives together
        # with the tracker message in people_callback.
        ts = message_filters.ApproximateTimeSynchronizer(
            subs,
            10,
            0.5
        )
        ts.registerCallback(self.people_callback)

    def transform(self, pose, target_frame):
        """Transform a PoseStamped into target_frame; returns None on tf
        failure (logged as a warning)."""
        try:
            self.tfl.waitForTransform(
                target_frame,
                pose.header.frame_id,
                pose.header.stamp,
                rospy.Duration(3.0)
            )
            return self.tfl.transformPose(target_frame=target_frame, ps=pose)
        except (
            tf.Exception,
            tf.ConnectivityException,
            tf.LookupException,
            tf.ExtrapolationException
        ) as e:
            rospy.logwarn(e)
            return None
        return None  # unreachable: both paths above already return

    def people_callback(self, pl, *mgr):
        """Synchronised callback: persist one PeopleTrackerLogging entry
        per PeopleTracker message.

        Skipped when the optional manager message disables logging or when
        no people are currently tracked.
        """
        if len(mgr) and not mgr[0].log:
            return
        if not len(pl.distances):
            return
        meta = {}
        meta["people"] = self.dataset_name
        rospy.logdebug(
            "Person detected. "
            "Logging to people_perception collection."
        )
        insert = PeopleTrackerLogging()
        insert.header = pl.header
        insert.uuids = pl.uuids
        for p in pl.poses:
            # Store poses in the map frame; untransformable poses are dropped.
            tp = self.transform(PoseStamped(header=pl.header, pose=p), self.target_frame)
            if tp:
                insert.people.append(tp)
        insert.robot = self.robot_pose
        insert.people_tracker = pl
        insert.closest_node = self.closest_node
        insert.current_edge = self.current_edge
        insert.current_node = self.current_node
        self.msg_store.insert(insert, meta)

    def pose_callback(self, pose):
        # Cache the most recent robot pose.
        self.robot_pose = pose

    def node_callback(self, msg):
        self.current_node = msg.data

    def edge_callback(self, msg):
        self.current_edge = msg.data

    def closest_callback(self, msg):
        self.closest_node = msg.data
class PeopleCounter(object):
    """Counts distinct people per SOMA region by fusing upper-body
    detections with people-tracker tracks, and writes activity reports
    (optionally with captured images) as RobblogEntry messages.
    """

    def __init__(self, config, region_categories=dict(), coll='activity_robblog'):
        # NOTE(review): dict() default argument is shared between calls,
        # but it is only read here (never mutated), so it is safe in practice.
        rospy.loginfo("Starting activity checking...")
        self.region_categories = region_categories
        self._lock = False  # plain-bool pseudo-lock shared with cb()
        # regions = {roi:polygon} and soma map info
        self.regions, self.soma_map = get_soma_info(config)
        self.reset()
        # tf stuff
        self._tfl = tf.TransformListener()
        # create db
        rospy.loginfo("Create database collection %s..." % coll)
        self._db = MessageStoreProxy(collection=coll)
        self._db_image = MessageStoreProxy(collection=coll+"_img")
        self._ubd_db = MessageStoreProxy(collection="upper_bodies")
        # service client to upper body logging
        rospy.loginfo("Create client to /vision_logging_service/capture...")
        self.capture_srv = rospy.ServiceProxy(
            "/vision_logging_service/capture", CaptureUBD
        )
        # subscribing to ubd topic
        subs = [
            message_filters.Subscriber(
                rospy.get_param(
                    "~ubd_topic", "/upper_body_detector/bounding_box_centres"
                ),
                PoseArray
            ),
            message_filters.Subscriber(
                rospy.get_param("~tracker_topic", "/people_tracker/positions"),
                PeopleTracker
            )
        ]
        ts = message_filters.ApproximateTimeSynchronizer(
            subs, queue_size=5, slop=0.15
        )
        ts.registerCallback(self.cb)

    def reset(self):
        """Clear all per-run detection state."""
        # start modified code
        self.detected_time = dict()  # uuid -> time first seen (rospy.Time)
        # end modified code
        self.uuids = {roi: list() for roi, _ in self.regions.iteritems()}
        self.image_ids = {roi: list() for roi, _ in self.regions.iteritems()}
        self.people_poses = list()
        self._stop = False
        self._ubd_pos = list()
        self._tracker_pos = list()
        self._tracker_uuids = list()

    def cb(self, ubd_cent, pt):
        """Synchronised UBD + tracker callback; snapshots the latest
        detections.  Messages arriving while _lock is held are dropped."""
        if not self._lock:
            self._lock = True
            self._tracker_uuids = pt.uuids
            self._ubd_pos = self.to_world_all(ubd_cent)
            self._tracker_pos = [i for i in pt.poses]
            self._lock = False

    def to_world_all(self, pose_arr):
        """Transform every pose in a PoseArray into /map; returns [] on
        any tf failure."""
        transformed_pose_arr = list()
        try:
            fid = pose_arr.header.frame_id
            for cpose in pose_arr.poses:
                ctime = self._tfl.getLatestCommonTime(fid, "/map")
                pose_stamped = PoseStamped(Header(1, ctime, fid), cpose)
                # Get the translation for this camera's frame to the world.
                # And apply it to all current detections.
                tpose = self._tfl.transformPose("/map", pose_stamped)
                transformed_pose_arr.append(tpose.pose)
        except tf.Exception as e:
            rospy.logwarn(e)
            # In case of a problem, just give empty world coordinates.
            return []
        return transformed_pose_arr

    def stop_check(self):
        # Signals continuous_check() to finish its loop early.
        self._stop = True

    def _is_new_person(self, ubd_pose, track_pose, tracker_ind):
        """Return (is_new, roi).

        A detection counts as a new person when the UBD and tracker poses
        agree (< 0.2m), the track uuid has not been counted before, the
        pose lies inside a known region, and no previously counted person
        stood within 0.3m.
        """
        pose_inside_roi = ''
        # merge ubd with tracker pose
        cond = euclidean(
            [ubd_pose.position.x, ubd_pose.position.y],
            [track_pose.position.x, track_pose.position.y]
        ) < 0.2
        # uuid must be new
        if cond:
            is_new = True
            for roi, uuids in self.uuids.iteritems():
                if self._tracker_uuids[tracker_ind] in uuids:
                    is_new = False
                    break
            cond = cond and is_new
        if cond:
            # this pose must be inside a region
            for roi, region in self.regions.iteritems():
                if region.contains(
                    Point(ubd_pose.position.x, ubd_pose.position.y)
                ):
                    pose_inside_roi = roi
                    break
            cond = cond and (pose_inside_roi != '')
        if cond:
            is_near = False
            for pose in self.people_poses:
                if euclidean(
                    pose, [ubd_pose.position.x, ubd_pose.position.y]
                ) < 0.3:
                    is_near = True
                    break
            cond = cond and (not is_near)
        return cond, pose_inside_roi

    def _uuids_roi_to_category(self, dict_uuids):
        """Group {roi: uuids} into {category: (rois, uuids)} using
        self.region_categories; an uncategorised roi maps to itself."""
        result = dict()
        for roi, uuids in dict_uuids.iteritems():
            region_category = roi
            if region_category in self.region_categories:
                region_category = self.region_categories[region_category]
            if region_category not in result:
                result[region_category] = (list(), list())
            result[region_category][0].append(roi)
            result[region_category][1].extend(uuids)
        return result

    def _create_robmsg(self, start_time, end_time):
        """Build two RobblogEntry lists (plain text and with inline image
        markdown), one entry per region category.

        start_time/end_time are datetime objects here (converted by
        _store before the call).
        """
        regions_to_string = dict()
        regions_to_string_img = dict()
        for region_category, (rois, uuids) in self._uuids_roi_to_category(self.uuids).iteritems():
            if region_category not in regions_to_string:
                regions_to_string[region_category] = '# Activity Report \n'
                regions_to_string[region_category] += ' * **Regions:** %s \n' % str(rois)
                regions_to_string[region_category] += ' * **Area:** %s \n' % region_category
                regions_to_string[region_category] += ' * **Start time:** %s \n' % str(start_time)
                regions_to_string[region_category] += ' * **End time:** %s \n' % str(end_time)
                regions_to_string[region_category] += ' * **Summary:** %d person(s) were detected \n' % len(uuids)
                regions_to_string[region_category] += ' * **Details:** \n\n'
                regions_to_string_img[region_category] = copy.copy(regions_to_string[region_category])
            for roi in rois:
                for ind, uuid in enumerate(self.uuids[roi]):
                    try:
                        detected_time = self.detected_time[uuid]
                    except:
                        detected_time = start_time
                    # NOTE(review): on the except path detected_time is
                    # already a datetime with no .secs attribute, so the
                    # next line would raise -- confirm intent.
                    detected_time = datetime.datetime.fromtimestamp(detected_time.secs)
                    regions_to_string[region_category] += '%s was detected at %s \n\n' % (uuid, detected_time)
                    regions_to_string_img[region_category] += "![%s](ObjectID(%s)) " % (
                        uuid, self.image_ids[roi][ind]
                    )
                    regions_to_string_img[region_category] += 'was detected at %s \n\n' % str(detected_time)
        entries = list()
        for region_category, string_body in regions_to_string.iteritems():
            entries.append(RobblogEntry(
                title="%s Activity Report - %s" % (start_time.date(), region_category),
                body=string_body
            ))
        entry_images = list()
        for region_category, string_body in regions_to_string_img.iteritems():
            entry_images.append(RobblogEntry(
                title="%s Activity Report - %s" % (start_time.date(), region_category),
                body=string_body
            ))
        return entries, entry_images

    def _store(self, start_time, end_time):
        """Convert rospy.Time bounds to datetimes and persist the report
        entries (plain entries and image entries go to separate collections)."""
        rospy.loginfo("Storing location and the number of detected persons...")
        start_time = datetime.datetime.fromtimestamp(start_time.secs)
        end_time = datetime.datetime.fromtimestamp(end_time.secs)
        entries, entry_images = self._create_robmsg(start_time, end_time)
        for entry in entries:
            self._db.insert(entry)
        for entry in entry_images:
            self._db_image.insert(entry)

    def continuous_check(self, duration):
        """Main loop: for `duration` (rospy.Duration, unless stop_check is
        called first), fuse the latest UBD/tracker snapshots, capture an
        image for each newly counted person, record uuid/pose/time per
        region, then store the report."""
        rospy.loginfo("Start looking for people...")
        start_time = rospy.Time.now()
        end_time = rospy.Time.now()
        while (end_time - start_time) < duration and not self._stop:
            if not self._lock:
                self._lock = True
                for ind_ubd, i in enumerate(self._ubd_pos):
                    for ind, j in enumerate(self._tracker_pos):
                        cond, pose_inside_roi = self._is_new_person(i, j, ind)
                        if cond:
                            # Ask the vision logging service to capture the
                            # upper-body image of this person.
                            result = self.capture_srv()
                            rospy.sleep(0.1)
                            _id = ""
                            if len(result.obj_ids) > 0:
                                ubd_log = self._ubd_db.query_id(
                                    result.obj_ids[0], LoggingUBD._type
                                )
                                try:
                                    _id = self._db_image.insert(
                                        ubd_log[0].ubd_rgb[ind_ubd]
                                    )
                                except:
                                    rospy.logwarn(
                                        "Missed the person to capture images..."
                                    )
                            self.image_ids[pose_inside_roi].append(_id)
                            # self.uuids.append(self._tracker_uuids[ind])
                            self.uuids[pose_inside_roi].append(
                                self._tracker_uuids[ind]
                            )
                            if self._tracker_uuids[ind] not in self.detected_time:
                                self.detected_time[self._tracker_uuids[ind]] = end_time
                            self.people_poses.append(
                                [i.position.x, i.position.y]
                            )
                            rospy.loginfo(
                                "%s is detected in region %s - (%.2f, %.2f)" % (
                                    self._tracker_uuids[ind], pose_inside_roi,
                                    i.position.x, i.position.y
                                )
                            )
                self._lock = False
            end_time = rospy.Time.now()
            rospy.sleep(0.1)
        self._store(start_time, end_time)
        self._stop = False
class YamlMapLoader(object):
    """Loads topological maps from YAML files and inserts them into the
    'topological_maps' message-store collection.
    """

    def __init__(self):
        self.msg_store = MessageStoreProxy(collection='topological_maps')

    def get_maps(self):
        """
        Queries the database and returns details of the available topological maps.

        :return: A dictionary where each key is the name of a topological map and
                 each item is a dictionary of details. Details are:
                 "number_nodes" : integer
                 "edge_actions" : list of action server names used for traversal
                 "last_modified" : datetime.datetime object for the last time a node was inserted
        """
        maps = dict()
        nodes = self.msg_store.query(TopologicalNode._type)
        for node in nodes:
            pointset = node[1]["pointset"]
            # `in` rather than the Python-2-only dict.has_key().
            if pointset not in maps:
                maps[pointset] = {"number_nodes": 0,
                                  "edge_actions": set(),
                                  "last_modified": ""}
            maps[pointset]["number_nodes"] += 1
            # Track the newest insertion time across the map's nodes.
            if (maps[pointset]["last_modified"] == "" or
                    node[1]["inserted_at"] > maps[pointset]["last_modified"]):
                maps[pointset]["last_modified"] = node[1]["inserted_at"]
            for edge in node[0].edges:
                maps[pointset]["edge_actions"].add(edge.action)
        return maps

    def _load_yaml(self, filename):
        """Parse a single YAML map file and return its content."""
        rospy.loginfo("loading %s" % filename)
        with open(filename, 'r') as f:
            # NOTE(review): yaml.load on untrusted files can construct
            # arbitrary Python objects; prefer yaml.safe_load if the map
            # files are not fully trusted.
            return yaml.load(f)

    def read_maps(self, p):
        """Read map data from a single YAML file, or from every *.yaml file
        when `p` is a directory.

        :param p: path to a YAML file or a directory containing YAML files
        :return: list of parsed map structures
        """
        data = []
        if os.path.isdir(p):
            for f in os.listdir(p):
                if f.endswith(".yaml"):
                    data.append(self._load_yaml(os.path.join(p, f)))
        else:
            data.append(self._load_yaml(p))
        return data

    def insert_maps(self, data, new_pointset=None, force=False):
        """Insert maps into the datacentre.

        :param data: list of maps (as returned by read_maps)
        :param new_pointset: optional pointset name overriding the one in
               each map's meta; with several maps an index is appended
        :param force: when True, delete an existing map of the same name
               before inserting; otherwise skip that map with a warning
        """
        current_maps = self.get_maps()
        for idx, tmap in enumerate(data):
            pointset = None
            if new_pointset is not None:
                # If there are more than one map, it takes the custom pointset and appends an index
                pointset = new_pointset + str(idx + 1) if len(data) > 1 else new_pointset
            first_node = True
            for i in tmap:
                try:
                    meta = i['meta']
                    meta['pointset'] = pointset if pointset is not None else meta['pointset']
                    if meta['pointset'] in current_maps and first_node:
                        first_node = False
                        if not force:
                            rospy.logwarn("Map '%s' already in datacentre, skipping! Use -f to force override or change pointset name with --pointset" % meta['pointset'])
                            break
                        else:
                            # Remove the existing map before re-inserting.
                            topo_map = topological_map(meta['pointset'])
                            topo_map.delete_map()
                    elif first_node:
                        first_node = False
                        rospy.loginfo("Inserting map: %s" % meta['pointset'])
                    msgv = dc_util.dictionary_to_message(i['node'], TopologicalNode)
                    self.msg_store.insert(msgv, meta)
                except TypeError:
                    pass  # Not a topo map
class SOMAROIManager():
    """Interactive-marker editor for SOMA regions of interest (ROIs).

    ROI corner points are stored as SOMAROIObject messages in the
    'soma_roi' message-store collection and mirrored as GeoJSON polygons
    in the geospatial store for spatial queries.
    """

    def __init__(self, soma_map, soma_conf, config_file=None):
        self.soma_map = soma_map
        self.soma_conf = soma_conf
        if config_file:
            self._config_file = config_file
        else:
            # default file
            rp = RosPack()
            path = rp.get_path('soma_roi_manager') + '/config/'
            filename = 'default.json'
            self._config_file=path+filename
        # In-memory caches keyed by object id (string), kept in sync with DB:
        self._soma_obj_ids = dict()      # object id -> message-store _id
        self._soma_obj_msg = dict()      # object id -> SOMAROIObject message
        self._soma_obj_roi_ids = dict()  # roi id -> list of object ids
        self._soma_obj_roi = dict()      # object id -> roi id
        self._soma_obj_type = dict()     # object id -> ROI type
        self._soma_obj_pose = dict()     # object id -> Pose
        self._interactive = True
        self._msg_store=MessageStoreProxy(collection="soma_roi")
        self._gs_store=GeoSpatialStoreProxy(db="geospatial_store", collection="soma")
        self._server = InteractiveMarkerServer("soma_roi")
        self._init_types()
        self._init_menu()
        self.load_objects()
        rospy.spin()  # the constructor runs the node (blocks here)

    def _init_types(self):
        # read from config in soma_objects
        with open(self._config_file) as config_file:
            config = json.load(config_file)
            self.mesh = dict()
            for k, v in config['roi'].iteritems():
                self.mesh[k] = v

    def _init_menu(self):
        """Build the right-click marker menu (add/delete point, add/delete
        ROI per configured type, movement-control toggle)."""
        self.menu_handler = MenuHandler()
        add_point_entry = self.menu_handler.insert("Add Point", callback=self._add_point_cb)
        del_point_entry = self.menu_handler.insert("Delete Point", callback=self._del_point_cb)
        add_entry = self.menu_handler.insert( "Add ROI" )
        self.menu_item = dict()
        for k in sorted(self.mesh.keys()):
            entry = self.menu_handler.insert(k, parent=add_entry, callback=self._add_cb)
            self.menu_item[entry] = k
        del_entry = self.menu_handler.insert( "Delete ROI", callback=self._del_cb)
        enable_entry = self.menu_handler.insert( "Movement control", callback=self._enable_cb )
        self.menu_handler.setCheckState( enable_entry, MenuHandler.CHECKED )

    def _add_cb(self, feedback):
        # Menu: create a new ROI of the selected type at the click pose.
        rospy.loginfo("Add ROI: %s", self.menu_item[feedback.menu_entry_id])
        self.add_object(self.menu_item[feedback.menu_entry_id], feedback.pose)

    def _del_cb(self, feedback):
        # Menu: delete the whole ROI the clicked marker belongs to.
        rospy.loginfo("Delete ROI: %s", feedback.marker_name)
        roi = self._soma_obj_roi[feedback.marker_name]
        for r in self._soma_obj_roi_ids[roi]:
            self.delete_object(r)
        self.undraw_roi(roi)

    def _add_point_cb(self, feedback):
        # Menu: add another corner point to the clicked marker's ROI.
        rospy.loginfo("Add point: %s", feedback.marker_name)
        roi = self._soma_obj_roi[feedback.marker_name]
        t = self._soma_obj_type[feedback.marker_name]
        self.add_object(t, feedback.pose, roi)
        #self.draw_roi(roi)

    def _del_point_cb(self, feedback):
        # Menu: remove the clicked corner point and redraw its ROI.
        rospy.loginfo("Delete point: %s", feedback.marker_name)
        self.delete_object(feedback.marker_name)
        roi = self._soma_obj_roi[feedback.marker_name]
        self.draw_roi(roi)

    def _update_poly(self, feedback):
        # ROI outline markers are not interactive; nothing to do.
        return

    def _update_cb(self, feedback):
        """Marker-drag callback: update the cached pose, redraw the ROI and
        debounce the DB update (one update_object call 3s after the last
        movement of each marker)."""
        p = feedback.pose.position
        #print "Marker " + feedback.marker_name + " is now at " + str(p.x) + ", " + str(p.y)
        self._soma_obj_pose[feedback.marker_name] = feedback.pose
        roi = self._soma_obj_roi[feedback.marker_name]
        self.draw_roi(roi)
        if hasattr(self, "vp_timer_"+feedback.marker_name):
            getattr(self, "vp_timer_"+feedback.marker_name).cancel()
        setattr(self, "vp_timer_"+feedback.marker_name,
                Timer(3, self.update_object, [feedback]))
        getattr(self, "vp_timer_"+feedback.marker_name).start()

    def _enable_cb(self, feedback):
        """Toggle movement control on all markers and re-load them."""
        handle = feedback.menu_entry_id
        state = self.menu_handler.getCheckState( handle )
        if state == MenuHandler.CHECKED:
            self.menu_handler.setCheckState( handle, MenuHandler.UNCHECKED )
            self._interactive = False
        else:
            self.menu_handler.setCheckState( handle, MenuHandler.CHECKED )
            self._interactive = True
        self.menu_handler.reApply( self._server )
        self.load_objects()
        self._server.applyChanges()

    def _next_id(self):
        # Monotonic object id, seeded from the DB in _retrieve_objects.
        self._soma_id += 1
        return self._soma_id

    def _next_roi_id(self):
        # Monotonic ROI id, seeded from the DB in _retrieve_objects.
        self._soma_roi_id += 1
        return self._soma_roi_id

    def _retrieve_objects(self):
        """Query all SOMAROIObjects for this map/config and seed the id
        counters from the highest ids found."""
        objs = self._msg_store.query(SOMAROIObject._type, message_query={"map": self.soma_map,
                                                                         "config": self.soma_conf})
        max_id = 0
        max_roi_id = 0
        for o,om in objs:
            if int(o.id) > max_id:
                max_id = int(o.id)
            if int(o.roi_id) > max_roi_id:
                max_roi_id = int(o.roi_id)
        self._soma_id = max_id
        self._soma_roi_id = max_roi_id
        return objs

    def load_objects(self):
        """(Re)build all in-memory caches and markers from the DB; inserts
        a first 'Office' ROI when the collection is empty."""
        self._soma_obj_roi_ids = dict()
        objs = self._retrieve_objects()
        # if collection is empty insert initial object
        if not objs:
            pose = Pose()
            self.add_object('Office', pose)
            return
        # otherwise, load all object from collection
        for o, om in objs:
            self._soma_obj_ids[o.id] = om['_id']
            self._soma_obj_msg[o.id] = o
            if o.roi_id in self._soma_obj_roi_ids:
                self._soma_obj_roi_ids[o.roi_id].append(o.id)
            else:
                self._soma_obj_roi_ids[o.roi_id] = list()
                self._soma_obj_roi_ids[o.roi_id].append(o.id)
            self._soma_obj_roi[o.id] = o.roi_id
            self._soma_obj_type[o.id] = o.type
            self._soma_obj_pose[o.id] = o.pose
            self.load_object(o.id, o.roi_id, o.type, o.pose)
        self.draw_all_roi()

    def draw_all_roi(self):
        for key in self._soma_obj_roi_ids:
            self.draw_roi(key)

    def undraw_all_roi(self):
        for key in self._soma_obj_roi_ids:
            self.undraw_roi(key)

    def draw_roi(self, roi):
        """Redraw the polygon outline marker of one ROI (erase + insert)."""
        v = self._soma_obj_roi_ids[roi]
        t = self._soma_obj_type[v[0]]
        p = self._soma_obj_pose[v[0]]
        int_marker = self.create_roi_marker(roi, t, p, v)
        self._server.erase("ROI-" + roi)
        self._server.applyChanges()
        self._server.insert(int_marker, self._update_poly)
        self._server.applyChanges()

    def undraw_roi(self, roi):
        self._server.erase("ROI-" + roi)
        self._server.applyChanges()

    def load_object(self, soma_id, roi, soma_type, pose):
        """Create the interactive corner marker for one object and attach
        the menu to it."""
        int_marker = self.create_object_marker(soma_id, roi, soma_type, pose)
        self._server.insert(int_marker, self._update_cb)
        self.menu_handler.apply( self._server, soma_id )
        self._server.applyChanges()

    def add_object(self, soma_type, pose, roi_id=None):
        """Insert a new corner point (new ROI when roi_id is None) into the
        message store, the caches, the marker server and the geospatial
        store."""
        # todo: add to mongodb
        soma_id = self._next_id()
        if roi_id == None:
            soma_roi_id = self._next_roi_id()
            self._soma_obj_roi_ids[str(soma_roi_id)] = list()
        else:
            soma_roi_id = roi_id
        soma_obj = SOMAROIObject()
        soma_obj.id = str(soma_id)
        soma_obj.roi_id = str(soma_roi_id)
        soma_obj.map = str(self.soma_map)
        soma_obj.config = str(self.soma_conf)
        soma_obj.type = soma_type
        soma_obj.pose = pose
        soma_obj.frame = 'map'
        _id = self._msg_store.insert(soma_obj)
        self._soma_obj_ids[soma_obj.id] = _id
        self._soma_obj_msg[soma_obj.id] = soma_obj
        self._soma_obj_roi_ids[soma_obj.roi_id].append(soma_obj.id)
        self._soma_obj_roi[soma_obj.id] = soma_obj.roi_id
        self._soma_obj_type[soma_obj.id] = soma_type
        self._soma_obj_pose[soma_obj.id] = pose
        self.load_object(str(soma_id), soma_obj.roi_id, soma_type, pose)
        # geospatial store
        # delete old message
        res = self._gs_store.find_one({'soma_roi_id': soma_roi_id,
                                       'soma_map': self.soma_map,
                                       'soma_config': self.soma_conf})
        if res:
            _gs_id = res['_id']
            self._gs_store.remove(_gs_id)
            #rospy.loginfo("GS Store: deleted roi")
        # add new object to geospatial store
        geo_json = self.geo_json_from_soma_obj(soma_obj)
        if geo_json:
            try:
                self._gs_store.insert(geo_json)
                rospy.loginfo("GS Store: updated roi (%s %s)" % (soma_obj.type, soma_obj.roi_id) )
            except:
                rospy.logerr("The polygon of %s %s is malformed (self-intersecting) => Please update geometry." % (soma_type, soma_roi_id))

    def geo_json_from_soma_obj(self, soma_obj):
        """Build a GeoJSON polygon (lng/lat) from the ROI that soma_obj
        belongs to; returns None when the ROI has fewer than 3 points."""
        geo_json = {}
        geo_json['soma_roi_id'] = soma_obj.roi_id
        geo_json['soma_map'] = soma_obj.map
        geo_json['soma_config'] = soma_obj.config
        geo_json['type'] = soma_obj.type
        if len(self._soma_obj_roi_ids[soma_obj.roi_id]) < 3:
            rospy.logerr("GS Store: %s %s, less then 3 points => Add more points or delete ROI."
                         % (soma_obj.type, soma_obj.roi_id) )
            return None
        coordinates = []
        for obj_id in self._soma_obj_roi_ids[soma_obj.roi_id]:
            p = self._soma_obj_pose[obj_id]
            coordinates.append(self._gs_store.coords_to_lnglat(p.position.x, p.position.y))
        # Close the polygon ring by repeating the first point.
        p = self._soma_obj_pose[self._soma_obj_roi_ids[soma_obj.roi_id][0]]
        coordinates.append(self._gs_store.coords_to_lnglat(p.position.x, p.position.y))
        geo_json['loc'] = {'type': 'Polygon',
                           'coordinates': [coordinates]}
        return geo_json

    def delete_object(self, soma_id):
        """Remove one corner point everywhere (message store, markers,
        caches) and refresh the ROI's geospatial polygon."""
        # todo: delete from mongodb
        _id = self._soma_obj_ids[str(soma_id)]
        self._msg_store.delete(str(_id))
        self._server.erase(soma_id)
        self._server.applyChanges()
        roi = self._soma_obj_roi[str(soma_id)]
        nodes = self._soma_obj_roi_ids[roi]
        new_nodes = []
        for n in nodes:
            if n != str(soma_id):
                new_nodes.append(n)
        self._soma_obj_roi_ids[roi] = new_nodes
        # geospatial store
        # delete old message
        # NOTE(review): lookup uses soma_id, not str(soma_id) as above --
        # verify both forms always hold the same key.
        old_msg = self._soma_obj_msg[soma_id]
        res = self._gs_store.find_one({'soma_roi_id': roi,
                                       'soma_map': self.soma_map,
                                       'soma_config': self.soma_conf})
        if res:
            _gs_id = res['_id']
            self._gs_store.remove(_gs_id)
            #rospy.loginfo("GS Store: deleted roi")
        # add new object to geospatial store
        geo_json = self.geo_json_from_soma_obj(old_msg)
        if geo_json:
            try:
                self._gs_store.insert(geo_json)
                rospy.loginfo("GS Store: updated roi (%s %s)" % (old_msg.type, old_msg.roi_id) )
            except:
                rospy.logerr("The polygon of %s %s is malformed (self-intersecting) => Please update geometry."
                             % (old_msg.type, old_msg.roi_id))

    def update_object(self, feedback):
        """Debounced write-back after a marker drag: persist the new pose
        and refresh the ROI's geospatial polygon."""
        rospy.loginfo("Updated marker: %s", feedback.marker_name)
        _id = self._soma_obj_ids[feedback.marker_name]
        msg = self._soma_obj_msg[feedback.marker_name]
        new_msg = copy.deepcopy(msg)
        new_msg.pose = feedback.pose
        self._soma_obj_pose[feedback.marker_name] = feedback.pose
        self._msg_store.update_id(_id, new_msg)
        # geospatial store
        # delete old message
        res = self._gs_store.find_one({'soma_roi_id': new_msg.roi_id,
                                       'soma_map': self.soma_map,
                                       'soma_config': self.soma_conf})
        if res:
            _gs_id = res['_id']
            self._gs_store.remove(_gs_id)
            #rospy.loginfo("GS Store: deleted roi")
        # add new object to geospatial store
        geo_json = self.geo_json_from_soma_obj(new_msg)
        if geo_json:
            try:
                self._gs_store.insert(geo_json)
                rospy.loginfo("GS Store: updated roi (%s %s)" % (new_msg.type, new_msg.roi_id) )
            except:
                rospy.logerr("The polygon of %s %s is malformed (self-intersecting) => Please update geometry." % (new_msg.type, new_msg.roi_id))

    def create_object_marker(self, soma_obj, roi, soma_type, pose):
        """Build the draggable sphere marker for one corner point.

        `soma_obj` is the object id string, used as the marker name.
        """
        # create an interactive marker for our server
        int_marker = InteractiveMarker()
        int_marker.header.frame_id = "map"
        int_marker.name = soma_obj
        int_marker.description = soma_type + ' (' + roi +  ')'
        int_marker.pose = pose
        int_marker.pose.position.z = 0.01
        marker = Marker()
        marker.type = Marker.SPHERE
        marker.scale.x = 0.25
        marker.scale.y = 0.25
        marker.scale.z = 0.25
        int_marker.pose.position.z = (marker.scale.z / 2)
        # Colour is a deterministic function of the type name (seeded RNG).
        random.seed(soma_type)
        val = random.random()
        marker.color.r = r_func(val)
        marker.color.g = g_func(val)
        marker.color.b = b_func(val)
        marker.color.a = 1.0
        #marker.pose = pose
        # create a control which will move the box
        # this control does not contain any markers,
        # which will cause RViz to insert two arrows
        control = InteractiveMarkerControl()
        control.orientation.w = 1
        control.orientation.x = 0
        control.orientation.y = 1
        control.orientation.z = 0
        control.interaction_mode = InteractiveMarkerControl.MOVE_PLANE
        if self._interactive:
            # NOTE(review): a deepcopy of the move control is appended here
            # and the original again below -- confirm the double append is
            # intentional.
            int_marker.controls.append(copy.deepcopy(control))
        # add the control to the interactive marker
        int_marker.controls.append(control);
        # add menu control
        menu_control = InteractiveMarkerControl()
        menu_control.interaction_mode = InteractiveMarkerControl.BUTTON
        menu_control.always_visible = True
        menu_control.markers.append( marker) #makeBox(int_marker) )
        int_marker.controls.append(menu_control)
        return int_marker

    def create_roi_marker(self, roi, soma_type, pose, points):
        """Build the (non-interactive) closed line-strip outline marker for
        one ROI; `points` is the list of its corner object ids."""
        #print "POINTS: " + str(points)
        int_marker = InteractiveMarker()
        int_marker.header.frame_id = "map"
        int_marker.name = "ROI-" + roi
        int_marker.description = roi
        int_marker.pose = pose
        marker = Marker()
        marker.type = Marker.LINE_STRIP
        marker.scale.x = 0.1
        # Same deterministic per-type colour as the corner markers.
        random.seed(soma_type)
        val = random.random()
        marker.color.r = r_func(val)
        marker.color.g = g_func(val)
        marker.color.b = b_func(val)
        marker.color.a = 1.0
        control = InteractiveMarkerControl()
        control.always_visible = True
        control.markers.append( marker )
        int_marker.controls.append(control )
        marker.points = []
        # Points are relative to the marker pose; first point is repeated
        # at the end to close the polygon.
        for point in points:
            p = Point()
            pose = self._soma_obj_pose[point]
            p.x = pose.position.x - int_marker.pose.position.x
            p.y = pose.position.y - int_marker.pose.position.y
            marker.points.append(p)
        p = Point()
        pose = self._soma_obj_pose[points[0]]
        p.x = pose.position.x - int_marker.pose.position.x
        p.y = pose.position.y - int_marker.pose.position.y
        marker.points.append(p)
        return int_marker
def execute_cb(self, goal):
    """Goal callback: look at (ptu_pan, ptu_tilt), run recognition on the
    observed pointcloud and report whether goal.object_id was seen.

    The action is always set to succeeded; presence is signalled via
    result.found (1 = found, 0 = not found / error).  The outcome is also
    blogged (with the RGB image) to the 'robblog' collection.
    """
    # move the ptu
    self.send_feedback("Moving PTU to %f, %f"%(goal.ptu_pan, goal.ptu_tilt))
    self.command_ptu(goal.ptu_pan, goal.ptu_tilt)
    observation = Observation.make_observation(topics=[("/amcl_pose", PoseWithCovarianceStamped), ("/head_xtion/rgb/image_color", Image), ("/head_xtion/rgb/camera_info", CameraInfo), ("/head_xtion/depth_registered/points", PointCloud2), ("/head_xtion/depth/camera_info", CameraInfo), ("/ptu/state", JointState)])
    self.send_feedback("Getting pointcloud")
    try:
        # cloud = rospy.wait_for_message("/head_xtion/depth_registered/points", PointCloud2, 5)
        cloud = observation.get_message('/head_xtion/depth_registered/points')
    except:
        # NOTE(review): bare except also swallows rospy shutdown
        # exceptions; consider narrowing.
        self.send_feedback("Failed to get a pointcloud")
        self._result.found = 0
        self._as.set_succeeded(self._result)
        return
    self.send_feedback("Returning PTU home")
    self.command_ptu(0, 0)
    object_searched = goal.object_id
    rospy.loginfo("Looking for object with id: %s"%object_searched)
    self.send_feedback("Calling object identification service.")
    try:
        result = self.id_service(cloud)
    except:
        # Recognition service failed -> report "not found".
        self._result.found = 0
        self._as.set_succeeded(self._result)
        return
    rospy.loginfo("Number of objects found: %s"%len(result.ids))
    found = False
    world = World()
    objects=""
    for i in result.ids:
        rospy.loginfo(" - found %s"%i.data)
        # i.data[:-4] strips a 4-character suffix -- presumably a file
        # extension such as '.pcd'; TODO confirm.
        objects+="* %s\n"%i.data[:-4]
        # TODO: Should project all other objects into observation and "kill" if not visible
        new_object = world.create_object()
        # TODO: store object pose etc
        # TODO: the classification should include class info
        classification = {}
        identification = {i.data[:-4]:1}
        new_object.add_identification("ObjectInstanceRecognition", ObjectIdentification(classification, identification))
        if i.data == object_searched:
            found = True
            break
    if found:
        rospy.loginfo("Found %s"%object_searched)
        self._result.found = 1
        self._as.set_succeeded(self._result)
    else:
        rospy.loginfo("Did not find %s"%object_searched)
        self._result.found = 0
        self._as.set_succeeded(self._result)
    # Compose a robblog entry describing the outcome, including the RGB
    # image from the observation and the robot's topological position.
    try:
        waypoint=rospy.wait_for_message("/current_node",String,5)
    except:
        waypoint=String("-")
    title = 'Fire Extinguisher Check'
    body = 'I checked for `%s` at %s and it was '%(object_searched[:-4],waypoint.data)
    if found:
        body += '*present*'
    else:
        body += '*not in place*'
    body += '.\n\nHere is an image:\n\n'
    msg_store_blog = MessageStoreProxy(collection='robblog')
    img = observation.get_message('/head_xtion/rgb/image_color')
    img_id = msg_store_blog.insert(img)
    body += '![Image of the location](ObjectID(%s))' % img_id
    if len(objects)>0:
        body+='\n\nI found:\n\n'
        body+=objects
    e = rb_utils.create_timed_entry(title=title, body=body)
    self.blog_msg_store.insert(e)
if __name__ == '__main__': rospy.init_node("example_message_store_client") msg_store = MessageStoreProxy() p = Pose(Point(0, 1, 2), Quaternion(3, 4, 5, 6)) try: # insert a pose object with a name, store the id from db p_id = msg_store.insert_named("my favourite pose", p) # you don't need a name (note that this p_id is different than one above) p_id = msg_store.insert(p) p_id = msg_store.insert(['test1', 'test2']) # get it back with a name print msg_store.query_named("my favourite pose", Pose._type) p.position.x = 666 # update it with a name msg_store.update_named("my favourite pose", p) p.position.y = 2020 # update the other inserted one using the id msg_store.update_id(p_id, p)
class SOMAManager():
    """Manages SOMA objects: persists them in mongodb (message store +
    geospatial store) and exposes them as RViz interactive markers with an
    add/delete/move context menu.

    NOTE: the constructor blocks in rospy.spin(), so instantiation does not
    return until the node shuts down.
    """

    def __init__(self, soma_map, soma_conf, config_file=None):
        # map/configuration names used to scope all DB queries
        self.soma_map = soma_map
        self.soma_conf = soma_conf
        if config_file:
            self._config_file = config_file
        else:
            # default file
            rp = RosPack()
            path = rp.get_path('soma_objects') + '/config/'
            filename = 'default.json'
            self._config_file=path+filename
        # caches keyed by object id: mongodb '_id' and last-known message
        self._soma_obj_ids = dict()
        self._soma_obj_msg = dict()
        # whether markers carry movement controls (toggled via the menu)
        self._interactive = True
        self._msg_store=MessageStoreProxy(collection="soma")
        self._gs_store=GeoSpatialStoreProxy(db="geospatial_store", collection="soma")
        self._server = InteractiveMarkerServer("soma")
        self._init_types()
        self._init_menu()
        self.load_objects()
        rospy.spin()

    def _init_types(self):
        """Load mesh resources and marker dimensionality per object type."""
        # read from config in soma_objects
        with open(self._config_file) as config_file:
            config = json.load(config_file)
            # type name -> mesh resource; type name -> '2D'/'3D'
            self.mesh = dict()
            self.marker = dict()
            if '2D' in config:
                for k, v in config['2D'].iteritems():
                    self.mesh[k] = v
                    self.marker[k] = '2D'
            if '3D' in config:
                for k, v in config['3D'].iteritems():
                    self.mesh[k] = v
                    self.marker[k] = '3D'

    def _init_menu(self):
        """Build the marker context menu: add per-type entries, delete, and a
        checkable 'Movement control' toggle."""
        self.menu_handler = MenuHandler()
        add_entry = self.menu_handler.insert( "Add object" )
        # menu entry handle -> object type name
        self.menu_item = dict()
        for k in sorted(self.mesh.keys()):
            entry = self.menu_handler.insert(k, parent=add_entry, callback=self._add_cb)
            self.menu_item[entry] = k
        del_entry = self.menu_handler.insert( "Delete object", callback=self._del_cb)
        enable_entry = self.menu_handler.insert( "Movement control", callback=self._enable_cb )
        self.menu_handler.setCheckState( enable_entry, MenuHandler.CHECKED )

    def _add_cb(self, feedback):
        """Menu callback: create a new object of the chosen type at the
        clicked marker's pose."""
        rospy.loginfo("Add marker: %s", self.menu_item[feedback.menu_entry_id])
        self.add_object(self.menu_item[feedback.menu_entry_id], feedback.pose)

    def _del_cb(self, feedback):
        """Menu callback: delete the clicked object."""
        rospy.loginfo("Delete marker: %s", feedback.marker_name)
        self.delete_object(feedback.marker_name)

    def _update_cb(self, feedback):
        """Marker-move callback: debounce pose updates with a 3 s timer per
        marker (stored as a dynamic attribute), so the DB is only written
        once the marker stops moving."""
        p = feedback.pose.position
        print "Marker " + feedback.marker_name + " position: " + str(round(p.x,2)) + ", " + str(round(p.y,2)) + ", " + str(round(p.z,2))
        # restart the per-marker timer on every feedback event
        if hasattr(self, "vp_timer_"+feedback.marker_name):
            getattr(self, "vp_timer_"+feedback.marker_name).cancel()
        setattr(self, "vp_timer_"+feedback.marker_name,
                Timer(3, self.update_object, [feedback]))
        getattr(self, "vp_timer_"+feedback.marker_name).start()

    def _enable_cb(self, feedback):
        """Menu callback: toggle movement controls and reload all markers so
        the change takes effect."""
        handle = feedback.menu_entry_id
        state = self.menu_handler.getCheckState( handle )
        if state == MenuHandler.CHECKED:
            self.menu_handler.setCheckState( handle, MenuHandler.UNCHECKED )
            self._interactive = False
        else:
            self.menu_handler.setCheckState( handle, MenuHandler.CHECKED )
            self._interactive = True
        self.menu_handler.reApply( self._server )
        self.load_objects()
        self._server.applyChanges()

    def _next_id(self):
        """Return the next free numeric object id (seeded by _retrieve_objects)."""
        self._soma_id += 1
        return self._soma_id

    def _retrieve_objects(self):
        """Query all SOMAObjects for this map/config and seed the id counter
        with the highest id found."""
        objs = self._msg_store.query(SOMAObject._type,
                                     message_query={"map": self.soma_map,
                                                    "config": self.soma_conf})
        max_id = 0
        for o,om in objs:
            if int(o.id) > max_id:
                max_id = int(o.id)
        self._soma_id = max_id
        return objs

    def load_objects(self):
        """Populate the marker server from the DB; bootstrap an initial
        'Table' object when the collection is empty."""
        objs = self._retrieve_objects()
        # if collection is empty insert initial object
        if not objs:
            pose = Pose()
            self.add_object('Table', pose)
            return
        # otherwise, load all object from collection
        for o, om in objs:
            self._soma_obj_ids[o.id] = om['_id']
            self._soma_obj_msg[o.id] = o
            self.load_object(o.id, o.type, o.pose)

    def load_object(self, soma_id, soma_type, pose):
        """Create the interactive marker for one object and attach the menu."""
        int_marker = self.create_object_marker(soma_id, soma_type, pose)
        self._server.insert(int_marker, self._update_cb)
        self.menu_handler.apply( self._server, soma_id )
        self._server.applyChanges()

    def add_object(self, soma_type, pose):
        """Create a new object of soma_type at pose (z forced to 0) and store
        it in both the message store and the geospatial store."""
        # todo: add to mongodb
        soma_id = self._next_id()
        soma_obj = SOMAObject()
        soma_obj.id = str(soma_id)
        soma_obj.map = str(self.soma_map)
        soma_obj.config = str(self.soma_conf)
        soma_obj.type = soma_type
        soma_obj.pose = pose
        soma_obj.pose.position.z = 0.0
        soma_obj.frame = 'map'
        soma_obj.mesh = self.mesh[soma_type]
        _id = self._msg_store.insert(soma_obj)
        self._soma_obj_ids[soma_obj.id] = _id
        self._soma_obj_msg[soma_obj.id] = soma_obj
        # add object to geospatial store
        self._gs_store.insert(self.geo_json_from_soma_obj(soma_obj))
        print "GS Store: added obj"
        self.load_object(str(soma_id), soma_type, soma_obj.pose)

    def geo_json_from_soma_obj(self, soma_obj):
        """Build the GeoJSON dict stored in the geospatial collection for
        soma_obj (position converted to lng/lat by the store proxy)."""
        geo_json = {}
        geo_json['soma_id'] = soma_obj.id
        geo_json['soma_map'] = soma_obj.map
        geo_json['soma_config'] = soma_obj.config
        geo_json['type'] = soma_obj.type
        geo_json['loc'] = {'type': 'Point',
                           'coordinates': self._gs_store.coords_to_lnglat(soma_obj.pose.position.x,
                                                                          soma_obj.pose.position.y)}
        return geo_json

    def delete_object(self, soma_id):
        """Remove the object from the geospatial store, the message store and
        the marker server."""
        # geospatial store
        res = self._gs_store.find_one({'soma_id': soma_id,
                                       'soma_map': self.soma_map,
                                       'soma_config': self.soma_conf})
        if res:
            _gs_id = res['_id']
            self._gs_store.remove(_gs_id)
            print "GS Store: deleted obj"
        # message store
        _id = self._soma_obj_ids[str(soma_id)]
        self._msg_store.delete(str(_id))
        self._server.erase(soma_id)
        self._server.applyChanges()

    def update_object(self, feedback):
        """Debounced marker-move handler: persist the new pose (message
        store) and refresh the geospatial entry (delete + re-insert)."""
        print "Updated marker " + feedback.marker_name
        _id = self._soma_obj_ids[feedback.marker_name]
        msg = self._soma_obj_msg[feedback.marker_name]
        new_msg = copy.deepcopy(msg)
        new_msg.pose = feedback.pose
        self._msg_store.update_id(_id, new_msg)
        # geospatial store
        # delete old message
        res = self._gs_store.find_one({'soma_id': new_msg.id,
                                       'soma_map': self.soma_map,
                                       'soma_config': self.soma_conf})
        if res:
            _gs_id = res['_id']
            self._gs_store.remove(_gs_id)
            print "GS Store: deleted obj"
        # add new object to geospatial store
        self._gs_store.insert(self.geo_json_from_soma_obj(new_msg))
        print "GS Store: added obj"

    def create_object_marker(self, soma_obj, soma_type, pose):
        """Build the InteractiveMarker (mesh + optional move/rotate controls
        + menu button) for one object. soma_obj is the object's id string."""
        # create an interactive marker for our server
        int_marker = InteractiveMarker()
        int_marker.header.frame_id = "map"
        int_marker.name = soma_obj
        int_marker.description = "id" + soma_obj
        int_marker.pose = pose
        int_marker.pose.position.z = 0.01
        mesh_marker = Marker()
        mesh_marker.type = Marker.MESH_RESOURCE
        mesh_marker.scale.x = 1
        mesh_marker.scale.y = 1
        mesh_marker.scale.z = 1
        # deterministic per-type colour: the type name seeds the RNG
        random.seed(soma_type)
        val = random.random()
        mesh_marker.color.r = r_func(val)
        mesh_marker.color.g = g_func(val)
        mesh_marker.color.b = b_func(val)
        mesh_marker.color.a = 1.0
        #mesh_marker.pose = pose
        mesh_marker.mesh_resource = self.mesh[soma_type]
        # create a control which will move the box
        # this control does not contain any markers,
        # which will cause RViz to insert two arrows
        control = InteractiveMarkerControl()
        control.orientation.w = 1
        control.orientation.x = 0
        control.orientation.y = 1
        control.orientation.z = 0
        control.interaction_mode = InteractiveMarkerControl.MOVE_ROTATE
        if self._interactive:
            int_marker.controls.append(copy.deepcopy(control))
            # add the control to the interactive marker
            if self.marker[soma_type] == '3D':
                # 3D objects additionally get a vertical move axis
                control.interaction_mode = InteractiveMarkerControl.MOVE_AXIS
                int_marker.controls.append(control)
        # add menu control
        menu_control = InteractiveMarkerControl()
        menu_control.interaction_mode = InteractiveMarkerControl.BUTTON
        menu_control.always_visible = True
        menu_control.markers.append( mesh_marker) #makeBox(int_marker) )
        int_marker.controls.append(menu_control)
        return int_marker
class Logger(object):
    """Logs MDP policy-execution statistics and per-edge navigation
    statistics to mongodb, one document per policy goal.

    Stats accumulate in dicts keyed by goal id while the goal is active and
    are inserted (then dropped) when a terminal action status is seen.
    """

    def __init__(self):
        self.task_msp = MessageStoreProxy(collection='mdp_task_exec_logs')
        self.nav_msp = MessageStoreProxy(collection='mdp_nav_exec_logs')
        # id of the policy goal currently executing; None before the first goal
        self.current_goal_id = None
        # edge success/duration estimates for the current goal; None until the
        # first goal arrives (bug fix: was left unset, so a NavStatistics
        # message arriving early raised AttributeError in the callback)
        self.current_edge_estimates = None
        # goal id -> TaskExecutionStat / NavExecutionStat being accumulated
        self.active_task_stats = {}
        self.active_nav_stats = {}
        self.terminal_states = [GoalStatus.PREEMPTED,
                                GoalStatus.SUCCEEDED,
                                GoalStatus.ABORTED,
                                GoalStatus.REJECTED,
                                GoalStatus.RECALLED,
                                GoalStatus.LOST]
        mdp_goal_sub = rospy.Subscriber("mdp_plan_exec/execute_policy/goal",
                                        ExecutePolicyActionGoal,
                                        self.mdp_goal_cb)
        mdp_feedback_sub = rospy.Subscriber(
            "mdp_plan_exec/execute_policy/feedback",
            ExecutePolicyActionFeedback,
            self.mdp_feedback_cb)
        mdp_status_sub = rospy.Subscriber(
            "mdp_plan_exec/execute_policy/status",
            GoalStatusArray,
            self.mdp_status_cb)
        self.nav_stat_sub = rospy.Subscriber(
            "topological_navigation/Statistics",
            NavStatistics,
            self.nav_stats_cb)
        self.get_edge_estimates = rospy.ServiceProxy(
            "/topological_prediction/predict_edges", PredictEdgeState)
        rospy.loginfo("Logger started")

    def nav_stats_cb(self, msg):
        """Record one NavStatistics message against the active policy goal,
        together with the predicted success prob / duration for its edge."""
        # Bug fix: a statistics message can arrive before the first policy
        # goal, or after a terminal status removed the entry; previously this
        # raised KeyError/AttributeError inside the subscriber thread.
        if (self.current_goal_id not in self.active_nav_stats
                or self.current_edge_estimates is None):
            return
        self.active_nav_stats[self.current_goal_id].nav_stats.append(msg)
        if msg.edge_id in self.current_edge_estimates.edge_ids:
            edge_id = self.current_edge_estimates.edge_ids.index(msg.edge_id)
            self.active_nav_stats[self.current_goal_id].success_probs.append(
                self.current_edge_estimates.probs[edge_id])
            self.active_nav_stats[self.current_goal_id].expected_times.append(
                self.current_edge_estimates.durations[edge_id])
        else:
            # edge unknown to the prediction service: log neutral defaults
            self.active_nav_stats[self.current_goal_id].success_probs.append(0)
            self.active_nav_stats[self.current_goal_id].expected_times.append(
                rospy.Duration(0))

    def mdp_feedback_cb(self, msg):
        """Append policy-execution feedback to the matching active task stat."""
        goal_id = msg.status.goal_id.id
        if goal_id in self.active_task_stats:  # 'in' replaces deprecated has_key
            self.active_task_stats[goal_id].feedbacks.append(msg)

    def mdp_goal_cb(self, msg):
        """Start accumulating stats for a new policy goal and fetch fresh
        edge estimates from the prediction service."""
        self.current_goal_id = msg.goal_id.id
        self.active_task_stats[self.current_goal_id] = TaskExecutionStat(
            mdp_goal=msg, feedbacks=[], durations=[])
        self.active_nav_stats[self.current_goal_id] = NavExecutionStat(
            mdp_goal=msg, nav_stats=[], success_probs=[], expected_times=[])
        self.current_edge_estimates = self.get_edge_estimates(rospy.Time.now())

    def mdp_status_cb(self, msg):
        """On a terminal action status, persist and drop the goal's stats."""
        for status in msg.status_list:
            if status.status in self.terminal_states:
                goal_id = status.goal_id.id
                if goal_id in self.active_task_stats:
                    rospy.loginfo("Inserting task stat")
                    task_stat = self.active_task_stats[goal_id]
                    self.add_durations(task_stat)
                    self.task_msp.insert(task_stat)
                    del self.active_task_stats[goal_id]
                if goal_id in self.active_nav_stats:
                    rospy.loginfo("Inserting navigation specific stat")
                    nav_stat = self.active_nav_stats[goal_id]
                    self.nav_msp.insert(nav_stat)
                    del self.active_nav_stats[goal_id]

    def add_durations(self, task_stat):
        """Fill task_stat.durations with the gaps between consecutive
        feedback timestamps."""
        for i in range(0, len(task_stat.feedbacks) - 1):
            task_stat.durations.append(task_stat.feedbacks[i + 1].header.stamp -
                                       task_stat.feedbacks[i].header.stamp)

    def main(self):
        # Wait for control-c
        rospy.spin()
class ObjectDetectionDB(object):
    """Subscribes to every posedetection_msgs/ObjectDetection topic it can
    find and logs detected object poses (transformed into the robot frame),
    plus the map->robot transform, to mongodb."""

    def __init__(self):
        self.db_name = rospy.get_param('robot/database','jsk_robot_lifelog')
        try:
            self.col_name = rospy.get_param('robot/name')
        except KeyError as e:
            rospy.logerr("please specify param \"robot/name\" (e.g. pr1012, olive)")
            exit(1)
        # seconds between subscriber-refresh cycles
        self.update_cycle = rospy.get_param('update_cycle', 1)
        # bug fix: the param name was misspelled '~map_frmae', so a user's
        # setting was silently ignored and the default always won
        self.map_frame = rospy.get_param('~map_frame', 'map')
        self.robot_frame = rospy.get_param('~robot_frame','base_footprint')
        self.tf_listener = tf.TransformListener()
        self.msg_store = MessageStoreProxy(database=self.db_name, collection=self.col_name)
        rospy.loginfo("connected to %s.%s" % (self.db_name, self.col_name))
        rospy.loginfo("map->robot: %s -> %s" % (self.map_frame, self.robot_frame))
        # active ObjectDetection subscribers, refreshed each cycle
        self.subscribers = []

    # DB Insertion function
    def _insert_pose_to_db(self, map_to_robot, robot_to_obj):
        """Insert the robot transform and one detected object; best-effort
        (failures are logged, not raised)."""
        try:
            self.msg_store.insert(map_to_robot)
            self.msg_store.insert(robot_to_obj)
            rospy.loginfo('inserted map2robot: %s, robot2obj: %s' % (map_to_robot, robot_to_obj))
        except Exception as e:
            # bug fix: 'str' + Exception raised TypeError, masking the real
            # error; use logwarn's lazy %-formatting instead
            rospy.logwarn('failed to insert to db: %s', e)

    def _lookup_transform(self, target_frame, source_frame, time=rospy.Time(0), timeout=rospy.Duration(0.0)):
        """Wait for and return the tf transform as a TransformStamped."""
        self.tf_listener.waitForTransform(target_frame, source_frame, time, timeout)
        res = self.tf_listener.lookupTransform(target_frame, source_frame, time)
        ret = TransformStamped()
        ret.header.frame_id = target_frame
        ret.header.stamp = time
        ret.child_frame_id = source_frame
        # lookupTransform returns ((x, y, z), (qx, qy, qz, qw))
        ret.transform.translation.x = res[0][0]
        ret.transform.translation.y = res[0][1]
        ret.transform.translation.z = res[0][2]
        ret.transform.rotation.x = res[1][0]
        ret.transform.rotation.y = res[1][1]
        ret.transform.rotation.z = res[1][2]
        ret.transform.rotation.w = res[1][3]
        return ret

    def _objectdetection_cb(self, msg):
        """Transform each detected object's pose into the robot frame and
        store it together with the map->robot transform."""
        try:
            self.tf_listener.waitForTransform(self.robot_frame, self.map_frame,
                                              msg.header.stamp, rospy.Duration(1.0))
            map_to_robot = self._lookup_transform(self.robot_frame, self.map_frame,
                                                  msg.header.stamp)
        except Exception as e:
            rospy.logwarn("failed to lookup tf: %s", e)
            return
        try:
            for obj in msg.objects:
                spose = PoseStamped(header=msg.header,pose=obj.pose)
                tpose = self.tf_listener.transformPose(self.robot_frame, spose)
                obj.pose = tpose.pose
                self._insert_pose_to_db(map_to_robot, obj)
        except Exception as e:
            rospy.logwarn("failed to object pose transform: %s", e)

    def _update_subscribers(self):
        """Drop dead subscribers and subscribe to any newly advertised
        ObjectDetection topic (ignoring '_agg' aggregate topics)."""
        current_subscribers = rospy.client.get_published_topics()
        targets = [x for x in current_subscribers
                   if x[1]=='posedetection_msgs/ObjectDetection' and (not ('_agg' in x[0]))]
        # bug fix: iterate a copy -- removing from the list being iterated
        # skipped the element after each removal
        for sub in self.subscribers[:]:
            if sub.get_num_connections() == 0:
                sub.unregister()
                self.subscribers.remove(sub)
                rospy.loginfo('unsubscribe (%s)',sub.name)
        for topic_info in targets:
            if topic_info[0] in [x.name for x in self.subscribers]:
                continue
            sub = rospy.Subscriber(topic_info[0], ObjectDetection,
                                   self._objectdetection_cb)
            self.subscribers += [sub]
            rospy.loginfo('start subscribe (%s)',sub.name)

    def sleep_one_cycle(self):
        """Refresh subscriptions then sleep for one update cycle."""
        self._update_subscribers()
        rospy.sleep(self.update_cycle)
class SOMAROIDrawer():
    """ROS service node that draws SOMA2 regions of interest (ROIs) from the
    'soma2data.soma2_roi' mongodb collection as RViz marker arrays.

    NOTE(review): several members below look copied from a larger editor
    class and reference attributes/helpers this class never defines (see the
    per-method notes); only the service path __init__ -> handle_draw_roi ->
    load_objects appears self-contained.

    NOTE: the constructor blocks in rospy.spin().
    """

    def __init__(self):
        #self.soma_map = soma_map
        #self.soma_map_name = soma_map_name
        # default file
        rp = RosPack()
        # self._interactive = True
        self._msg_store=MessageStoreProxy(database="soma2data",collection="soma2_roi")
        s = rospy.Service('soma2/draw_roi', DrawROI, self.handle_draw_roi)
        # Publisher vis_pub = node_handle.advertise<visualization_msgs::Marker>( "visualization_marker", 0 );
        self.markerpub = rospy.Publisher("visualization_marker_array", MarkerArray, queue_size=1)
        rospy.spin()

    def handle_draw_roi(self,req):
        """Service handler: clear existing markers, then draw the requested ROI.
        NOTE(review): returns a bare True (not a DrawROIResponse) when
        req.roi_id < 0 -- verify against the DrawROI service definition."""
        self._delete_markers()
        if(req.roi_id >=0):
            return DrawROIResponse(self.load_objects(req.map_name,req.roi_id))
        return True

    def _update_poly(self, feedback):
        # intentionally a no-op
        return

    def _delete_markers(self):
        """Publish a single DELETEALL marker (action 3) to clear RViz."""
        marker = Marker()
        marker.action = 3
        marker.header.frame_id = "map"
        markerarray = MarkerArray()
        markerarray.markers.append(marker)
        self.markerpub.publish(markerarray)

    # NOTE(review): missing 'self' -- calling this as a method would bind x
    # to the instance; appears unused within this class
    def coords_to_lnglat(x, y):
        earth_radius = 6371000.0 # in meters
        lng = 90 - math.degrees(math.acos(float(x) / earth_radius))
        lat = 90 - math.degrees(math.acos(float(y) / earth_radius))
        return [lng , lat]

    def _retrieve_objects(self, map_name, roi_id):
        """Query ROI objects for (map_name, roi_id) and seed the id counters
        with the highest ids seen."""
        objs = self._msg_store.query(SOMA2ROIObject._type,
                                     message_query={"map_name": map_name,
                                                    "roi_id": roi_id})
        #print objs
        max_id = 0
        max_roi_id = 0
        for o,om in objs:
            if int(o.id) > max_id:
                max_id = int(o.id)
            if int(o.roi_id) > max_roi_id:
                max_roi_id = int(o.roi_id)
        self._soma_id = max_id
        self._soma_roi_id = max_roi_id
        return objs

    def load_objects(self, map_name, roi_id):
        """Build and publish markers for every object of the ROI; returns
        False when the collection has no matching objects."""
        # this is the array for roi ids
        self._soma_obj_roi_ids = dict()
        markerarray = MarkerArray()
        #get objects from db
        objs = self._retrieve_objects(map_name,roi_id)
        # if collection is empty insert initial object
        if not objs:
            return False
        # otherwise, load all object from collection
        for o,om in objs:
            # one sphere marker per pose, then a line strip joining them
            count = 1
            for pose in o.posearray.poses:
                self.load_object(o.id, o.roi_id, o.type, pose,count,markerarray)
                count +=1
            # print count
            self.draw_roi(roi_id,o.posearray.poses,markerarray,count)
        # print len(markerarray.markers)
        self.markerpub.publish(markerarray)
        return True

    # NOTE(review): calls draw_roi with one argument although draw_roi takes
    # four -- would raise TypeError if ever invoked
    def draw_all_roi(self):
        for key in self._soma_obj_roi_ids:
            self.draw_roi(key)

    def undraw_all_roi(self):
        for key in self._soma_obj_roi_ids:
            self.undraw_roi(key)

    def draw_roi(self, roi,poses,markerarray,ccstart):
        """Append one line-strip marker per pose of the ROI, with marker ids
        starting at ccstart."""
        roicp = roi
        p = poses
        cc = ccstart
        # print "t is ",t," p is ", p
        for pose in p:
            # print "This is the pose: ", pose
            int_marker = self.create_roi_marker(roi, pose, p,cc)
            markerarray.markers.append(int_marker)
            cc = cc+1

    # NOTE(review): self._server is never created in this class -- would
    # raise AttributeError if ever invoked
    def undraw_roi(self, roi):
        self._server.erase("ROI-" + roi)
        self._server.applyChanges()

    def load_object(self, soma_id, roi, soma_type, pose,markerno, markerarray):
        """Append the sphere marker for one object pose to markerarray."""
        # print self._soma_obj_markers[str(soma_id)]
        # print str(soma_id)
        int_marker = self.create_object_marker(soma_id, roi, soma_type, pose, markerno)
        markerarray.markers.append(int_marker)
        #self.markerpub.publish(int_marker)
        #print self._soma_obj_markers[str(soma_id)].keys()

    #soma_type = Office, Kitchen, etc, Pose is position
    # NOTE(review): references many attributes (_next_id, _next_roi_id,
    # soma_map_name, map_unique_id, soma_conf, _soma_obj_ids, ...) that this
    # class never initialises, and the final load_object call passes too few
    # arguments -- apparently carried over from a different editor class
    def add_object(self, soma_type, pose, roi_id=None):
        # todo: add to mongodb
        #create a SOMA2ROI Object
        soma_obj = SOMA2ROIObject()
        # print roi_id
        # a new roi
        if roi_id == None:
            #soma_id is an id for the soma object like 1,2,3,4. It updates itself from the db if there are existing objects
            soma_id = self._next_id()
            #soma_roi_id is acutally the roi number. Is it 1,2,3,4? Multiple soma objects can have the same roi id
            soma_roi_id = self._next_roi_id()
            roi_id = soma_roi_id
            # print soma_roi_id
            soma_obj.id = str(soma_id)
            soma_obj.roi_id = str(soma_roi_id)
            soma_obj.map_name = str(self.soma_map_name)
            soma_obj.map_unique_id = str(self.map_unique_id)
            soma_obj.config = str(self.soma_conf)
            soma_obj.type = soma_type
            soma_obj.posearray.poses.append(pose)
            soma_obj.frame = 'map'
            self._soma_obj_roi_ids[str(soma_roi_id)] = list()
            self._soma_obj_markers[soma_obj.id] = dict()
            #_id = self._msg_store.update_id
            _id = self._msg_store.insert(soma_obj)
            self._soma_obj_ids[soma_obj.id] = _id
            self._soma_obj_roi_ids[soma_obj.roi_id].append(soma_obj.id)
            self._soma_obj_type[soma_obj.id] = soma_type
            self._soma_obj_roi[soma_obj.id] = roi_id
            self._soma_obj_msg[soma_obj.id] = soma_obj
            self._soma_obj_pose[soma_obj.id] = soma_obj.posearray.poses
        else:
            # Get the roi id
            soma_roi_id = roi_id
            #print roi_id," ",self.soma_map," ",self.soma_conf," ",self._soma_obj_ids['1']
            #call the object with that id
            res = self._msg_store.query(SOMA2ROIObject._type,message_query={'id':str(roi_id)})
            #iterate through the objects. Normally there should be only 1 object returned
            for o,om in res:
                # print o," hi ",om
                soma_obj = o
            # print "Soma Object: ", soma_obj
            if soma_obj:
                soma_obj.posearray.poses.append(pose)
                self._soma_obj_pose[soma_obj.id] = soma_obj.posearray.poses
                self.insert_geo_json(soma_obj.roi_id,soma_obj)
                #print soma_obj
                _id = self._soma_obj_ids[soma_obj.id]
                _newid = self._msg_store.update_id(_id,soma_obj)
                soma_id = soma_obj.id
                self._soma_obj_msg[soma_obj.id] = soma_obj
                #_id = self._msg_store.update_id
                #self._soma_obj_ids[soma_obj.id] = _id
                #self._soma_obj_msg[soma_obj.id] = soma_obj
                #self._soma_obj_roi_ids[soma_obj.roi_id].append(soma_obj.id)
                #self._soma_obj_roi[soma_obj.id].append(soma_obj.roi_id)
        #for pose in soma_obj.posearray.poses:
        self.load_object(str(soma_id), soma_obj.roi_id, soma_type, pose)

    def create_object_marker(self, soma_obj, roi, soma_type, pose,markerno):
        """Build a sphere Marker for one object pose, coloured per type
        (the type name seeds the RNG, so the colour is deterministic)."""
        # create an interactive marker for our server
        marker = Marker()
        marker.header.frame_id = "map"
        #int_marker.name = soma_obj+'_'+str(markerno)
        #int_marker.description = soma_type + ' (' + roi +'_'+str(markerno)+ ')'
        marker.pose = pose
        marker.id = markerno;
        # print marker.pose
        marker.pose.position.z = 0.01
        #marker = Marker()
        marker.type = Marker.SPHERE
        marker.action = 0
        marker.scale.x = 0.25
        marker.scale.y = 0.25
        marker.scale.z = 0.25
        marker.pose.position.z = (marker.scale.z / 2)
        random.seed(soma_type)
        val = random.random()
        marker.color.r = r_func(val)
        marker.color.g = g_func(val)
        marker.color.b = b_func(val)
        marker.color.a = 1.0
        #marker.pose = pose
        return marker

    # This part draws the line strips between the points
    def create_roi_marker(self, roi, pose, points, count):
        """Build a closed LINE_STRIP Marker through all ROI points; point
        coordinates are expressed relative to the marker's own pose."""
        #print "POINTS: " + str(points)
        #points are all the points belong to that roi, pose is one of the points
        marker = Marker()
        # print "Marker name: ", int_marker.name
        marker.pose = pose
        marker.header.frame_id = "map"
        marker.type = Marker.LINE_STRIP
        marker.scale.x = 0.1
        marker.id= count
        random.seed(10)
        val = random.random()
        marker.color.r = r_func(val)
        marker.color.g = g_func(val)
        marker.color.b = b_func(val)
        marker.color.a = 1.0
        marker.points = []
        for point in points:
            p = Point()
            pose = point#self._soma_obj_pose[point]
            p.x = pose.position.x - marker.pose.position.x
            p.y = pose.position.y - marker.pose.position.y
            marker.points.append(p)
        # close the polygon back to the first point
        p = Point()
        pose = points[0]
        p.x = pose.position.x - marker.pose.position.x
        p.y = pose.position.y - marker.pose.position.y
        marker.points.append(p)
        return marker
# let's say we have a couple of things that we need to store together # these could be some sensor data, results of processing etc. pose = Pose(Point(0, 1, 2), Quaternion(3, 4, 5, 6)) point = Point(7, 8, 9) quaternion = Quaternion(10, 11, 12, 13) # note that everything that is pass to the message_store must be a ros message type #therefore use std_msg types for standard data types like float, int, bool, string etc result = Bool(True) # we will store our results in a separate collection msg_store = MessageStoreProxy(collection='pose_results') # save the ids from each addition stored = [] stored.append([pose._type, msg_store.insert(pose)]) stored.append([point._type, msg_store.insert(point)]) stored.append([quaternion._type, msg_store.insert(quaternion)]) stored.append([result._type, msg_store.insert(result)]) # now store ids togther in store, addition types for safety spl = StringPairList() for pair in stored: spl.pairs.append(StringPair(pair[0], pair[1])) # and add some meta information meta = {} meta['description'] = "this wasn't great" meta['result_time'] = datetime.utcfromtimestamp(rospy.get_rostime().to_sec()) msg_store.insert(spl, meta = meta)
#!/usr/bin/env python import json import yaml import sys from strands_navigation_msgs.msg import TopologicalNode import mongodb_store.util as dc_util from mongodb_store.message_store import MessageStoreProxy if __name__ == '__main__': if len(sys.argv) < 2: print "usage: insert_map input_file.txt" sys.exit(2) filename = str(sys.argv[1]) #dataset_name=str(sys.argv[2]) #map_name=str(sys.argv[3]) msg_store = MessageStoreProxy(collection='topological_maps') json_data = open(filename, 'rb').read() data = json.loads(json_data) for i in data: meta = i['meta'] msgv = dc_util.dictionary_to_message(i['node'], TopologicalNode) msg_store.insert(msgv, meta) #mongodb_store.util.store_message(points_db,p,val)
class PeriodicReplicatorClient(object):
    """Periodically replicates the robot's mongodb collections to a remote
    store via the 'move_mongodb_entries' action, optionally gated on a wired
    network connection and throttled by a minimum interval."""

    def __init__(self):
        super(PeriodicReplicatorClient, self).__init__()
        # parameters
        self.interval = rospy.get_param("~interval", 60 * 60 * 24)  # default: 1 day
        self.delete_after_move = rospy.get_param("~delete_after_move", False)
        self.monitor_network = rospy.get_param("~monitor_network", False)
        # poll at most every 5 minutes, or faster for short intervals
        self.poll_rate = min(300, self.interval)
        # database to be replicated
        self.database = rospy.get_param("robot/database")
        self.collections = rospy.get_param("~extra_collections", list())
        try:
            self.collections.append(rospy.get_param("robot/name"))
        except KeyError as e:
            rospy.logerr("specify param \"robot/name\" (e.g. pr1012, olive)")
            exit(1)
        # network monitoring
        self.network_connected = False
        if self.monitor_network:
            self.net_sub = rospy.Subscriber("/network/connected", Bool,
                                            self.network_connected_cb)
        # database interface; records when each replication finished
        self.date_msg_store = MessageStoreProxy(database=self.database,
                                                collection="replication")
        self.replicate_ac = actionlib.SimpleActionClient(
            'move_mongodb_entries', MoveEntriesAction)
        for i in range(30):
            rospy.loginfo(
                "waiting for advertise action /move_mongodb_entries ...[%d/30]" % i)
            if rospy.is_shutdown() or self.replicate_ac.wait_for_server(
                    timeout=rospy.Duration(1)):
                break
        self.poll_timer = rospy.Timer(rospy.Duration(self.poll_rate),
                                      self.timer_cb)
        rospy.loginfo(
            "replication enabled: db: %s, collection: %s, interval: %d [sec]",
            self.database, self.collections, self.interval)

    def network_ok(self):
        """True when replication may proceed (always, unless monitoring)."""
        if self.monitor_network:
            return self.network_connected
        else:
            return True

    def get_last_replicated(self):
        """Return the time of the most recent replication record, or
        rospy.Time(0) when none exists / the query fails."""
        try:
            last_replicated = self.date_msg_store.query(
                StringList._type, single=True,
                sort_query=[("$natural", -1)])
            date = last_replicated[1]["inserted_at"]
            utcdate = date.astimezone(pytz.utc).replace(tzinfo=None)
            epoch = datetime(1970, 1, 1)
            seconds = (utcdate - epoch).total_seconds()
            return rospy.Time.from_seconds(seconds)
        except Exception as e:
            rospy.logerr(e)
            return rospy.Time(0)

    def insert_replicate_date(self):
        """Record that a replication just completed (best-effort)."""
        try:
            self.date_msg_store.insert(StringList(self.collections))
        except Exception as e:
            rospy.logwarn(
                "failed to insert last replicate date to database: %s", e)

    def move_entries(self, move_before=None):
        """Send the replication goal and babysit it, cancelling when the
        wired network drops (if monitored)."""
        move_before = move_before or rospy.Duration(self.interval)
        goal = MoveEntriesGoal(database=self.database,
                               collections=StringList(self.collections),
                               move_before=move_before,
                               delete_after_move=self.delete_after_move)
        self.replicate_ac.send_goal(goal,
                                    done_cb=self.done_cb,
                                    active_cb=self.active_cb,
                                    feedback_cb=self.feedback_cb)
        while not self.replicate_ac.wait_for_result(
                timeout=rospy.Duration(5.0)):
            if rospy.is_shutdown():
                break
            elif not self.network_ok():
                # bug fix: referenced the undefined attribute
                # 'disable_on_wireless_network', raising AttributeError the
                # first time the network check was reached; network_ok()
                # expresses the intended condition
                rospy.loginfo(
                    "disconnected wired network connection. canceling replication..."
                )
                self.replicate_ac.cancel_all_goals()

    def timer_cb(self, event=None):
        """Periodic poll: start a replication when one is due and allowed."""
        rospy.loginfo("timer callback %s" % event)
        if self.replicate_ac.get_state() == GoalStatus.ACTIVE:
            return
        if not self.network_ok():
            rospy.loginfo("No network connection. Skipping replication.")
            return
        try:
            now = event.current_real
        except:
            now = rospy.Time.now()
        last_replicated = self.get_last_replicated()
        if (now - last_replicated).to_sec() < self.interval:
            rospy.loginfo("Interval %d < %d. skipping replication.",
                          (now - last_replicated).to_sec(), self.interval)
            return
        rospy.loginfo("Starting replication")
        self.move_entries(rospy.Duration(self.interval))

    def done_cb(self, status, result):
        """Action done: record success, or warn on any other status."""
        if status == GoalStatus.SUCCEEDED:
            rospy.loginfo("Replication suceeded")
            self.insert_replicate_date()
        else:
            rospy.logwarn("Replication finished with status %s" % status)

    def active_cb(self):
        """Action became active: abort immediately if the network is gone."""
        if not self.network_ok():
            rospy.loginfo(
                "disconnected wired network connection. canceling replication..."
            )
            self.replicate_ac.cancel_all_goals()

    def feedback_cb(self, feedback):
        rospy.loginfo(feedback)

    def network_connected_cb(self, msg):
        self.network_connected = msg.data
msg_store = MessageStoreProxy() msg_store_maps = MessageStoreProxy(collection='topological_maps') query_meta = {} query_meta["stored_type"] = "strands_navigation_msgs/TopologicalNode" available = len(msg_store.query(TopologicalNode._type, {}, query_meta)) > 0 #print available if available <= 0 : #rospy.logerr("Desired pointset '"+point_set+"' not in datacentre") #rospy.logerr("Available pointsets: "+str(available)) raise Exception("Can't find waypoints.") else : message_list = msg_store.query(TopologicalNode._type, {}, query_meta) for i in message_list: #print i meta = {} meta["node"] = i[0].name meta["map"] = i[0].map meta["pointset"] = i[0].pointset available = len(msg_store_maps.query(TopologicalNode._type, {}, meta)) if available == 0 : msg_store_maps.insert(i[0],meta) else : rospy.logerr("this point is already in datacentre:") print meta
sys.exit(2) filename=str(sys.argv[1]) #dataset_name=str(sys.argv[2]) #map_name=str(sys.argv[3]) msg_store = MessageStoreProxy(collection='topological_maps') json_data=open(filename, 'rb').read() data = json.loads(json_data) for i in data: meta = i['meta'] msgv = dc_util.dictionary_to_message(i['node'], TopologicalNode) msg_store.insert(msgv,meta) #mongodb_store.util.store_message(points_db,p,val)