def loadMap(self, point_set, filename): point_set = str(sys.argv[1]) #map_name=str(sys.argv[3]) msg_store = MessageStoreProxy(collection='topological_maps') query_meta = {} query_meta["pointset"] = point_set available = len(msg_store.query(TopologicalNode._type, {}, query_meta)) > 0 print available if available <= 0: rospy.logerr("Desired pointset '" + point_set + "' not in datacentre") rospy.logerr("Available pointsets: " + str(available)) raise Exception("Can't find waypoints.") else: query_meta = {} query_meta["pointset"] = point_set message_list = msg_store.query(TopologicalNode._type, {}, query_meta) #node=TopologicalNode() top_map = [] for i in message_list: nodeinf = {} nodeinf["node"] = yaml.load(str(i[0])) if nodeinf["node"]["localise_by_topic"]: nodeinf["node"]["localise_by_topic"] = json.dumps( nodeinf["node"]["localise_by_topic"]) nodeinf["meta"] = i[ 1] #str(bson.json_util.dumps(i[1], indent=1)) nodeinf["meta"].pop("last_updated_by", None) nodeinf["meta"].pop('inserted_at', None) nodeinf["meta"].pop('last_updated_at', None) nodeinf["meta"].pop('stored_type', None) nodeinf["meta"].pop('stored_class', None) nodeinf["meta"].pop('inserted_by', None) nodeinf["meta"].pop('_id', None) top_map.append(nodeinf) #val = bson.json_util.dumps(nodeinf["meta"], indent=1) top_map.sort(key=lambda x: x['node']['name']) yml = yaml.safe_dump(top_map, default_flow_style=False) #print yml #print s_output fh = open(filename, "w") #s_output = str(bson.json_util.dumps(nodeinf, indent=1)) s_output = str(yml) #print s_output fh.write(s_output) fh.close
def remove_node(self, node_name) :
    """Delete a node from the topological map stored in the datacentre.

    Looks up the node by name within this map's pointset, removes every
    edge in other nodes that points at it (via self.remove_edge), then
    deletes the node's own document.

    :param node_name: name of the node to remove
    """
    rospy.loginfo('Removing Node: '+node_name)
    msg_store = MessageStoreProxy(collection='topological_maps')

    # Message query matches the node by name within this pointset; the
    # meta query additionally narrows the search to this map.
    query = {"name" : node_name, "pointset": self.name}
    query_meta = {}
    query_meta["pointset"] = self.name
    query_meta["map"] = self.map

    available = msg_store.query(TopologicalNode._type, query, query_meta)

    node_found = False
    if len(available) == 1 :
        node_found = True
        # Datacentre _id of the stored document, needed for the delete below.
        rm_id = str(available[0][1]['_id'])
        print rm_id
    else :
        rospy.logerr("Node not found "+str(len(available))+" waypoints found after query")
        #rospy.logerr("Available data: "+str(available))

    if node_found :
        query_meta = {}
        query_meta["pointset"] = self.name
        edges_to_rm = []
        # Scan every node in the pointset for edges whose destination is the
        # node being removed; edge names are built as '<origin>_<destination>'.
        message_list = msg_store.query(TopologicalNode._type, {}, query_meta)
        for i in message_list:
            for j in i[0].edges :
                if j.node == node_name :
                    edge_rm = i[0].name+'_'+node_name
                    edges_to_rm.append(edge_rm)

        for k in edges_to_rm :
            print 'remove: '+k
            self.remove_edge(k)
        # Finally remove the node document itself.
        msg_store.delete(rm_id)
class DBPlay(object):
    """Replays logged TransformStamped messages from MongoDB as a
    MarkerArray on /move_base_marker_array."""

    def __init__(self):
        self.db_name = rospy.get_param('~db_name', 'jsk_robot_lifelog')
        self.col_name = rospy.get_param('~col_name', 'pr1012')
        self.use_ros_time = rospy.get_param('~use_ros_time', True)
        self.use_sim_time = rospy.get_param('use_sim_time', False)
        self.downsample = rospy.get_param("~downsample", 3)
        self.label_downsample = rospy.get_param("~label_downsample", 10)
        self.duration = rospy.get_param('~duration', 10)  # days
        self.limit = rospy.get_param("~limit", 1000)
        self.msg_store = MessageStoreProxy(database=self.db_name,
                                           collection=self.col_name)
        rospy.loginfo("connected to %s.%s" % (self.db_name, self.col_name))
        self.pub = rospy.Publisher('/move_base_marker_array', MarkerArray)
        self.marker_count = 0

    def run(self):
        """Once a second, fetch the transforms logged over the last
        self.duration days and publish them as labeled markers."""
        while not rospy.is_shutdown():
            rospy.loginfo("$gt: %d" % (rospy.Time.now().secs -
                                       self.duration * 60 * 60 * 24))
            if self.use_ros_time or self.use_sim_time:
                rospy.loginfo("fetching data")
                # ROS/sim time: window on the message header stamp.
                trans = self.msg_store.query(
                    type=TransformStamped._type,
                    message_query={
                        "header.stamp.secs": {
                            "$lte": rospy.Time.now().secs,
                            "$gt": rospy.Time.now().secs -
                                   self.duration * 60 * 60 * 24
                        }
                    },
                    sort_query=[("$natural", 1)],
                    limit=self.limit)
            else:
                # Wall clock: window on the datacentre insertion time.
                trans = self.msg_store.query(
                    type=TransformStamped._type,
                    meta_query={
                        "inserted_at": {
                            "$gt": datetime.utcnow() -
                                   timedelta(days=self.duration)
                        }
                    },
                    sort_query=[("$natural", 1)])
            rospy.loginfo("found %d msgs" % len(trans))
            m_arr = MarkerArray()
            # BUGFIX: the original first assigned the result of
            # transformStampedArrayToLabeledLineStripMarker and immediately
            # overwrote it with the ArrayMarker result; the discarded first
            # call was dead work and has been removed.
            m_arr.markers = V.transformStampedArrayToLabeledArrayMarker(
                trans[::self.downsample],
                label_downsample=self.label_downsample,
                discrete=True)
            self.pub.publish(m_arr)
            rospy.sleep(1.0)
            rospy.loginfo("publishing move_base_marker_array")
def update_node_name(self, node_name, new_name):
    """Rename a node and rewrite every edge reference to it in the pointset.

    :param node_name: current name of the node
    :param new_name: name the node should be given
    """
    msg_store = MessageStoreProxy(collection='topological_maps')
    # The query retrieves the node name with the given name from the given pointset.
    query = {"name": node_name, "pointset": self.name}
    # The meta-information is some additional information about the specific
    # map that we are interested in (?)
    query_meta = {}
    query_meta["pointset"] = self.name
    query_meta["map"] = self.map
    # This returns a tuple containing the object, if it exists, and some
    # information about how it's stored in the database.
    available = msg_store.query(TopologicalNode._type, query, query_meta)

    if len(available) == 1:
        available[0][0].name = new_name
        # Also need to update all edges which involve the renamed node
        allnodes_query = {"pointset": self.name}
        allnodes_query_meta = {}
        allnodes_query_meta["pointset"] = self.name
        allnodes_query_meta["map"] = self.map
        # this produces a list of tuples, each with [0] as the node, [1] as database info
        allnodes_available = msg_store.query(TopologicalNode._type, {},
                                             allnodes_query_meta)

        # Check the edges of each node for a reference to the node to be
        # renamed, and change the edge id if there is one. Enumerate the
        # values so that we can edit the objects in place to send them back
        # to the database.
        for node_ind, node_tpl in enumerate(allnodes_available):
            for edge_ind, edge in enumerate(node_tpl[0].edges):
                # change names of the edges in other nodes, and update their values in the database
                # NOTE(review): this is a substring test on edge_id, so a
                # node whose name contains node_name as a substring would
                # also be rewritten -- confirm node names can never overlap.
                if node_tpl[0].name != node_name and node_name in edge.edge_id:
                    allnodes_available[node_ind][0].edges[edge_ind].edge_id = \
                        edge.edge_id.replace(node_name, new_name)
                    # must also update the name of the node this edge goes to
                    allnodes_available[node_ind][0].edges[edge_ind].node = new_name
                    curnode_query = {
                        "name": node_tpl[0].name,
                        "pointset": self.name
                    }
                    msg_store.update(allnodes_available[node_ind][0],
                                     allnodes_query_meta,
                                     curnode_query,
                                     upsert=True)

        # update all edge ids for this node
        for edge_ind, edge in enumerate(available[0][0].edges):
            available[0][0].edges[edge_ind].edge_id = edge.edge_id.replace(
                node_name, new_name)

        msg_store.update(available[0][0], query_meta, query, upsert=True)
    else:
        rospy.logerr("Impossible to store in DB " + str(len(available)) +
                     " waypoints found after query")
        rospy.logerr("Available data: " + str(available))
def loadMap(self, point_set, filename): point_set=str(sys.argv[1]) #map_name=str(sys.argv[3]) msg_store = MessageStoreProxy(collection='topological_maps') query_meta = {} query_meta["pointset"] = point_set available = len(msg_store.query(TopologicalNode._type, {}, query_meta)) > 0 print available if available <= 0 : rospy.logerr("Desired pointset '"+point_set+"' not in datacentre") rospy.logerr("Available pointsets: "+str(available)) raise Exception("Can't find waypoints.") else : query_meta = {} query_meta["pointset"] = point_set message_list = msg_store.query(TopologicalNode._type, {}, query_meta) #node=TopologicalNode() top_map=[] for i in message_list: nodeinf = {} nodeinf["node"] = yaml.load(str(i[0])) if nodeinf["node"]["localise_by_topic"]: nodeinf["node"]["localise_by_topic"] = json.dumps(nodeinf["node"]["localise_by_topic"]) nodeinf["meta"] = i[1] #str(bson.json_util.dumps(i[1], indent=1)) nodeinf["meta"].pop("last_updated_by", None) nodeinf["meta"].pop('inserted_at', None) nodeinf["meta"].pop('last_updated_at', None) nodeinf["meta"].pop('stored_type', None) nodeinf["meta"].pop('stored_class', None) nodeinf["meta"].pop('inserted_by', None) nodeinf["meta"].pop('_id', None) top_map.append(nodeinf) #val = bson.json_util.dumps(nodeinf["meta"], indent=1) top_map.sort(key=lambda x: x['node']['name']) yml = yaml.safe_dump(top_map, default_flow_style=False) #print yml #print s_output fh = open(filename, "w") #s_output = str(bson.json_util.dumps(nodeinf, indent=1)) s_output = str(yml) #print s_output fh.write(s_output) fh.close
def loadMap(self, point_set):
    """Fetch all topological nodes of *point_set* from the datacentre.

    Also records the map name of the last node seen in self.map.

    :param point_set: name of the pointset to load
    :return: list of topological_node objects with edges, vertices and
        waypoint pose filled in
    :raises Exception: when the pointset has no nodes in the datacentre
    """
    msg_store = MessageStoreProxy(collection='topological_maps')
    meta_query = {"pointset": point_set}

    available = len(msg_store.query(TopologicalNode._type, {}, meta_query)) > 0
    if not available:
        rospy.logerr("Desired pointset '" + point_set + "' not in datacentre")
        rospy.logerr("Available pointsets: " + str(available))
        raise Exception("Can't find waypoints.")

    message_list = msg_store.query(TopologicalNode._type, {}, meta_query)

    points = []
    for msg, _meta in message_list:
        self.map = msg.map
        node = topological_node(msg.name)

        # Edges become plain dicts of destination and action.
        node.edges = [{"node": e.node, "action": e.action} for e in msg.edges]
        node._insert_vertices([[v.x, v.y] for v in msg.verts])

        # Waypoint: pose flattened to strings, position then orientation.
        pose = msg.pose
        node.waypoint = [str(pose.position.x), str(pose.position.y),
                         str(pose.position.z), str(pose.orientation.x),
                         str(pose.orientation.y), str(pose.orientation.z),
                         str(pose.orientation.w)]
        node._get_coords()
        points.append(node)
    return points
def loadConfig(self, dataset_name, collection_name="aaf_walking_group", meta_name="waypoint_set"):
    """Load a JSON configuration stored as a std_msgs/String message.

    :param dataset_name: value of the meta field identifying the data set
    :param collection_name: message store collection to search
    :param meta_name: meta key under which the data set name is stored
    :return: decoded JSON content of the first matching message
    :raises Exception: when no matching message is found
    """
    msg_store = MessageStoreProxy(collection=collection_name)
    query_meta = {meta_name: dataset_name}
    # FIX: query once and reuse the result; the original issued the exact
    # same datacentre query twice (once for the length check, once for the
    # data).
    message = msg_store.query(std_msgs.msg.String._type, {}, query_meta)
    if len(message) == 0:
        rospy.logerr("Desired data set '"+meta_name+": "+dataset_name+"' not in datacentre.")
        raise Exception("Can't find data in datacentre.")
    return json.loads(message[0][0].data)
def loadConfig(self, data_set):
    """Return the JSON-decoded dialogue options stored under the given
    'collection' meta key in the hri_behaviours collection.

    :param data_set: value of the 'collection' meta field to look up
    :raises Exception: when no matching message exists
    """
    msg_store = MessageStoreProxy(collection="hri_behaviours")
    query_meta = {"collection": data_set}
    # FIX: query once and reuse the result; the original issued the exact
    # same datacentre query twice.
    message = msg_store.query(std_msgs.msg.String._type, {}, query_meta)
    if len(message) == 0:
        rospy.logerr("Desired dialogue options '"+data_set+"' not in datacentre.")
        raise Exception("Can't find data in datacentre.")
    return json.loads(message[0][0].data)
def test_add_message_no_wait(self):
    """Inserting with wait=False must still add documents eventually."""
    msg_store = MessageStoreProxy()
    baseline = len(
        msg_store.query(Pose._type, meta_query={"no_wait": True}))

    pose = Pose(Point(0, 1, 2), Quaternion(0, 0, 0, 1))
    for _ in range(10):
        msg_store.insert(pose, meta={"no_wait": True}, wait=False)

    # Give the fire-and-forget inserts time to reach the datacentre.
    rospy.sleep(2)

    final = len(
        msg_store.query(Pose._type, meta_query={"no_wait": True}))
    self.assertTrue(final > baseline)
def test_add_message(self):
    """Round-trip a named Pose through the message store: insert, query,
    update, bulk-query with filter and sort, and finally delete."""
    msg_store = MessageStoreProxy()
    POSE_NAME = "__test__pose__"
    p = Pose(Point(0, 1, 2), Quaternion(0, 0, 0, 1))

    # insert a pose object with a name
    msg_store.insert_named(POSE_NAME, p)

    # get it back with a name
    stored, meta = msg_store.query_named(POSE_NAME, Pose._type)
    self.assertIsInstance(stored, Pose)
    # Every field must survive the round trip unchanged.
    self.assertEqual(stored.position.x, p.position.x)
    self.assertEqual(stored.position.y, p.position.y)
    self.assertEqual(stored.position.z, p.position.z)
    self.assertEqual(stored.orientation.x, p.orientation.x)
    self.assertEqual(stored.orientation.y, p.orientation.y)
    self.assertEqual(stored.orientation.z, p.orientation.z)
    self.assertEqual(stored.orientation.w, p.orientation.w)

    # update_named must persist a changed field
    p.position.x = 666
    msg_store.update_named(POSE_NAME, p)
    # get it back with a name
    updated = msg_store.query_named(POSE_NAME, Pose._type)[0]
    self.assertEqual(updated.position.x, p.position.x)

    # # try to get it back with an incorrect name
    wrong_name = "thid name does not exist in the datacentre"
    none_item = msg_store.query_named(wrong_name, Pose._type)[0]
    self.assertIsNone(none_item)

    # # get all non-existant typed objects, so get an empty list back
    none_query = msg_store.query( "not my type")
    self.assertEqual(len(none_query), 0)

    # add 100 query and sort by date inserted.
    # orientation.x encodes the insertion index i, so after a descending
    # $natural sort the newest insert (i == 99) comes back first; z == 100
    # makes all 100 poses match the '$gt: 10' filter below.
    for i in range(100):
        p = Pose(Point(0, 0, 0), Quaternion(i, 0, 100, 1))
        msg_store.insert(p)

    result = msg_store.query(Pose._type,
                             message_query={ 'orientation.z': {'$gt': 10} },
                             sort_query=[("$natural", -1)])
    self.assertEqual(len(result), 100)
    self.assertEqual(result[0][0].orientation.x, 99)

    # must remove the item or unittest only really valid once
    print meta["_id"]
    print str(meta["_id"])
    deleted = msg_store.delete(str(meta["_id"]))
    self.assertTrue(deleted)
class SOMAROIQuery():
    """Convenience queries over SOMA regions of interest stored in the
    'soma_roi' message store collection."""

    def __init__(self, soma_map, soma_conf):
        self.soma_map = soma_map
        self.soma_conf = soma_conf
        self._msg_store = MessageStoreProxy(collection="soma_roi")

    def get_polygon(self, roi_id):
        """Return the ROI's outline as a Polygon, points ordered by
        object id."""
        objs = self._msg_store.query(SOMAROIObject._type,
                                     message_query={"map": self.soma_map,
                                                    "config": self.soma_conf,
                                                    "roi_id": roi_id})
        # Pair ids with poses and sort by id to recover the outline order.
        id_pose_pairs = sorted((obj.id, obj.pose) for obj, _meta in objs)

        poly = Polygon()
        poly.points = []
        for _oid, pose in id_pose_pairs:
            pt = Point()
            pt.x = pose.position.x
            pt.y = pose.position.y
            poly.points.append(pt)
        return poly

    def get_rois(self, roi_type=None):
        """
        Returns a set of roi IDs of the given type. If type not specified, returns
        all rois in this map/configuration.
        """
        message_query = {"map": self.soma_map, "config": self.soma_conf}
        if roi_type is not None:
            message_query["type"] = roi_type
        objs = self._msg_store.query(SOMAROIObject._type,
                                     message_query=message_query)
        #TODO: here it would be nice to be able to use mongodb distinct function
        return set(obj.roi_id for obj, _meta in objs)
def loadDialogue(self, dialogue_name):
    """Return the sentences of the named dialogue as a list of strings.

    :param dialogue_name: value of the 'hri_dialogue' meta field
    :raises Exception: when the dialogue is not in the datacentre
    """
    msg_store = MessageStoreProxy(collection="hri_behaviours")
    query_meta = {"hri_dialogue": dialogue_name}
    # FIX: query once and reuse the result; the original issued the exact
    # same datacentre query twice.
    message_list = msg_store.query(std_msgs.msg.String._type, {}, query_meta)
    if len(message_list) == 0:
        rospy.logerr("Desired dialogue options '"+dialogue_name+"' not in datacentre")
        raise Exception("Can't find dialogue.")
    return [str(message.data) for message, meta in message_list]
def loadMap(self, point_set): point_set=str(sys.argv[1]) #map_name=str(sys.argv[3]) msg_store = MessageStoreProxy(collection='topological_maps') query_meta = {} query_meta["pointset"] = point_set available = len(msg_store.query(TopologicalNode._type, {}, query_meta)) > 0 print available if available <= 0 : rospy.logerr("Desired pointset '"+point_set+"' not in datacentre") rospy.logerr("Available pointsets: "+str(available)) raise Exception("Can't find waypoints.") else : query_meta = {} query_meta["pointset"] = point_set message_list = msg_store.query(TopologicalNode._type, {}, query_meta) points = [] for i in message_list: #print i[0].name b = topological_node(i[0].name) edges = [] for j in i[0].edges : data = {} data["node"]=j.node data["action"]=j.action edges.append(data) b.edges = edges verts = [] for j in i[0].verts : data = [j.x,j.y] verts.append(data) b._insert_vertices(verts) c=i[0].pose waypoint=[str(c.position.x), str(c.position.y), str(c.position.z), str(c.orientation.x), str(c.orientation.y), str(c.orientation.z), str(c.orientation.w)] b.waypoint = waypoint b._get_coords() points.append(b) return points
def getNodes():
    """Return all TopologicalNode messages of the 'dynamic_tmap' pointset.

    :return: tuple of TopologicalNode messages (empty when none are stored)
    """
    #print queries.get_nodes(""," ")
    msg_store = MessageStoreProxy(collection="topological_maps")
    objs = msg_store.query(TopologicalNode._type, {"pointset": "dynamic_tmap"})
    # BUGFIX: unpacking zip(*objs) raises ValueError on an empty query
    # result; return an empty tuple instead of crashing.
    if not objs:
        return ()
    nodes, metas = zip(*objs)
    return nodes
def loadSchedule(self, givenTime):
    """Load the exploration schedule whose 'midnight' field equals
    givenTime and store it in self.soma_schedule.

    :param givenTime: midnight timestamp identifying the schedule
    :return: True when a schedule was found, False otherwise
    """
    rospy.loginfo("Looking for schedules in MongoDB.")
    msg_store = MessageStoreProxy(collection='exploration_schedules')

    available = msg_store.query(ExplorationSchedule._type,
                                {"midnight": givenTime}, {})
    if not available:
        rospy.loginfo("No schedules were found!")
        return False

    rospy.loginfo("Schedule found... loading and publishing message!")
    # Copy the stored fields into a fresh message.
    stored = available[0][0]
    load_schedule = ExplorationSchedule()
    #print available[0][0].timeInfo
    load_schedule.timeInfo = stored.timeInfo
    load_schedule.entropy = stored.entropy
    load_schedule.nodeID = stored.nodeID
    load_schedule.midnight = stored.midnight
    load_schedule.mode = stored.mode
    self.soma_schedule = load_schedule
    return True
class NavRelaxant(object):
    """Reports how many navigation statistics exist for each edge of the
    current topological map."""

    def __init__(self, count_threshold):
        super(NavRelaxant, self).__init__()
        self.node_pairs = []
        rospy.Subscriber('topological_map', TopologicalMap, self.map_callback)
        self.msg_store = MessageStoreProxy(collection='nav_stats')

    def map_callback(self, msg):
        """Rebuild the (origin, destination, edge_id) list from the map."""
        self.node_pairs = [(node.name, edge.node, edge.edge_id)
                           for node in msg.nodes
                           for edge in node.edges]

    def print_pair(self, start, end):
        """Log the number of completed traversals from start to end."""
        stats = self.msg_store.query(NavStatistics._type,
                                     {"origin": start,
                                      "target": end,
                                      "final_node": end})
        rospy.loginfo('Nav stats from %s to %s: %s' % (start, end, len(stats)))

    def print_nav_stats(self):
        # only really needed for testing
        while not self.node_pairs and not rospy.is_shutdown():
            rospy.sleep(1)
            rospy.loginfo('Waiting for nodes')
        for start, end, _edge_id in self.node_pairs:
            self.print_pair(start, end)
def add_edge(self, or_waypoint, de_waypoint, action):
    """Add an edge from or_waypoint to de_waypoint using *action*.

    Logs an error when the origin node is not unique in the datacentre or
    when an edge to the destination already exists.
    """
    #print 'removing edge: '+edge_name
    rospy.loginfo('Adding Edge from '+or_waypoint+' to '+de_waypoint+' using '+action)
    msg_store = MessageStoreProxy(collection='topological_maps')
    query = {"name" : or_waypoint, "pointset": self.name}
    query_meta = {"pointset": self.name, "map": self.map}

    available = msg_store.query(TopologicalNode._type, query, query_meta)
    if len(available) != 1:
        rospy.logerr("Impossible to store in DB "+str(len(available))+" waypoints found after query")
        rospy.logerr("Available data: "+str(available))
        return

    origin_node = available[0][0]
    # Refuse to add a duplicate edge to the same destination.
    if any(e.node == de_waypoint for e in origin_node.edges):
        rospy.logerr("Edge already exist: Try deleting it first")
        return

    edge = Edge()
    edge.node = de_waypoint
    edge.action = action
    edge.top_vel = 0.55
    origin_node.edges.append(edge)
    msg_store.update(origin_node, query_meta, query, upsert=True)
def rm_tag_cb(self, msg): #rospy.loginfo('Adding Tag '+msg.tag+' to '+str(msg.node)) succeded = True for j in msg.node: msg_store = MessageStoreProxy(collection='topological_maps') query = {"name": j, "pointset": self.nodes.name} query_meta = {} query_meta["pointset"] = self.nodes.name query_meta["map"] = self.nodes.map #print query, query_meta available = msg_store.query( strands_navigation_msgs.msg.TopologicalNode._type, query, query_meta) #print len(available) succeded = False meta_out = None for i in available: msgid = i[1]['_id'] if 'tag' in i[1]: if msg.tag in i[1]['tag']: print 'removing tag' i[1]['tag'].remove(msg.tag) print 'new list of tags' print i[1]['tag'] msg_store.update_id(msgid, i[0], i[1], upsert=False) succeded = True meta_out = str(i[1]) return succeded, meta_out
def get_node_tags_from_mongo(self, req):
    """Return (succeded, tags) for the node named in req.node_name.

    succeded is True only when exactly one matching node exists; tags is
    that node's meta 'tag' list (empty when absent or on failure).
    """
    msg_store = MessageStoreProxy(collection='topological_maps')
    query = {"name": req.node_name, "pointset": self.nodes.name}
    query_meta = {"pointset": self.nodes.name, "map": self.nodes.map}

    available = msg_store.query(
        strands_navigation_msgs.msg.TopologicalNode._type, query, query_meta)

    if len(available) != 1:
        return False, []

    # The meta information for a node is the second part of the tuple
    # returned by the message store query.
    meta = available[0][1]
    return True, meta.get('tag', [])
class TrajectoryQueryService():
    """ROS service that resolves a geospatial JSON query into full
    Trajectory messages and optionally visualizes them."""

    def __init__(self):
        self.gs = GeoSpatialStoreProxy('geospatial_store', 'soma')
        self.ms = MessageStoreProxy(collection="people_trajectory")
        # setting up the service
        self.ser = rospy.Service('/trajectory_query', TrajectoryQuery,
                                 self.service_cb)
        self.topic = 'trajectory_query'
        self.vis = TrajectoryVisualizer(self.topic)

    def retrieve_msg(self, uuid):
        """Fetch the Trajectory message with the given uuid.

        Returns None when missing; logs (but still returns the first match)
        when multiple trajectories share the uuid.
        """
        res = self.ms.query(Trajectory._type, message_query={"uuid": uuid})
        if len(res) < 1:
            rospy.logerr("Trajectory not found: %s" % uuid)
            return None
        if len(res) > 1:
            rospy.logerr("Multiple trajectories found: %s" % uuid)
        # FIX: both the ==1 and >1 paths returned res[0][0]; the original
        # duplicated this assignment and return.
        return res[0][0]

    def service_cb(self, req):
        """Service callback: run the geospatial query, collect trajectory
        messages and (optionally) visualize them."""
        rospy.loginfo("Request received: %s" % req)
        if req.visualize:
            self.vis.clear()

        res = TrajectoryQueryResponse()
        res.trajectories = Trajectories()

        try:
            json_query = json.loads(req.query)
            trajectories = self.gs.find(json_query)
        except Exception:
            # FIX: narrowed from a bare 'except:' so shutdown signals
            # (SystemExit/KeyboardInterrupt) still propagate.
            rospy.logerr("Invalid json => re-check syntax")
            res.error = True
            return res

        count = 0
        for t in trajectories:
            # Results without a uuid are not trajectories => ignore them.
            # ('uuid' in t replaces the Python-2-only has_key.)
            if 'uuid' in t:
                count += 1
                #rospy.loginfo("retrieve msg for uuid: %s" % t['uuid'])
                msg = self.retrieve_msg(t['uuid'])
                if msg:
                    res.trajectories.trajectories.append(msg)

        rospy.loginfo("Query result: %s trajectories" % count)
        if req.visualize:
            rospy.loginfo("Visualize result on topic: %s" % self.topic)
            self.vis.visualize_trajectories(res.trajectories)
        rospy.loginfo("Response returned")
        res.error = False
        return res

    def main(self):
        rospy.spin()
class NavRelaxant(object):
    """Logs the number of stored navigation statistics for every edge of
    the topological map received on the 'topological_map' topic."""

    def __init__(self, count_threshold):
        super(NavRelaxant, self).__init__()
        self.node_pairs = []
        rospy.Subscriber('topological_map', TopologicalMap, self.map_callback)
        self.msg_store = MessageStoreProxy(collection='nav_stats')

    def map_callback(self, msg):
        """Cache one (origin, destination, edge_id) triple per map edge."""
        pairs = []
        for node in msg.nodes:
            pairs.extend((node.name, edge.node, edge.edge_id)
                         for edge in node.edges)
        self.node_pairs = pairs

    def print_pair(self, start, end):
        """Log how many completed traversals start -> end are stored."""
        query = {"origin": start, "target": end, "final_node": end}
        count = len(self.msg_store.query(NavStatistics._type, query))
        rospy.loginfo('Nav stats from %s to %s: %s' % (start, end, count))

    def print_nav_stats(self):
        # only really needed for testing
        while len(self.node_pairs) == 0 and not rospy.is_shutdown():
            rospy.sleep(1)
            rospy.loginfo('Waiting for nodes')
        for (start, end, edge_id) in self.node_pairs:
            self.print_pair(start, end)
def update_edge(self, node_name, edge_id, new_action=None, new_top_vel=None):
    """Update the action and/or top_vel of edge *edge_id* on *node_name*.

    Parameters left as None keep the edge's current value.

    :param node_name: name of the node owning the edge
    :param edge_id: id of the edge to modify
    :param new_action: replacement action, or None to keep the current one
    :param new_top_vel: replacement top velocity, or None to keep it
    """
    msg_store = MessageStoreProxy(collection='topological_maps')
    # The query retrieves the node with the given name from the pointset;
    # the meta query narrows the search to this map.
    query = {"name": node_name, "pointset": self.name}
    query_meta = {}
    query_meta["pointset"] = self.name
    query_meta["map"] = self.map

    available = msg_store.query(TopologicalNode._type, query, query_meta)
    if len(available) == 1:
        for edge in available[0][0].edges:
            if edge.edge_id == edge_id:
                # BUGFIX: the original used 'new_x or edge.x', which
                # silently ignored falsy updates such as new_top_vel=0.0 or
                # an empty action string.
                if new_action is not None:
                    edge.action = new_action
                if new_top_vel is not None:
                    edge.top_vel = new_top_vel
        msg_store.update(available[0][0], query_meta, query, upsert=True)
    else:
        rospy.logerr("Impossible to store in DB " + str(len(available)) +
                     " waypoints found after query")
        rospy.logerr("Available data: " + str(available))
def get_maps():
    """
    Queries the database and returns details of the available topological maps.

    :return: A dictionary where each key is the name of a topological map and
      each item is a dictionary of details. Details are:
      "number_nodes" ; integer
      "edge_actions" : list of action server names used for traversal
      "last_modified" : datetime.datetime object for the last time a node was
      inserted
    """
    maps = dict()
    msg_store = MessageStoreProxy(collection='topological_maps')
    nodes = msg_store.query(TopologicalNode._type)
    for node, meta in nodes:
        pointset = meta["pointset"]
        # FIX: dict.has_key is Python-2-only; 'not in' is equivalent and
        # works on both Python 2 and 3.
        if pointset not in maps:
            maps[pointset] = {"number_nodes": 0,
                              "edge_actions": set(),
                              "last_modified": ""}
        entry = maps[pointset]
        entry["number_nodes"] += 1
        # Track the newest insertion time seen for this pointset.
        if (entry["last_modified"] == "" or
                meta["inserted_at"] > entry["last_modified"]):
            entry["last_modified"] = meta["inserted_at"]
        for edge in node.edges:
            entry["edge_actions"].add(edge.action)
    return maps
def add_edge(self, or_waypoint, de_waypoint, action):
    """Append an Edge (default top_vel 0.55) from or_waypoint to
    de_waypoint using the given action, unless one already exists."""
    #print 'removing edge: '+edge_name
    rospy.loginfo('Adding Edge from ' + or_waypoint + ' to ' + de_waypoint +
                  ' using ' + action)
    msg_store = MessageStoreProxy(collection='topological_maps')
    query = {"name": or_waypoint, "pointset": self.name}
    query_meta = {"pointset": self.name, "map": self.map}

    matches = msg_store.query(TopologicalNode._type, query, query_meta)
    if len(matches) == 1:
        origin_node = matches[0][0]
        # Check whether an edge to the destination is already present.
        already_linked = False
        for existing in origin_node.edges:
            if existing.node == de_waypoint:
                already_linked = True
                break
        if already_linked:
            rospy.logerr("Edge already exist: Try deleting it first")
        else:
            new_edge = Edge()
            new_edge.node = de_waypoint
            new_edge.action = action
            new_edge.top_vel = 0.55
            origin_node.edges.append(new_edge)
            msg_store.update(origin_node, query_meta, query, upsert=True)
    else:
        rospy.logerr("Impossible to store in DB " + str(len(matches)) +
                     " waypoints found after query")
        rospy.logerr("Available data: " + str(matches))
def add_tag_cb(self, msg):
    """Append msg.tag to the meta 'tag' list of every node in msg.node.

    :return: (succeded, meta_out) -- succeded is False when the last
        queried node produced no matches; meta_out is the string form of
        the last updated meta document (None when nothing matched)
    """
    succeded = True
    # BUGFIX: meta_out was only assigned inside the query loop, so an empty
    # msg.node list or an empty query result raised NameError at the
    # return statement.
    meta_out = None
    for j in msg.node:
        msg_store = MessageStoreProxy(collection='topological_maps')
        query = {"name": j, "pointset": self.nodes.name}
        query_meta = {}
        query_meta["pointset"] = self.nodes.name
        query_meta["map"] = self.nodes.map
        available = msg_store.query(
            strands_navigation_msgs.msg.TopologicalNode._type, query,
            query_meta)
        for i in available:
            msgid = i[1]['_id']
            if 'tag' in i[1]:
                # Do not duplicate an already-present tag.
                if not msg.tag in i[1]['tag']:
                    i[1]['tag'].append(msg.tag)
            else:
                i[1]['tag'] = [msg.tag]
            meta_out = str(i[1])
            msg_store.update_id(msgid, i[0], i[1], upsert=False)
        if len(available) == 0:
            succeded = False
    return succeded, meta_out
def rm_tag_cb(self, msg): #rospy.loginfo('Adding Tag '+msg.tag+' to '+str(msg.node)) succeded = True for j in msg.node: msg_store = MessageStoreProxy(collection='topological_maps') query = {"name" : j, "pointset": self.nodes.name} query_meta = {} query_meta["pointset"] = self.nodes.name query_meta["map"] = self.nodes.map #print query, query_meta available = msg_store.query(strands_navigation_msgs.msg.TopologicalNode._type, query, query_meta) #print len(available) succeded = False for i in available: msgid= i[1]['_id'] if 'tag' in i[1]: if msg.tag in i[1]['tag']: print 'removing tag' i[1]['tag'].remove(msg.tag) print 'new list of tags' print i[1]['tag'] msg_store.update_id(msgid, i[0], i[1], upsert = False) succeded = True meta_out = str(i[1]) return succeded, meta_out
def add_tag_to_mongo(self, msg):
    """Add msg.tag to the meta 'tag' list of each node named in msg.node.

    Returns (succeded, meta_out): succeded is False when the last queried
    node produced no matches; meta_out is the string form of the last
    updated meta document (None when nothing matched).
    """
    succeded = True
    meta_out = None
    for node_name in msg.node:
        msg_store = MessageStoreProxy(collection='topological_maps')
        query = {"name": node_name, "pointset": self.nodes.name}
        query_meta = {"pointset": self.nodes.name, "map": self.nodes.map}
        available = msg_store.query(
            strands_navigation_msgs.msg.TopologicalNode._type, query,
            query_meta)
        for node_msg, meta in available:
            msgid = meta['_id']
            # Append the tag, avoiding duplicates; create the list when the
            # node has no tags yet.
            if 'tag' in meta:
                if msg.tag not in meta['tag']:
                    meta['tag'].append(msg.tag)
            else:
                meta['tag'] = [msg.tag]
            meta_out = str(meta)
            msg_store.update_id(msgid, node_msg, meta, upsert=False)
        if not available:
            succeded = False
    return succeded, meta_out
def add_localise_by_topic(tmap, node, json_str): #print req #data = json.loads(req.content) #print data msg_store = MessageStoreProxy(collection='topological_maps') query = {"name" : node, "pointset": tmap} query_meta = {} #query_meta["pointset"] = tmap #query_meta["map"] = self.nodes.map #print query, query_meta available = msg_store.query(strands_navigation_msgs.msg.TopologicalNode._type, query, query_meta) #print len(available) if len(available) != 1: #succeded = False print 'there are no nodes or more than 1 with that name' else: #succeded = True for i in available: if not i[0].localise_by_topic: msgid= i[1]['_id'] i[0].localise_by_topic=json_str #print i[0] print "Updating %s--%s" %(i[0].pointset, i[0].name) msg_store.update_id(msgid, i[0], i[1], upsert = False)
class TrajectoryQueryService():
    """Service wrapper that turns a geospatial JSON query into Trajectory
    messages, with optional visualization."""

    def __init__(self):
        self.gs = GeoSpatialStoreProxy('geospatial_store','soma')
        self.ms = MessageStoreProxy(collection="people_trajectory")
        # setting up the service
        self.ser = rospy.Service('/trajectory_query', TrajectoryQuery,
                                 self.service_cb)
        self.topic = 'trajectory_query'
        self.vis = TrajectoryVisualizer(self.topic)

    def retrieve_msg(self, uuid):
        """Return the Trajectory with the given uuid, or None when absent.

        Multiple matches are logged but the first one is still returned.
        """
        res = self.ms.query(Trajectory._type, message_query={"uuid": uuid})
        if len(res) < 1:
            rospy.logerr("Trajectory not found: %s" % uuid)
            return None
        if len(res) > 1:
            rospy.logerr("Multiple trajectories found: %s" % uuid)
        # FIX: the original duplicated 't = res[0][0]; return t' in both the
        # >1 branch and the fall-through path; collapsed to one return.
        return res[0][0]

    def service_cb(self, req):
        """Resolve req.query via the geospatial store and fetch the full
        trajectory message for every result carrying a uuid."""
        rospy.loginfo("Request received: %s" % req)
        if req.visualize:
            self.vis.clear()

        res = TrajectoryQueryResponse()
        res.trajectories = Trajectories()

        try:
            json_query = json.loads(req.query)
            trajectories = self.gs.find(json_query)
        except Exception:
            # FIX: narrowed from a bare 'except:' so interpreter-exit
            # exceptions are not swallowed.
            rospy.logerr("Invalid json => re-check syntax")
            res.error = True
            return res

        count = 0
        for t in trajectories:
            # otherwise result is not a trajectory => ignore
            # ('uuid' in t replaces the Python-2-only has_key.)
            if 'uuid' in t:
                count += 1
                #rospy.loginfo("retrieve msg for uuid: %s" % t['uuid'])
                msg = self.retrieve_msg(t['uuid'])
                if msg:
                    res.trajectories.trajectories.append(msg)

        rospy.loginfo("Query result: %s trajectories" % count)
        if req.visualize:
            rospy.loginfo("Visualize result on topic: %s" % self.topic)
            self.vis.visualize_trajectories(res.trajectories)
        rospy.loginfo("Response returned")
        res.error = False
        return res

    def main(self):
        rospy.spin()
def add_localise_by_topic(tmap, node, json_str):
    """Set localise_by_topic (a JSON string) on the named node of *tmap*,
    only when the node does not already have one configured."""
    #print req
    #data = json.loads(req.content)
    #print data
    msg_store = MessageStoreProxy(collection='topological_maps')
    query = {"name": node, "pointset": tmap}
    query_meta = {}
    #query_meta["pointset"] = tmap
    #query_meta["map"] = self.nodes.map
    #print query, query_meta
    available = msg_store.query(
        topological_navigation_msgs.msg.TopologicalNode._type, query,
        query_meta)
    #print len(available)
    # Expect exactly one node with this name in the pointset.
    if len(available) != 1:
        #succeded = False
        print 'there are no nodes or more than 1 with that name'
    else:
        #succeded = True
        for i in available:
            # Only set the topic when none is configured yet.
            if not i[0].localise_by_topic:
                msgid = i[1]['_id']
                i[0].localise_by_topic = json_str
                #print i[0]
                print "Updating %s--%s" % (i[0].pointset, i[0].name)
                msg_store.update_id(msgid, i[0], i[1], upsert=False)
def get_maps():
    """
    Queries the database and returns details of the available topological maps.

    :return: A dictionary where each key is the name of a topological map and
      each item is a dictionary of details. Details are:
      "number_nodes" ; integer
      "edge_actions" : list of action server names used for traversal
      "last_modified" : datetime.datetime object for the last time a node was
      inserted
    """
    maps = dict()
    msg_store = MessageStoreProxy(collection='topological_maps')
    nodes = msg_store.query(TopologicalNode._type)
    for node, meta in nodes:
        pointset = meta["pointset"]
        # FIX: dict.has_key is Python-2-only; 'not in' behaves identically
        # and also works on Python 3.
        if pointset not in maps:
            maps[pointset] = {
                "number_nodes": 0,
                "edge_actions": set(),
                "last_modified": ""
            }
        entry = maps[pointset]
        entry["number_nodes"] += 1
        # Track the newest insertion time seen for this pointset.
        if (entry["last_modified"] == "" or
                meta["inserted_at"] > entry["last_modified"]):
            entry["last_modified"] = meta["inserted_at"]
        for edge in node.edges:
            entry["edge_actions"].add(edge.action)
    return maps
def get_all_blocks_from_db():
    """Return all Block messages in the default message store collection.

    :return: tuple of Block messages (empty when none are stored)
    """
    msg_store = MessageStoreProxy()
    res = msg_store.query(Block._type)
    # BUGFIX: unpacking zip(*res) raises ValueError on an empty query
    # result; return an empty tuple instead of crashing.
    if not res:
        return ()
    (blocks, metas) = zip(*res)
    return blocks
def modify_tag_cb(self, msg):
    """Rename the meta tag msg.tag to msg.new_tag on every node named in
    msg.node.

    :return: (succeded, meta_out) -- succeded is False when the last
        queried node produced no matches; meta_out is the string form of
        the last meta document written back (None when nothing was written)
    """
    succeded = True
    meta_out = None
    for node in msg.node:
        msg_store = MessageStoreProxy(collection='topological_maps')
        query = {"name": node, "pointset": self.nodes.name}
        query_meta = {}
        query_meta["pointset"] = self.nodes.name
        query_meta["map"] = self.nodes.map
        #print query, query_meta
        available = msg_store.query(
            strands_navigation_msgs.msg.TopologicalNode._type, query,
            query_meta)
        #print len(available)
        for node_plus_meta in available:
            msgid = node_plus_meta[1]['_id']
            if 'tag' in node_plus_meta[1]:
                # Skip nodes that do not carry the tag being renamed.
                if not msg.tag in node_plus_meta[1]['tag']:
                    continue
                else:
                    # Replace the tag in place, keeping its position.
                    tag_ind = node_plus_meta[1]['tag'].index(msg.tag)
                    node_plus_meta[1]['tag'][tag_ind] = msg.new_tag
            # NOTE(review): nodes without any 'tag' meta still reach this
            # write-back (only tagged-but-missing nodes are skipped) --
            # confirm the unconditional update is intended.
            meta_out = str(node_plus_meta[1])
            msg_store.update_id(msgid,
                                node_plus_meta[0],
                                node_plus_meta[1],
                                upsert=True)
        if len(available) == 0:
            succeded = False
    return succeded, meta_out
def loadMap(self, point_set):
    """Load the topological map `point_set` from the message store.

    Retries up to 10 times, 6 s apart (~1 minute), while the pointset is
    not yet in the datacentre, then assembles a TopologicalMap message
    from the stored nodes and runs self.map_check on it.

    :raises Exception: if the pointset never appears.
    """
    msg_store = MessageStoreProxy(collection='topological_maps')

    points = strands_navigation_msgs.msg.TopologicalMap()
    points.name = point_set
    points.pointset = point_set

    query_meta = {}
    query_meta["pointset"] = point_set

    ntries = 1
    map_found = False

    #Tries to load the map for a minute if not it quits
    while not map_found:
        available = len(
            msg_store.query(
                strands_navigation_msgs.msg.TopologicalNode._type, {},
                query_meta))
        if available <= 0:
            rospy.logerr("Desired pointset '" + point_set +
                         "' not in datacentre, try :" + str(ntries))
            if ntries <= 10:
                ntries += 1
                rospy.sleep(rospy.Duration.from_sec(6))
            else:
                raise Exception("Can't find waypoints.")
                # NOTE(review): unreachable after the raise above.
                return points
                #We just want to raise the exception not quit
                #sys.exit(2)
        else:
            map_found = True

    query_meta = {}
    query_meta["pointset"] = point_set
    message_list = msg_store.query(
        strands_navigation_msgs.msg.TopologicalNode._type, {}, query_meta)

    points.name = point_set
    points.pointset = point_set

    for i in message_list:
        b = i[0]
        points.nodes.append(b)

    # Map name taken from the first node; assumes the map is non-empty.
    points.map = points.nodes[0].map
    self.map_check(points)
    return points
class DBPlay(object):
    """Replays stored object detections as a labelled MarkerArray.

    Loads Object6DPose entries from the last `~duration` days, pairs each
    with a stored TransformStamped, transforms the pose and publishes the
    resulting sphere markers in a loop until shutdown.
    """
    def __init__(self):
        self.db_name = rospy.get_param('~db_name','jsk_pr2_lifelog')
        self.col_name = rospy.get_param('~col_name', 'pr1012')
        self.duration = rospy.get_param('~duration', 30)  # window, in days
        self.msg_store = MessageStoreProxy(database=self.db_name, collection=self.col_name)
        rospy.loginfo("connected to %s.%s" % (self.db_name, self.col_name))
        self.pub = rospy.Publisher('/object_detection_marker_array', MarkerArray)
        self.marker_count = 0
        # Objects inserted within the window, newest first ($natural, -1).
        objs = self.msg_store.query(type=Object6DPose._type,
                                    meta_query={"inserted_at": {
                                        "$gt": datetime.now() - timedelta(days=self.duration)
                                    }},
                                    sort_query=[("$natural", -1)])
        first_obj_meta = objs[0][1]
        # Seed with the last transform inserted before the first object,
        # then every transform inserted after it (same sort order).
        trans = [tuple(self.msg_store.query(type=TransformStamped._type,
                                            meta_query={"inserted_at": {
                                                "$lt": first_obj_meta["inserted_at"]
                                            }},
                                            sort_query=[("$natural", -1)],
                                            single=True))]
        trans += self.msg_store.query(type=TransformStamped._type,
                                      meta_query={"inserted_at": {
                                          "$gt": first_obj_meta["inserted_at"]
                                      }},
                                      sort_query=[("$natural", -1)])
        j = 0
        m_arr = MarkerArray()
        for i in range(len(objs)):
            o,o_meta = objs[i]
            t,t_meta = trans[j]
            # Advance the transform cursor once the object is newer than it.
            if o_meta["inserted_at"] > t_meta["inserted_at"]:
                j += 1
                t,t_meta = trans[j]
            ps = T.transformPoseWithTransformStamped(o.pose, t)
            m_arr.markers += V.poseStampedToLabeledSphereMarker([ps, o_meta], o.type)
        # Publish the assembled marker array once per second until shutdown.
        while not rospy.is_shutdown():
            self.pub.publish(m_arr)
            rospy.sleep(1.0)
            rospy.logdebug("publishing objectdetection_marker_array")
class TfDatabasePublisher(object):
    """Reads TransformStamped entries from the message store and broadcasts
    them as static TF frames, rooted under a fixed 'world' frame.

    Offers an 'apps/database/reload' Empty service to re-read the database.
    The constructor blocks in rospy.spin().
    """
    rate = 125
    transforms = None
    msg_store = None
    world = None

    def __init__(self, world_frame='world'):
        rospy.init_node("db_republisher")
        self.msg_store = MessageStoreProxy()
        service = rospy.Service('apps/database/reload', Empty,
                                self._cb_db_reload)

        # Define the 'world' frame
        self.world = TransformStamped()
        self.world.header.frame_id = 'world'
        self.world.child_frame_id = world_frame
        self.world.transform.rotation.w = 1.0  # identity rotation

        rospy.loginfo("Started helping hand ...")

        # Publish the database at start
        if not self.reload_db():
            raise Exception('Failed to reload the database')

        # Make the program stay awake
        rospy.spin()

    def _cb_db_reload(self, req):
        # Service callback: re-read the DB and re-broadcast all transforms.
        self.reload_db()
        return EmptyResponse()

    def reload_db(self):
        """Reload transforms from the DB and broadcast them.

        :return: True on success, False if any step raised.
        """
        try:
            self.read_database()
            self.broadcast_transforms()
            return True
        except Exception as e:
            print("Reload failed with exception:\n{}".format(e))
            return False

    def broadcast_transforms(self):
        """Send all loaded transforms (plus the world frame) as static TF."""
        # Acquire the current time for the world frame
        self.world.header.stamp = rospy.Time.now()
        self.transforms.append(self.world)

        # Publish all frames
        static_transform_broadcaster = tf2_ros.static_transform_broadcaster.StaticTransformBroadcaster(
        )
        static_transform_broadcaster.sendTransform(self.transforms)

    def read_database(self):
        """Fill self.transforms with every TransformStamped in the store."""
        self.transforms = []
        transforms_db = self.msg_store.query(TransformStamped._type)
        for transform in transforms_db:
            self.transforms.append(transform[0])
        rospy.loginfo('Got {0} transformations from database!'.format(
            len(self.transforms)))
class SOMAROIQuery():
    """Helper for querying SOMA regions of interest for one map/config."""

    def __init__(self, soma_map, soma_conf):
        self.soma_map = soma_map
        self.soma_conf = soma_conf
        self._msg_store=MessageStoreProxy(collection="soma_roi")

    def get_polygon(self, roi_id):
        """Return the ROI's boundary as a Polygon, vertices ordered by object id."""
        objs = self._msg_store.query(SOMAROIObject._type,
                                     message_query={"map": self.soma_map,
                                                    "config": self.soma_conf,
                                                    "roi_id": roi_id})
        ids = []
        poses = []
        for o,om in objs:
            ids.append(o.id)
            poses.append(o.pose)

        # Order the vertex poses by their object id.
        # NOTE(review): relies on Python 2 allowing tuple comparison to fall
        # through to the pose when ids tie — key=itemgetter(0) would be safer.
        sorted_poses = [_pose for (_id,_pose) in sorted(zip(ids, poses))]
        poly = Polygon()
        poly.points = []
        for p in sorted_poses:
            point = Point()
            point.x = p.position.x
            point.y = p.position.y
            poly.points.append(point)
        return poly

    def get_rois(self, roi_type=None):
        """
        Returns a set of roi IDs of the given type. If type not specified, returns all rois in this map/configuration.
        """
        if roi_type is not None:
            objs = self._msg_store.query(SOMAROIObject._type,
                                         message_query={"map": self.soma_map,
                                                        "config": self.soma_conf,
                                                        "type": roi_type})
        else:
            objs = self._msg_store.query(SOMAROIObject._type,
                                         message_query={"map": self.soma_map,
                                                        "config": self.soma_conf}
                                         )

        #TODO: here it would be nice to be able to use mongodb distinct function
        rois=set()
        for o in objs:
            rois.add(o[0].roi_id)
        return rois
def print_blocks_from_db(): msg_store = MessageStoreProxy() res = msg_store.query(Block._type) rospy.loginfo("stored blocks in scene DB are:") for (block, meta) in res: print meta print block
def get_soma_objects(self):
    """srv call to mongo and get the list of new objects and locations"""
    msg_store = MessageStoreProxy(database="soma2data", collection="soma2")
    objs = msg_store.query(SOMA2Object._type, message_query={"map_name":self.soma_map,"config":self.soma_conf})
    print "queried soma2 objects >> ", objs
    # NOTE(review): the query result above is only printed, never used;
    # self.soma_objects comes from the hard-coded list instead — confirm
    # whether the DB result was meant to populate it.
    self.soma_objects = ce.get_soma_objects()
    print "hard coded objects >> ", [self.soma_objects[r].keys() for r in self.soma_objects.keys()]
class PeriodicReplicatorClient(Thread):
    """Background thread that periodically replicates mongodb collections
    via the 'move_mongodb_entries' action server.

    Each replication is recorded in a 'replication' collection so the next
    run only moves entries newer than the last recorded replication.
    """
    def __init__(self):
        Thread.__init__(self)
        self.dead = Event()
        self.interval = rospy.get_param("mongodb_replication_interval", 60 * 60 * 24) # default: 1 day
        self.delete_after_move = rospy.get_param("mongodb_replication_delete_after_move", False)
        self.replicate_interval = rospy.Duration(self.interval)
        self.database = rospy.get_param("robot/database")
        # Collections to replicate: command-line args plus the robot name.
        self.collections = sys.argv[2:]
        try:
            self.collections.append(rospy.get_param("robot/name"))
        except KeyError as e:
            rospy.logerr("specify param \"robot/name\" (e.g. pr1012, olive)")
            exit(1)
        self.periodic = rospy.get_param("~periodic", True)
        self.date_msg_store = MessageStoreProxy(database=self.database,
                                                collection="replication")

    def run(self):
        # Replicate every self.interval seconds until cancel() is called.
        while not self.dead.wait(self.interval):
            move_before = self.time_after_last_replicate_date()
            self.move_entries(move_before)
            self.insert_replicate_date()

    def time_after_last_replicate_date(self):
        """Return the minimum age (Duration) of entries to keep, derived
        from the last recorded replication; defaults to 1 day on failure."""
        delta = 60 * 60 * 24 # 1 day
        try:
            last_replicated = self.date_msg_store.query(StringList._type,
                                                        single=True, sort_query=[("$natural",-1)])
            date = last_replicated[1]["inserted_at"]
            rospy.loginfo("last replicated at %s", date)
            # NOTE(review): timedelta.seconds ignores whole days —
            # total_seconds() may be what was intended here.
            delta = (dt.datetime.now() - date).seconds + 60
        except Exception as e:
            rospy.logwarn("failed to search last replicated date from database: %s", e)
        finally:
            return rospy.Duration(delta)

    def insert_replicate_date(self):
        # Record that a replication happened; the meta 'inserted_at' field
        # is used as the timestamp on the next run.
        try:
            self.date_msg_store.insert(StringList(self.collections))
        except Exception as e:
            rospy.logwarn("failed to insert last replicate date to database: %s", e)

    def move_entries(self, move_before):
        """Ask the action server to move all configured collections whose
        entries are older than move_before; blocks until done."""
        client = actionlib.SimpleActionClient('move_mongodb_entries',
                                              MoveEntriesAction)
        client.wait_for_server()
        goal = MoveEntriesGoal(database=self.database,
                               collections=StringList(self.collections),
                               move_before=move_before,
                               delete_after_move=self.delete_after_move)
        client.send_goal(goal, feedback_cb=self.feedback_cb)
        client.wait_for_result()

    def feedback_cb(self, feedback):
        rospy.loginfo(feedback)

    def cancel(self):
        # Stop the periodic loop; wakes the wait() in run().
        self.dead.set()
def rename_node(name, new_name, top_map_name):
    """
    Renames a topological map node. All edges will be updated to reflect the change.
    @param name: the current name of the node to be changec
    @param new_name: the new name to give the node
    @param top_map_name: the name of the topological map to work with (pointset)
    @return (number of nodes changed, number of edges changed)
    """
    maps = queries.get_maps()
    if top_map_name not in maps:
        raise Exception("Unknown topological map.")

    msg_store = MessageStoreProxy(collection='topological_maps')
    nodes = msg_store.query(TopologicalNode._type, {},
                            {'pointset': top_map_name})

    node_names = [node.name for node, meta in nodes]

    node_renames = 0
    edge_changes = 0
    node_changes = 0
    if name not in node_names:
        raise Exception("No such node.")
    if new_name in node_names:
        raise Exception("New node name already in use.")

    # Keep pre-rename metas so the DB update below can match on the old name.
    old_metas = []
    for node, meta in nodes:
        old_metas.append(copy.deepcopy(meta))
        if meta["node"] == name:
            meta["node"] = new_name
        if node.name == name:
            node.name = new_name
            node_renames += 1
            if node_renames > 1:
                raise Exception("More than one node has the same name!")
        # Rewrite edges that point at, or whose id contains, the old name.
        for edge in node.edges:
            if edge.node == name:
                edge.node = new_name
            if edge.edge_id.find(name) > -1:
                edge.edge_id = edge.edge_id.replace(name, new_name)
                edge_changes += 1

    # Save the changed nodes
    for ((node, meta), old_meta) in zip(nodes, old_metas):
        changed = msg_store.update(node, meta, {
            'name': old_meta['node'],
            'pointset': meta['pointset']
        })
        if changed.success:
            node_changes += 1
    return (node_changes, edge_changes)
def gather_stats(self):
    """Collect NavStatistics for each edge id and build per-edge model input.

    Edges with at least one stat (within self.range, when set) go into the
    returned list; edges with none are appended to self.unknowns.

    :return: list of edge model dicts; "models" holds one entry per stat.
    """
    stats_collection = rospy.get_param('~stats_collection', 'nav_stats')
    msg_store = MessageStoreProxy(collection=stats_collection)

    to_add = []
    for i in self.eids:
        query = {
            "topological_map": self.lnodes.name,
            "edge_id": i["edge_id"]
        }
        # Optionally restrict stats to the configured epoch range.
        query_meta = {}
        if len(self.range) == 2:
            if self.range[1] < 1:
                upperlim = rospy.Time.now().secs
            else:
                upperlim = self.range[1]
            query_meta["epoch"] = {"$gte": self.range[0], "$lt": upperlim}
        #print query
        #print query_meta

        available = msg_store.query(NavStatistics._type, query, query_meta)
        # print len(available)
        edge_mod = {}
        edge_mod["model_id"] = i[
            "model_id"]  #self.lnodes.name+'__'+i["edge_id"]
        edge_mod["time_model_id"] = i["time_model_id"]
        edge_mod["dist"] = i["dist"]  #self.lnodes.name+'__'+i["edge_id"]
        edge_mod["models"] = []
        edge_mod["order"] = -1
        edge_mod["t_order"] = -1
        edge_mod["edge_id"] = i["edge_id"]

        for j in available:
            val = {}
            # st: 1 for a usable run, 0 for a fatal failure.
            if j[0].status != 'fatal':
                val["st"] = 1
                # Normalised speed (dist / time), capped at 1.0.
                val["speed"] = i["dist"] / j[0].operation_time
                if val["speed"] > 1:
                    val["speed"] = 1.0
            else:
                val["st"] = 0
                val["speed"] = 0.0
            # Parse the human-readable start date back into a unix epoch.
            val["epoch"] = int(
                datetime.strptime(
                    j[0].date_started,
                    "%A, %B %d %Y, at %H:%M:%S hours").strftime('%s'))
            val["optime"] = j[0].operation_time
            edge_mod["models"].append(val)

        if len(available) > 0:
            to_add.append(edge_mod)
        else:
            self.unknowns.append(edge_mod)
    return to_add
def getTables(table_collection="paddedtablestrial"):
    '''Get the tables observed at least 5 times.

    :param table_collection: message store collection to query.
    :return: tuple of Table messages; empty tuple if none match.
    '''
    msg_store2 = MessageStoreProxy(collection=table_collection)
    objs = msg_store2.query(Table._type,
                            message_query={"update_count": {
                                "$gte": 5
                            }})
    # Guard: unpacking zip(*[]) raises ValueError on an empty result set.
    if not objs:
        return ()
    tables, meta = zip(*objs)
    return tables
def gather_paths(start, end):
    """Reconstruct continuous navigation paths from nav/task statistics.

    Groups NavStatistics recorded during each task navigation window into
    paths of consecutive hops (joined in space and time), drops paths
    shorter than 2 hops, and returns them grouped via group_paths().
    """
    nav_store = MessageStoreProxy(collection="nav_stats")
    task_store = MessageStoreProxy(collection="task_events")

    paths = [[]]
    for task_group in task_groups_in_window(start,
                                            end,
                                            task_store,
                                            event=[
                                                TaskEvent.NAVIGATION_STARTED,
                                                TaskEvent.NAVIGATION_SUCCEEDED
                                            ]):
        # Nav stats inserted during this task group's time span, in order.
        meta_query = {
            "inserted_at": {
                "$gte": rostime_to_python(task_group[0].time),
                "$lte": rostime_to_python(task_group[-1].time)
            }
        }
        nav_stats = nav_store.query(NavStatistics._type,
                                    meta_query=meta_query)
        nav_stats.sort(key=lambda x: x[1]["epoch"])

        for (stat, meta) in nav_stats:
            # if stat.status == 'success':
            # check they're joined up in space and time
            if stat.origin != 'none' and stat.final_node != 'none':
                # Extend the current path only when this hop starts where
                # the previous one ended and is close enough in time.
                if len(paths[-1]) > 0 and paths[-1][-1][
                        0].final_node == stat.origin and close_in_time(
                            paths[-1][-1], (stat, meta)):
                    paths[-1].append((stat, meta))
                # if not create a new path
                else:
                    paths.append([(stat, meta)])
            else:
                paths.append([])

        paths.append([])

    # Discard trivial paths (fewer than 2 hops).
    min_path_length = 2
    paths = [path for path in paths if len(path) >= min_path_length]
    print len(paths)

    example_path = paths[-1]

    path_stats = []
    for path in paths:
        # (origin, destination, start epoch, total travel time)
        path_stats.append(
            (path[0][0].origin, path[-1][0].final_node, path[0][1]['epoch'],
             sum([stat.operation_time for (stat, meta) in path])))

    path_lengths = [len(path) for path in paths]
    print '{0} paths, max len {1}'.format(len(path_stats), max(path_lengths))

    # n, bins, patches = plt.hist(path_lengths, bins = max(path_lengths))
    # plt.show()
    return group_paths(path_stats)
def loadMap(self, point_set):
    """Load topological map `point_set`, waiting for it to appear in the DB.

    Polls up to 10 times (6 s apart) and raises if the map never shows up.

    :param point_set: pointset name of the map to load.
    :return: TopologicalMap built from the stored nodes.
    :raises Exception: when the pointset is absent after all retries.
    """
    msg_store = MessageStoreProxy(collection='topological_maps')

    query_meta = {}
    query_meta["pointset"] = point_set

    # waiting for the map to be in the mongodb_store
    ntries = 1
    map_found = False
    while not map_found:
        # Use the node count directly; the original stored `len(...) > 0`
        # (a bool) and compared it with <= 0, which only behaved because
        # False == 0 in Python.
        available = len(
            msg_store.query(TopologicalNode._type, {}, query_meta))
        if available <= 0:
            rospy.logerr("Desired pointset '" + point_set +
                         "' not in datacentre, try :" + str(ntries))
            if ntries <= 10:
                ntries += 1
                rospy.sleep(rospy.Duration.from_sec(6))
            else:
                # The sys.exit(2) that followed this raise was unreachable
                # and has been removed.
                raise Exception("Can't find waypoints.")
        else:
            map_found = True

    query_meta = {}
    query_meta["pointset"] = point_set
    message_list = msg_store.query(TopologicalNode._type, {}, query_meta)

    points = TopologicalMap()
    points.name = point_set
    points.map = point_set
    points.pointset = point_set
    #string last_updated
    for i in message_list:
        b = i[0]
        points.nodes.append(b)
    return points
def loadMap(self, point_set) :
    """Load topological map `point_set` from the message store.

    Retries up to 10 times, 6 s apart (~1 minute), before giving up.

    :raises Exception: if the pointset never appears in the datacentre.
    """
    msg_store = MessageStoreProxy(collection='topological_maps')

    points = strands_navigation_msgs.msg.TopologicalMap()
    points.name = point_set
    points.pointset = point_set

    query_meta = {}
    query_meta["pointset"] = point_set

    ntries=1
    map_found=False

    #Tries to load the map for a minute if not it quits
    while not map_found :
        available = len(msg_store.query(strands_navigation_msgs.msg.TopologicalNode._type, {}, query_meta))
        if available <= 0 :
            rospy.logerr("Desired pointset '"+point_set+"' not in datacentre, try :"+str(ntries))
            if ntries <=10 :
                ntries+=1
                rospy.sleep(rospy.Duration.from_sec(6))
            else :
                raise Exception("Can't find waypoints.")
                # NOTE(review): unreachable after the raise above.
                return points
                #We just want to raise the exception not quit
                #sys.exit(2)
        else:
            map_found=True

    query_meta = {}
    query_meta["pointset"] = point_set
    message_list = msg_store.query(strands_navigation_msgs.msg.TopologicalNode._type, {}, query_meta)

    points.name = point_set
    points.pointset = point_set

    for i in message_list:
        b = i[0]
        points.nodes.append(b)

    # Map name comes from the first node; assumes the map is non-empty.
    points.map = points.nodes[0].map
    return points
def loadMap(self, point_set) :
    """Load topological map `point_set`, waiting for it to appear in the DB.

    Polls up to 10 times (6 s apart) and raises if the map never shows up.

    :return: strands_navigation_msgs TopologicalMap built from stored nodes.
    :raises Exception: when the pointset is absent after all retries.
    """
    msg_store = MessageStoreProxy(collection='topological_maps')

    query_meta = {}
    query_meta["pointset"] = point_set

    # waiting for the map to be in the mongodb_store
    ntries=1
    map_found=False
    while not map_found :
        # Use the node count directly; the original stored `len(...) > 0`
        # (a bool) and compared it with <= 0, which only behaved because
        # False == 0 and made the retry log misleading.
        available = len(msg_store.query(strands_navigation_msgs.msg.TopologicalNode._type, {}, query_meta))
        if available <= 0 :
            rospy.logerr("Desired pointset '"+point_set+"' not in datacentre, try :"+str(ntries))
            if ntries <=10 :
                ntries+=1
                rospy.sleep(rospy.Duration.from_sec(6))
            else :
                # The sys.exit(2) that followed this raise was unreachable
                # and has been removed.
                raise Exception("Can't find waypoints.")
        else:
            map_found=True

    query_meta = {}
    query_meta["pointset"] = point_set
    message_list = msg_store.query(strands_navigation_msgs.msg.TopologicalNode._type, {}, query_meta)

    points = strands_navigation_msgs.msg.TopologicalMap()
    points.name = point_set
    #points.map = point_set
    points.pointset = point_set
    #string last_updated
    for i in message_list:
        b = i[0]
        points.nodes.append(b)
    # Map name taken from the first stored node; assumes the map is non-empty.
    points.map = points.nodes[0].map
    return points
class DBPlay(object):
    """Publishes stored base transforms as a MarkerArray for visualisation."""
    def __init__(self):
        self.db_name = rospy.get_param('~db_name','jsk_robot_lifelog')
        self.col_name = rospy.get_param('~col_name', 'pr1012')
        self.use_ros_time = rospy.get_param('~use_ros_time', True)
        self.use_sim_time = rospy.get_param('use_sim_time', False)
        self.downsample = rospy.get_param("~downsample", 3)
        self.label_downsample = rospy.get_param("~label_downsample", 10)
        self.duration = rospy.get_param('~duration', 10) # days
        self.limit = rospy.get_param("~limit", 1000)
        self.msg_store = MessageStoreProxy(database=self.db_name, collection=self.col_name)
        rospy.loginfo("connected to %s.%s" % (self.db_name, self.col_name))
        self.pub = rospy.Publisher('/move_base_marker_array', MarkerArray, queue_size=1)
        self.marker_count = 0

    def run(self):
        """Query TransformStamped entries from the last `~duration` days once
        per second and publish them as (downsampled) labelled markers."""
        while not rospy.is_shutdown():
            rospy.loginfo("$gt: %d" % (rospy.Time.now().secs - self.duration * 60 * 60 * 24))
            if self.use_ros_time or self.use_sim_time:
                # Filter on the message header stamp (ROS / sim time).
                rospy.loginfo("fetching data")
                trans = self.msg_store.query(type=TransformStamped._type,
                                             message_query={"header.stamp.secs": {
                                                 "$lte": rospy.Time.now().secs,
                                                 "$gt": rospy.Time.now().secs - self.duration * 60 * 60 * 24
                                             }},
                                             sort_query=[("$natural", 1)],
                                             limit=self.limit)
            else:
                # Filter on the datacentre insertion time (wall clock).
                trans = self.msg_store.query(type=TransformStamped._type,
                                             meta_query={"inserted_at": {
                                                 "$gt": datetime.utcnow() - timedelta(days=self.duration)
                                             }},
                                             sort_query=[("$natural", 1)])
            rospy.loginfo("found %d msgs" % len(trans))
            m_arr = MarkerArray()
            m_arr.markers = V.transformStampedArrayToLabeledLineStripMarker(trans[::self.downsample], label_downsample=self.label_downsample, discrete=True)
            # NOTE(review): this second assignment overwrites the line-strip
            # markers built on the previous line — confirm whether `+=` (or
            # extend) was intended.
            m_arr.markers = V.transformStampedArrayToLabeledArrayMarker(trans[::self.downsample], label_downsample=self.label_downsample, discrete=True)
            self.pub.publish(m_arr)
            rospy.sleep(1.0)
            rospy.loginfo("publishing move_base_marker_array")
def rename_node(name, new_name, top_map_name):
    """
    Renames a topological map node. All edges will be updated to reflect the change.
    @param name: the current name of the node to be changec
    @param new_name: the new name to give the node
    @param top_map_name: the name of the topological map to work with (pointset)
    @return (number of nodes changed, number of edges changed)
    """
    maps = queries.get_maps()
    if top_map_name not in maps:
        raise Exception("Unknown topological map.")

    msg_store = MessageStoreProxy(collection='topological_maps')
    nodes = msg_store.query(TopologicalNode._type, {}, {'pointset':top_map_name})

    node_names = [node.name for node,meta in nodes]

    node_renames = 0
    edge_changes = 0
    node_changes = 0
    if name not in node_names:
        raise Exception("No such node.")
    if new_name in node_names:
        raise Exception("New node name already in use.")

    # Keep pre-rename metas so the DB update below can match on the old name.
    old_metas = []
    for node, meta in nodes:
        old_metas.append(copy.deepcopy(meta))
        if meta["node"] == name:
            meta["node"] = new_name
        if node.name == name:
            node.name = new_name
            node_renames += 1
            if node_renames > 1:
                raise Exception("More than one node has the same name!")
        # Rewrite edges that point at, or whose id contains, the old name.
        for edge in node.edges:
            if edge.node == name:
                edge.node = new_name
            if edge.edge_id.find(name) > -1:
                edge.edge_id = edge.edge_id.replace(name, new_name)
                edge_changes += 1

    # Save the changed nodes
    for ((node, meta), old_meta) in zip(nodes, old_metas):
        changed = msg_store.update(node, meta, {'name':old_meta['node'], 'pointset':meta['pointset']})
        if changed.success:
            node_changes += 1
    return (node_changes, edge_changes)
def delete_map(self) :
    """Delete every node of this map (meta pointset == self.name) from the
    'topological_maps' collection."""
    rospy.loginfo('Deleting map: '+self.name)
    store = MessageStoreProxy(collection='topological_maps')
    meta_query = {"pointset": self.name}
    for node, meta in store.query(TopologicalNode._type, {}, meta_query):
        store.delete(str(meta['_id']))
def gather_stats(self):
    """Collect NavStatistics for each edge id and build per-edge model input.

    Edges with at least one stat (within self.range, when set) go into the
    returned list; edges with none are appended to self.unknowns.

    :return: list of edge model dicts; "models" holds one entry per stat.
    """
    stats_collection = rospy.get_param('~stats_collection', 'nav_stats')
    msg_store = MessageStoreProxy(collection=stats_collection)

    to_add=[]
    for i in self.eids:
        query = {"topological_map": self.lnodes.name, "edge_id":i["edge_id"]}
        # Optionally restrict stats to the configured epoch range.
        query_meta={}
        if len(self.range) == 2:
            if self.range[1] < 1:
                upperlim = rospy.Time.now().secs
            else:
                upperlim = self.range[1]
            query_meta["epoch"] = {"$gte": self.range[0], "$lt" : upperlim}
        #print query
        #print query_meta

        available = msg_store.query(NavStatistics._type, query, query_meta)
        # print len(available)
        edge_mod={}
        edge_mod["model_id"]= i["model_id"]#self.lnodes.name+'__'+i["edge_id"]
        edge_mod["time_model_id"]=i["time_model_id"]
        edge_mod["dist"]= i["dist"]#self.lnodes.name+'__'+i["edge_id"]
        edge_mod["models"]=[]
        edge_mod["order"]=-1
        edge_mod["t_order"]=-1
        edge_mod["edge_id"]=i["edge_id"]

        for j in available:
            val = {}
            # st: 1 for a usable run, 0 for a fatal failure.
            if j[0].status != 'fatal':
                val["st"] = 1
                # Normalised speed (dist / time), capped at 1.0.
                val["speed"] = i["dist"]/j[0].operation_time
                if val["speed"]>1:
                    val["speed"]=1.0
            else:
                val["st"] = 0
                val["speed"] = 0.0
            # Parse the human-readable start date back into a unix epoch.
            val["epoch"] = int(datetime.strptime(j[0].date_started, "%A, %B %d %Y, at %H:%M:%S hours").strftime('%s'))
            val["optime"] = j[0].operation_time
            edge_mod["models"].append(val)

        if len(available) > 0 :
            to_add.append(edge_mod)
        else :
            self.unknowns.append(edge_mod)
    return to_add
def export_data(tmap, data_window_start, data_window_end, data_window_size, data_windows):
    """Export nav stats for a series of time windows.

    For each of `data_windows` windows, both the start and the end are
    shifted forward by data_window_size * window.
    NOTE(review): the window width is always (data_window_end -
    data_window_start), not data_window_size — confirm this is intended.

    :param tmap: topological map name used to filter NavStatistics.
    """
    msg_store = MessageStoreProxy(collection='nav_stats')

    for window in range(data_windows):
        window_start = data_window_start + data_window_size * window
        window_end = data_window_end + data_window_size * window

        # get nav stats for window
        meta_query = {"epoch" : {"$gt": to_epoch(window_start), "$lt" : to_epoch(window_end)}}
        nav_stats = msg_store.query(NavStatistics._type, {'topological_map': tmap}, meta_query)
        write_stats(window_start, window_end, nav_stats)
def update_node_waypoint(self, node_name, new_pose) :
    """Move node `node_name` of this map to `new_pose`, keeping its stored Z.

    :param node_name: name of the node (within pointset self.name).
    :param new_pose: Pose giving the new position/orientation.
    """
    msg_store = MessageStoreProxy(collection='topological_maps')
    query = {"name" : node_name, "pointset": self.name}
    query_meta = {}
    query_meta["pointset"] = self.name
    query_meta["map"] = self.map
    available = msg_store.query(TopologicalNode._type, query, query_meta)
    if len(available) == 1 :
        # Preserve the stored height: only x/y/orientation come from new_pose.
        positionZ=available[0][0].pose.position.z
        available[0][0].pose = new_pose
        available[0][0].pose.position.z = positionZ
        msg_store.update(available[0][0], query_meta, query, upsert=True)
    else :
        rospy.logerr("Impossible to store in DB "+str(len(available))+" waypoints found after query")
        rospy.logerr("Available data: "+str(available))
def update_node_vertex(self, node_name, vertex_index, vertex_pose) :
    """Move one vertex of a node's polygon to the position of `vertex_pose`.

    The stored vertex is written as an offset from the node's own position
    (vertex - node), computed from the absolute vertex_pose.
    """
    msg_store = MessageStoreProxy(collection='topological_maps')
    query = {"name" : node_name, "pointset": self.name}
    query_meta = {}
    query_meta["pointset"] = self.name
    query_meta["map"] = self.map
    available = msg_store.query(TopologicalNode._type, query, query_meta)
    if len(available) == 1 :
        #vertex_to_edit=available[0][0].verts[vertex_index]
        # Offset of the new vertex from the node centre: -(node - vertex)
        # == vertex - node.
        new_x_pos = -(available[0][0].pose.position.x - vertex_pose.position.x)
        new_y_pos = -(available[0][0].pose.position.y - vertex_pose.position.y)
        available[0][0].verts[vertex_index].x = new_x_pos
        available[0][0].verts[vertex_index].y = new_y_pos
        msg_store.update(available[0][0], query_meta, query, upsert=True)
    else :
        rospy.logerr("Impossible to store in DB "+str(len(available))+" waypoints found after query")
        rospy.logerr("Available data: "+str(available))
def add_edge(self, or_waypoint, de_waypoint, action, edge_id) :
    """Add a directed edge from or_waypoint to de_waypoint using `action`.

    If edge_id is empty or already taken, a unique id of the form
    'origin_destination[_NNN]' is generated instead.

    :return: True when the origin node was found and updated, else False.
    """
    rospy.loginfo('Adding Edge from '+or_waypoint+' to '+de_waypoint+' using '+action)
    node_name = or_waypoint
    #nodeindx = self._get_node_index(edged[0])
    msg_store = MessageStoreProxy(collection='topological_maps')
    query = {"name" : node_name, "pointset": self.nodes.name}
    query_meta = {}
    query_meta["pointset"] = self.nodes.name
    query_meta["map"] = self.nodes.map
    #print query, query_meta
    available = msg_store.query(strands_navigation_msgs.msg.TopologicalNode._type, query, query_meta)
    #print len(available)
    if len(available) == 1 :
        # Collect existing edge ids to keep the new one unique.
        eids = []
        for i in available[0][0].edges :
            eids.append(i.edge_id)

        if not edge_id or edge_id in eids:
            # Generate a fresh id, suffixing a counter until it is unique.
            test=0
            eid = '%s_%s' %(or_waypoint, de_waypoint)
            while eid in eids:
                eid = '%s_%s_%3d' %(or_waypoint, de_waypoint, test)
                test += 1
        else:
            eid=edge_id

        edge = strands_navigation_msgs.msg.Edge()
        edge.node = de_waypoint
        edge.action = action
        edge.top_vel = 0.55
        edge.edge_id = eid
        edge.map_2d = available[0][0].map
        available[0][0].edges.append(edge)

        #print available[0][0]
        msg_store.update(available[0][0], query_meta, query, upsert=True)
        return True
    else :
        rospy.logerr("Impossible to store in DB "+str(len(available))+" waypoints found after query")
        rospy.logerr("Available data: "+str(available))
        return False
def add_content_cb(self, req): #print req data = json.loads(req.content) #print data msg_store = MessageStoreProxy(collection='topological_maps') query = {"name" : req.node, "pointset": self.nodes.name} query_meta = {} query_meta["pointset"] = self.nodes.name query_meta["map"] = self.nodes.map #print query, query_meta available = msg_store.query(strands_navigation_msgs.msg.TopologicalNode._type, query, query_meta) #print len(available) if len(available) != 1: succeded = False print 'there are no nodes or more than 1 with that name' else: succeded = True for i in available: msgid= i[1]['_id'] if 'contains' in i[1]: if type(data) is list : for j in data: if 'category' in j and 'name' in j : i[1]['contains'].append(j) elif type(data) is dict : if 'category' in data and 'name' in data : i[1]['contains'].append(data) else: a=[] if type(data) is list : for j in data: if 'category' in j and 'name' in j : a.append(j) elif type(data) is dict : if 'category' in data and 'name' in data : a.append(data) i[1]['contains']=a meta_out = str(i[1]) print "Updating %s--%s" %(i[0].pointset, i[0].name) msg_store.update_id(msgid, i[0], i[1], upsert = False) return succeded, meta_out
def build_model(self):
    """
    Builds a model of mean duration for each edge using the defined nav stat range
    """
    msg_store = MessageStoreProxy(collection='nav_stats')

    # reset model
    self.edge_to_duration = {}
    self.edge_to_probability = {}

    # Restrict stats to the configured epoch range, if one is set.
    query_meta={}
    if len(self.range) == 2:
        if self.range[1] < 1:
            upperlim = rospy.Time.now().secs
        else:
            upperlim = self.range[1]
        query_meta["epoch"] = {"$gte": self.range[0], "$lt" : upperlim}

    for i in self.map.nodes :
        for j in i.edges:
            if j.edge_id not in self.edge_to_duration:
                query = {"topological_map": self.map.name, "edge_id": j.edge_id}
                stats = msg_store.query(NavStatistics._type, query, query_meta)
                rospy.loginfo('%s stats for %s' % (len(stats), j.edge_id))

                # if no stats, use speed the same as speed-based predictor
                if len(stats) == 0:
                    destination = get_node(self.map, j.node)
                    # Clamp very low top velocities to 0.1 m/s so the
                    # duration estimate cannot blow up.
                    if j.top_vel >= 0.1:
                        self.edge_to_duration[j.edge_id] = rospy.Duration(get_distance_to_node(i, destination)/j.top_vel)
                    else :
                        self.edge_to_duration[j.edge_id] = rospy.Duration(get_distance_to_node(i, destination)/0.1)
                    self.edge_to_probability[j.edge_id] = 0.0
                else:
                    # Mean traversal time and empirical success rate.
                    durations = np.array([stat.operation_time for stat, meta in stats])
                    self.edge_to_duration[j.edge_id] = rospy.Duration(durations.mean())
                    successes = np.array([1 if stat.status == 'success' else 0 for stat, meta in stats])
                    self.edge_to_probability[j.edge_id] = successes.mean()
def check_sanity(client):
    """Repair common defects in every stored topological map.

    For each pointset in the 'topological_maps' collection: fills in missing
    edge ids, raises too-low top velocities to 0.55 and sets empty map_2d
    fields; writes back only nodes that actually changed.

    :param client: pymongo client connected to the message_store database.
    """
    msg_store = MessageStoreProxy(collection='topological_maps')

    db=client.message_store
    collection=db["topological_maps"]
    available = collection.find().distinct("_meta.pointset")
    print available

    for point_set in available:
        #get one message
        #search = {"_meta.pointset": pointset}

        query_meta = {}
        query_meta["pointset"] = point_set
        message_list = msg_store.query(TopologicalNode._type, {}, query_meta)

        #points = strands_navigation_msgs.msg.TopologicalMap()
        #points.name = point_set
        #points.map = point_set
        #points.pointset = point_set
        #string last_updated
        eids = []
        for i in message_list:
            update = False
            print i[0].pointset, i[0].name, i[1]['_id']
            if i[0].edges:
                for j in i[0].edges :
                    if j.edge_id == '':
                        update = True
                        print 'needs edge id'
                        j.edge_id = get_edge_id(i[0].name, j.node, eids)
                        print 'new edge_id %s' %j.edge_id
                        eids.append(j.edge_id)
                    # NOTE(review): a top_vel of exactly 0.1 is also reset.
                    if j.top_vel <= 0.1 :
                        update = True
                        print 'needs top vel'
                        j.top_vel = 0.55
                    if j.map_2d == '':
                        update = True
                        print 'needs map_2d'
                        j.map_2d = i[0].map
            if update:
                msg_store.update_id(i[1]['_id'], i[0], i[1], upsert = False)
def get_nodes(map_name, point_set, meta_query): msg_store = MessageStoreProxy(collection="topological_maps") query_meta = {} query_meta["pointset"] = {} query_meta["map"] = {} query_meta["contains.category"] = {} query_meta["pointset"]['$regex'] = point_set query_meta["map"]['$regex'] = map_name query_meta["contains.category"]['$regex'] = meta_query nodes = msg_store.query(TopologicalNode._type, {}, query_meta); available = len(nodes) > 0 if available <= 0 : print "Desired pointset '"+point_set+"' not in datacentre" print "Available pointsets: "+str(available) return nodes