# Example #1
# 0
    def queue_from_db(self, running):
        """Load every message in the configured time window from MongoDB and
        queue it for playback.

        Args:
            running: shared flag object (e.g. multiprocessing.Value); queuing
                stops as soon as ``running.value`` becomes falsy.
        """
        # Make sure there's an index on time in the collection so the sorted
        # query doesn't require the whole collection to be loaded.
        self.collection.ensure_index(TIME_KEY)

        # All documents within [start_time, end_time], ascending by time.
        documents = self.collection.find(
            {TIME_KEY: {'$gte': to_datetime(self.start_time),
                        '$lte': to_datetime(self.end_time)}},
            sort=[(TIME_KEY, pymongo.ASCENDING)])

        # count() is a server round trip -- do it once, not twice.
        message_count = documents.count()
        if message_count == 0:
            rospy.logwarn('No messages to play back from topic %s' % self.collection_name)
            return
        rospy.logdebug('Playing back %d messages', message_count)

        # Load the message class for this collection; all documents in one
        # collection should store the same class. Cache the first document's
        # meta -- indexing the cursor re-queries the server on every access.
        meta = documents[0]["_meta"]
        msg_cls = mg_util.load_class(meta["stored_class"])

        # Older documents may not carry a "latch" flag; default to False
        # instead of raising KeyError.
        latch = meta.get("latch", False)

        # Publisher won't be used until something is on the queue, so it's
        # safe to construct it here.
        self.publisher = rospy.Publisher(meta["topic"], msg_cls,
                                         latch=latch, queue_size=10)

        for document in documents:
            if not running.value:
                break
            # Instantiate the ROS message object from the stored dictionary,
            # converting the stamp back to a ROS time first.
            document['header']['stamp'] = to_ros_time(document['header']['stamp'])
            message = mg_util.dictionary_to_message(document, msg_cls)
            # put() blocks while the queue is full, until a take is performed.
            self.to_publish.put((message, document["header"]["stamp"]))

        rospy.logdebug('All messages queued for topic %s' % self.collection_name)
 def insert_maps(self, data, new_pointset=None, force=False):
     """Insert topological maps into the message store.

     Args:
         data: iterable of maps; each map is an iterable of node dicts
             with 'meta' and 'node' entries.
         new_pointset: optional pointset name override; when more than one
             map is given, a 1-based index is appended per map.
         force: when True, an existing map with the same pointset is
             deleted and re-inserted instead of being skipped.
     """
     current_maps = self.get_maps()
     for idx, tmap in enumerate(data):
         pointset = None
         if new_pointset != None: # If there are more than one map, it takes the custom pointset and appends an index
             pointset = new_pointset+str(idx+1) if len(data) > 1 else new_pointset
         first_node = True
         for i in tmap:
             try:
                 meta = i['meta']
                 # apply the pointset override (if any) to every node's meta
                 meta['pointset'] = pointset if pointset != None else meta['pointset']
                 # the duplicate-map check runs once per map, on its first node
                 if meta['pointset'] in current_maps and first_node:
                     first_node = False
                     if not force:
                         rospy.logwarn("Map '%s' already in datacentre, skipping! Use -f to force override or change pointset name with --pointset" % meta['pointset'])
                         break
                     else:
                         # force: drop the stored map before re-inserting
                         topo_map = topological_map(meta['pointset'])
                         topo_map.delete_map()
                 elif first_node:
                     first_node = False
                     rospy.loginfo("Inserting map: %s" % meta['pointset'])
                 msgv = dc_util.dictionary_to_message(i['node'], TopologicalNode)
                 self.msg_store.insert(msgv,meta)
             except TypeError:
                 pass # Not a topo map
 def insert_maps(self, data, new_pointset=None, force=False):
     """Store topological maps in the message store.

     Each map is an iterable of node dicts carrying 'meta' and 'node'
     entries. When new_pointset is given it overrides each map's pointset
     (with a 1-based suffix when several maps are supplied). Maps whose
     pointset already exists are skipped unless force is set, in which case
     the stored map is deleted first.
     """
     known_pointsets = self.get_maps()
     multiple = len(data) > 1
     for map_index, tmap in enumerate(data):
         override_name = None
         if new_pointset != None:
             # custom pointset; append a per-map index when several maps exist
             override_name = new_pointset + str(map_index + 1) if multiple else new_pointset
         handled_first = False
         for entry in tmap:
             try:
                 meta = entry['meta']
                 if override_name != None:
                     meta['pointset'] = override_name
                 if not handled_first:
                     # duplicate/announce handling happens on the first node only
                     handled_first = True
                     if meta['pointset'] in known_pointsets:
                         if not force:
                             rospy.logwarn("Map '%s' already in datacentre, skipping! Use -f to force override or change pointset name with --pointset" % meta['pointset'])
                             break
                         # force requested: remove the stored map first
                         existing = topological_map(meta['pointset'])
                         existing.delete_map()
                     else:
                         rospy.loginfo("Inserting map: %s" % meta['pointset'])
                 node_msg = dc_util.dictionary_to_message(entry['node'], TopologicalNode)
                 self.msg_store.insert(node_msg, meta)
             except TypeError:
                 pass  # Not a topo map
    def query_messages_ros_srv(self, req):
        """
        Service callback: query stored messages from the datacentre.

        Builds a MongoDB query from req.message_query / req.meta_query,
        restricted to req.type, runs it against the requested
        database/collection -- falling back to the extra datacentre clients
        when the primary returns nothing -- and returns the matching
        messages in serialised form together with their JSON-encoded meta
        documents.
        """
        collection = self._mongo_client[req.database][req.collection]

        # build the query doc
        obj_query = self.to_query_dict(req.message_query, req.meta_query)

        # restrict results to have the type asked for
        obj_query["_meta.stored_type"] = req.type

        # TODO start using some string constants!

        rospy.logdebug("query document: %s", obj_query)

        # convert the requested sort order into pymongo-style
        # (key, direction) tuples; numeric strings become int directions
        sort_query_dict = dc_util.string_pair_list_to_dictionary(req.sort_query)
        sort_query_tuples = []
        for k,v in sort_query_dict.iteritems():
            try:
                sort_query_tuples.append((k, int(v)))
            except ValueError:
                # not an int direction; pass the value through unchanged
                sort_query_tuples.append((k,v))

        entries =  dc_util.query_message(collection, obj_query, sort_query_tuples, req.single, req.limit)

        # keep trying clients until we find an answer
        for extra_client in self.extra_clients:
            if len(entries) == 0:
                extra_collection = extra_client[req.database][req.collection]
                entries =  dc_util.query_message(extra_collection, obj_query, sort_query_tuples, req.single, req.limit)
                if len(entries) > 0:
                    rospy.loginfo("found result in extra datacentre")
            else:
                break


        # rospy.logdebug("entries: %s", entries)

        serialised_messages = ()
        metas = ()

        for entry in entries:
            # load the class object for this type
            # TODO this should be the same for every item in the list, so could reuse
            cls = dc_util.load_class(entry["_meta"]["stored_class"])
            # instantiate the ROS message object from the dictionary retrieved from the db
            message = dc_util.dictionary_to_message(entry, cls)
            # then serialise this object in order to be sent in a generic form
            serialised_messages = serialised_messages + (dc_util.serialise_message(message), )
            # add ObjectID into meta as it might be useful later
            entry["_meta"]["_id"] = entry["_id"]
            # serialise meta
            metas = metas + (StringPairList([StringPair(dc_srv.MongoQueryMsgRequest.JSON_QUERY, json.dumps(entry["_meta"], default=json_util.default))]), )

        return [serialised_messages, metas]
# Example #5
# 0
    def queue_from_db(self, running):
        """Queue all messages from this topic's collection for playback.

        Finds every document whose time key falls inside
        [self.start_time, self.end_time], sorted ascending by time, builds
        the publisher for the stored topic, and pushes (message, timestamp)
        pairs onto self.to_publish until the shared `running` flag is
        cleared.
        """
        # make sure there's an index on time in the collection so the sort operation doesn't require the whole collection to be loaded
        self.collection.ensure_index(TIME_KEY)
        # get all documents within the time window, sorted ascending order by time
        documents = self.collection.find(
            {
                TIME_KEY: {
                    '$gte': to_datetime(self.start_time),
                    '$lte': to_datetime(self.end_time)
                }
            },
            sort=[(TIME_KEY, pymongo.ASCENDING)])

        if documents.count() == 0:
            rospy.logwarn('No messages to play back from topic %s' %
                          self.collection_name)
            return
        else:
            rospy.logdebug('Playing back %d messages', documents.count())

        # load message class for this collection, they should all be the same
        msg_cls = mg_util.load_class(documents[0]["_meta"]["stored_class"])

        # older documents may not carry a "latch" flag; default to False
        latch = False
        if "latch" in documents[0]["_meta"]:
            latch = documents[0]["_meta"]["latch"]

        # publisher won't be used until something is on the queue, so it's safe to construct it here
        self.publisher = rospy.Publisher(documents[0]["_meta"]["topic"],
                                         msg_cls,
                                         latch=latch,
                                         queue_size=10)

        for document in documents:
            if running.value:
                # instantiate the ROS message object from the dictionary retrieved from the db
                message = mg_util.dictionary_to_message(document, msg_cls)
                # print (message, document["_meta"]["inserted_at"])
                # put will only work while there is space in the queue, if not it will block until another take is performed
                # NOTE: playback timestamps come from _meta.inserted_at (the
                # datacentre insertion time), converted back to a ROS time
                self.to_publish.put(
                    (message, to_ros_time(document["_meta"]["inserted_at"])))
            else:
                break

        rospy.logdebug('All messages queued for topic %s' %
                       self.collection_name)
# Example #6
# 0
def update_node(node, pointset):
    """Build a TopologicalNode message from a stored node dictionary.

    Args:
        node: dict of node fields; must contain a 'pose' dict.
        pointset: pointset name to assign to the resulting node.

    Returns:
        A populated TopologicalNode with default goal tolerances, the pose
        converted to a Pose message, and the given pointset.
    """
    # Fields not copied verbatim: 'pose' is converted to a Pose message
    # below; 'edges' and 'verts' are skipped here entirely.
    skipped = ['edges', 'verts', 'pose']
    n = TopologicalNode()

    # default goal tolerances applied to every node
    n.yaw_goal_tolerance = 0.1
    n.xy_goal_tolerance = 0.3

    # Copy every matching field straight from the dictionary.
    # ('in' replaces dict.has_key(), which was removed in Python 3.)
    for slot in n.__slots__:
        if slot in node and slot not in skipped:
            setattr(n, slot, node[slot])

    # convert the stored pose dictionary into a Pose message
    n.pose = dc_util.dictionary_to_message(node['pose'], Pose)
    n.pointset = pointset
    return n
def update_node(node, pointset):
    """Create a TopologicalNode from a node dictionary.

    Copies every slot present in the dictionary (except edges, verts and
    pose), applies default goal tolerances, converts the 'pose' entry to a
    Pose message, and stamps the node with the given pointset.
    """
    excluded = ('edges', 'verts', 'pose')
    result = TopologicalNode()

    # default goal tolerances for every node
    result.yaw_goal_tolerance = 0.1
    result.xy_goal_tolerance = 0.3

    for field in result.__slots__:
        if field in excluded:
            continue
        if node.has_key(field):
            setattr(result, field, node[field])

    # the pose is stored as a dictionary; rebuild the Pose message
    pose_msg = dc_util.dictionary_to_message(node['pose'], Pose)
    result.pose = pose_msg
    result.pointset = pointset
    return result
#!/usr/bin/env python

import json
import yaml
import sys

from strands_navigation_msgs.msg import TopologicalNode
import mongodb_store.util as dc_util
from mongodb_store.message_store import MessageStoreProxy

if __name__ == '__main__':
    # Expect exactly one argument: the JSON file holding the exported map.
    if len(sys.argv) < 2:
        print("usage: insert_map input_file.txt")
        sys.exit(2)

    filename = str(sys.argv[1])

    msg_store = MessageStoreProxy(collection='topological_maps')

    # Use a context manager so the file handle is closed even if parsing
    # fails (the original open().read() leaked the handle).
    with open(filename, 'rb') as json_file:
        data = json.load(json_file)

    # Each entry holds the node message dictionary plus its meta information.
    for i in data:
        meta = i['meta']
        msgv = dc_util.dictionary_to_message(i['node'], TopologicalNode)
        msg_store.insert(msgv, meta)
# Example #9
# 0
    def query_messages_ros_srv(self, req):
        """
        Service callback: query stored messages from the datacentre.

        Builds a MongoDB query from the request's message/meta query parts
        restricted to req.type, applies the requested sort order and field
        projection, falls back to the extra datacentre clients when the
        primary collection returns nothing (only when replicate_on_write is
        set), and returns the serialised matching messages plus their
        JSON-encoded meta documents.
        """
        collection = self._mongo_client[req.database][req.collection]

        # build the query doc
        obj_query = self.to_query_dict(req.message_query, req.meta_query)

        # restrict results to have the type asked for
        obj_query["_meta.stored_type"] = req.type

        # TODO start using some string constants!

        rospy.logdebug("query document: %s", obj_query)

        # convert the requested sort order into pymongo-style
        # (key, direction) tuples; numeric strings become int directions
        sort_query_dict = dc_util.string_pair_list_to_dictionary(
            req.sort_query)
        sort_query_tuples = []
        for k, v in iteritems(sort_query_dict):
            try:
                sort_query_tuples.append((k, int(v)))
            except ValueError:
                # not an int direction; pass the value through unchanged
                sort_query_tuples.append((k, v))

        # optional projection restricting which fields come back
        projection_query_dict = dc_util.string_pair_list_to_dictionary(
            req.projection_query)
        # meta-only projection: with a field projection in place the main
        # results may omit _id, so _meta/_id is fetched separately below
        projection_meta_dict = dict()
        projection_meta_dict["_meta"] = 1

        entries = dc_util.query_message(collection, obj_query,
                                        sort_query_tuples,
                                        projection_query_dict, req.single,
                                        req.limit)
        if projection_query_dict:
            meta_entries = dc_util.query_message(collection, obj_query,
                                                 sort_query_tuples,
                                                 projection_meta_dict,
                                                 req.single, req.limit)

        # keep trying clients until we find an answer
        if self.replicate_on_write:
            for extra_client in self.extra_clients:
                if len(entries) == 0:
                    extra_collection = extra_client[req.database][
                        req.collection]
                    entries = dc_util.query_message(extra_collection,
                                                    obj_query,
                                                    sort_query_tuples,
                                                    projection_query_dict,
                                                    req.single, req.limit)
                    if projection_query_dict:
                        meta_entries = dc_util.query_message(
                            extra_collection, obj_query, sort_query_tuples,
                            projection_meta_dict, req.single, req.limit)
                    if len(entries) > 0:
                        rospy.loginfo("found result in extra datacentre")
                else:
                    break

        serialised_messages = ()
        metas = ()

        for idx, entry in enumerate(entries):

            # load the class object for this type
            # TODO this should be the same for every item in the list, so could reuse
            cls = dc_util.load_class(entry["_meta"]["stored_class"])
            # instantiate the ROS message object from the dictionary retrieved from the db
            message = dc_util.dictionary_to_message(entry, cls)
            # then serialise this object in order to be sent in a generic form
            serialised_messages = serialised_messages + (
                dc_util.serialise_message(message), )
            # add ObjectID into meta as it might be useful later; with a
            # projection, take _id from the meta-only result (both queries
            # share query + sort, so indices line up)
            if projection_query_dict:
                entry["_meta"]["_id"] = meta_entries[idx]["_id"]
            else:
                entry["_meta"]["_id"] = entry["_id"]
            # serialise meta
            metas = metas + (StringPairList([
                StringPair(
                    dc_srv.MongoQueryMsgRequest.JSON_QUERY,
                    json.dumps(entry["_meta"], default=json_util.default))
            ]), )

        return [serialised_messages, metas]
        print "usage: insert_map input_file.txt"
	sys.exit(2)

    filename=str(sys.argv[1])
    #dataset_name=str(sys.argv[2])
    #map_name=str(sys.argv[3])

    msg_store = MessageStoreProxy(collection='topological_maps')

    json_data=open(filename, 'rb').read()
    
    data = json.loads(json_data)
    
    for i in data:
        meta = i['meta']
        msgv = dc_util.dictionary_to_message(i['node'], TopologicalNode)
        msg_store.insert(msgv,meta)
        #mongodb_store.util.store_message(points_db,p,val)












# Example #11
# 0
#To save depth images without losing values due to png

def save_depth_image(image, filename):
    """Write *image* to *filename* with cv.Save under the name "depth".

    Uses OpenCV's native serialisation (legacy cv 1.x API) so raw depth
    values are preserved, unlike lossy PNG encoding.
    """
    # NOTE(review): presumably `image` is a CvMat/IplImage as required by
    # the legacy cv.Save API -- confirm against the caller.
    cv.Save(filename, image, name="depth")

#Connecting to the mongodb database. 

mong = pymongo.MongoClient("bruxelles", 62345)
db = mong['message_store'].upper_bodies
nimg = db.find({},{"ubd_rgb.encoding":1}).count()
first_doc = True

raw_input("A total of {} images recorded. Press enter to start dumping into {}".format(nimg, os.getcwd()))
for i, entry in enumerate(db.find()):
    cls = dc_util.load_class(entry["_meta"]["stored_class"])
    message = dc_util.dictionary_to_message(entry, cls)
    
    #print(type(message))
    #print(message.robot)

    arrayPosition=0
    size = len(message.ubd_rgb)


# Dumping the rgb and depth images + adding the images in a json file

    if(size!=0):
        while(arrayPosition<=size-1):

			img_rgb = CvBridge().imgmsg_to_cv2(message.ubd_rgb[arrayPosition])
			img_d = CvBridge().imgmsg_to_cv2(message.ubd_d[arrayPosition])