    def update_ros_srv(self, req):
        """
        Updates a msg in the store
        """
        collection = self._mongo_client[req.database][req.collection]

        # build the query doc
        obj_query = self.to_query_dict(req.message_query, req.meta_query)

        # restrict results to have the type asked for
        obj_query["_meta.stored_type"] = req.message.type

        # TODO start using some string constants!

        rospy.logdebug("update spec document: %s", obj_query)

        # deserialize data into object
        obj = dc_util.deserialise_message(req.message)

        meta = dc_util.string_pair_list_to_dictionary(req.meta)
        meta['last_updated_at'] = datetime.utcfromtimestamp(rospy.get_rostime().to_sec())
        meta['last_updated_by'] = req._connection_header['callerid']

        (obj_id, altered) = dc_util.update_message(collection, obj_query, obj, meta, req.upsert)

        if self.replicate_on_write:
            # also do update to extra datacentres
            for extra_client in self.extra_clients:
                extra_collection = extra_client[req.database][req.collection]
                dc_util.update_message(extra_collection, obj_query, obj, meta, req.upsert)

        return str(obj_id), altered
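Note: these excerpts are methods of the mongodb_store message store node and omit the module-level imports they rely on. A minimal sketch of the imports the snippets assume (module paths are best-effort and may differ between mongodb_store releases):

# Sketch of the module-level imports assumed by the excerpts on this page.
# Paths follow the mongodb_store packages; verify against the installed version.
import json
from datetime import datetime

import genpy
import pymongo
import rospy
from bson import json_util
from tf2_msgs.msg import TFMessage  # older setups may use tf.msg instead

import mongodb_store.util as dc_util
from mongodb_store_msgs.msg import StringPair, StringPairList
from mongodb_store_msgs.srv import MongoQueryMsgRequest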
Example #2
    def insert_ros_srv(self, req):
        """
        Receives a message and stores it in the requested collection.
        """
        # deserialize data into object
        obj = dc_util.deserialise_message(req.message)
        # convert input tuple to dict
        meta = dc_util.string_pair_list_to_dictionary(req.meta)
        # get requested collection from the db, creating if necessary
        collection = self._mongo_client[req.database][req.collection]

        meta['inserted_at'] = datetime.utcfromtimestamp(rospy.get_rostime().to_sec())
        meta['inserted_by'] = req._connection_header['callerid']
        obj_id = dc_util.store_message(collection, obj, meta)

        if self.replicate_on_write:
            # also do insert to extra datacentres, making sure object ids are consistent
            for extra_client in self.extra_clients:
                extra_collection = extra_client[req.database][req.collection]
                dc_util.store_message(extra_collection, obj, meta, obj_id)

        return str(obj_id)
Example #3
    def insert_ros_srv(self, req):
        """
        Receives a message and stores it in the requested collection.
        """

        # deserialize data into object
        obj = dc_util.deserialise_message(req.message)
        # convert input tuple to dict
        meta = dc_util.string_pair_list_to_dictionary(req.meta)
        # get requested collection from the db, creating if necessary
        collection = self._mongo_client[req.database][req.collection]

        meta['inserted_at'] = datetime.utcfromtimestamp(
            rospy.get_rostime().to_sec())
        meta['inserted_by'] = req._connection_header['callerid']
        obj_id = dc_util.store_message(collection, obj, meta)

        if self.replicate_on_write:
            # also do insert to extra datacentres, making sure object ids are consistent
            for extra_client in self.extra_clients:
                extra_collection = extra_client[req.database][req.collection]
                dc_util.store_message(extra_collection, obj, meta, obj_id)

        return str(obj_id)
Example #4
    def insert_ros_srv(self, req):
        """
        Receives a message and stores it in the requested collection.
        """

        # deserialize data into object
        obj = dc_util.deserialise_message(req.message)
        # convert input tuple to dict
        meta = dc_util.string_pair_list_to_dictionary(req.meta)
        # get requested collection from the db, creating if necessary
        collection = self._mongo_client[req.database][req.collection]

        # check if the object has a pose attribute
        if hasattr(obj, 'pose'):
            # if it does, create a 2D location index
            collection.create_index([("loc", pymongo.GEO2D)])
        # check if the object has a geotype attribute
        if hasattr(obj, 'geotype'):
            # if it does, create a geospatial index
            collection.create_index([("geoloc", pymongo.GEOSPHERE)])

        stamp = rospy.get_rostime()
        meta['inserted_at'] = datetime.utcfromtimestamp(stamp.to_sec())
        meta['inserted_by'] = req._connection_header['callerid']
        if hasattr(obj, "header") and hasattr(obj.header, "stamp") and\
           isinstance(obj.header.stamp, genpy.Time):
            stamp = obj.header.stamp
        elif isinstance(obj, TFMessage):
            if obj.transforms:
                transforms = sorted(obj.transforms,
                                    key=lambda m: m.header.stamp,
                                    reverse=True)
                stamp = transforms[0].header.stamp

        meta['published_at'] = datetime.utcfromtimestamp(stamp.to_sec())
        meta['timestamp'] = stamp.to_nsec()

        obj_id = dc_util.store_message(collection, obj, meta)

        if self.replicate_on_write:
            # also do insert to extra datacentres, making sure object ids are consistent
            for extra_client in self.extra_clients:
                extra_collection = extra_client[req.database][req.collection]
                dc_util.store_message(extra_collection, obj, meta, obj_id)

        return str(obj_id)
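Since this variant records the message's own header stamp (falling back to the newest transform in a TFMessage) into meta, the stored documents can be filtered by time straight from pymongo. A hedged sketch, assuming the meta dictionary ends up under the "_meta" sub-document (as the "_meta.stored_type" query above suggests) and that timestamps are the nanosecond values written by stamp.to_nsec():

# Hypothetical helper, not part of the node: range query on the stored timestamp.
def find_in_time_range(collection, start_ns, end_ns):
    # "_meta.timestamp" holds stamp.to_nsec(), i.e. nanoseconds since the epoch
    return collection.find({"_meta.timestamp": {"$gte": start_ns, "$lt": end_ns}})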
Example #5
def insert_msg(message, database='message_store', collection='message_store'):
    # empty JSON query encoded as a StringPair, as expected by the meta helpers
    sp = (StringPair(MongoQueryMsgRequest.JSON_QUERY, json.dumps({}, default=json_util.default)),)

    # deserialize data into object (round-trip through serialisation to copy the message)
    obj = dc_util.deserialise_message(dc_util.serialise_message(message))
    # convert input tuple to dict
    meta = dc_util.string_pair_list_to_dictionary(StringPairList(sp))
    # get requested collection from the db, creating if necessary
    # (_mongo_client is assumed to be a module-level pymongo client)
    collection = _mongo_client[database][collection]
    meta['inserted_at'] = datetime.utcfromtimestamp(rospy.get_rostime().to_sec())
    meta['inserted_by'] = 'asdf'  # placeholder caller id
    obj_id = dc_util.store_message(collection, obj, meta)

    return str(obj_id)
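A hedged usage sketch for the insert_msg helper above; it assumes rospy.init_node() has already been called and that _mongo_client is a module-level pymongo.MongoClient, which the excerpt does not show:

# Hypothetical usage of insert_msg (assumes a running ROS node and a
# module-level _mongo_client = pymongo.MongoClient(...)).
from geometry_msgs.msg import Pose

pose_id = insert_msg(Pose(), database='message_store', collection='poses')
print(pose_id)  # stringified ObjectId of the stored document

Example #6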
    def insert_ros_srv(self, req):
        """
        Receives a message and stores it in the requested collection.
        """
        # deserialize data into object
        obj = dc_util.deserialise_message(req.message)
        # convert input tuple to dict
        meta = dc_util.string_pair_list_to_dictionary(req.meta)
        # get requested collection from the db, creating if necessary
        collection = self._mongo_client[req.database][req.collection]

        # check if the object has a pose attribute
        if hasattr(obj, 'pose'):
            # if it does, create a 2D location index
            collection.create_index([("loc", pymongo.GEO2D)])
        # check if the object has a geotype attribute
        if hasattr(obj, 'geotype'):
            # if it does, create a geospatial index
            collection.create_index([("geoloc", pymongo.GEOSPHERE)])

        meta['inserted_at'] = datetime.utcfromtimestamp(rospy.get_rostime().to_sec())
        meta['inserted_by'] = req._connection_header['callerid']
        obj_id = dc_util.store_message(collection, obj, meta)

        if self.replicate_on_write:
            # also do insert to extra datacentres, making sure object ids are consistent
            for extra_client in self.extra_clients:
                extra_collection = extra_client[req.database][req.collection]
                dc_util.store_message(extra_collection, obj, meta, obj_id)

        return str(obj_id)
Example #7
    def insert_ros_srv(self, req):
        """
        Receives a message and stores it in the requested collection.
        """
        # deserialize data into object
        obj = dc_util.deserialise_message(req.message)
        # convert input tuple to dict
        meta = dc_util.string_pair_list_to_dictionary(req.meta)
        # get requested collection from the db, creating if necessary
        collection = self._mongo_client[req.database][req.collection]
        # check if the object has a pose attribute
        if hasattr(obj, 'pose'):
            # if it does, create a 2D location index
            collection.create_index([("loc", pymongo.GEO2D)])

        # check if the object has a geotype attribute
        if hasattr(obj, 'geotype'):
            # if it does, create a geospatial index
            collection.create_index([("geoloc", pymongo.GEOSPHERE)])

        meta['inserted_at'] = datetime.utcfromtimestamp(
            rospy.get_rostime().to_sec())
        meta['inserted_by'] = req._connection_header['callerid']
        obj_id = dc_util.store_message(collection, obj, meta)

        if self.replicate_on_write:
            # also do insert to extra datacentres, making sure object ids are consistent
            for extra_client in self.extra_clients:
                extra_collection = extra_client[req.database][req.collection]
                dc_util.store_message(extra_collection, obj, meta, obj_id)

        return str(obj_id)
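The GEO2D and GEOSPHERE indexes created above are only useful at query time; a hedged sketch of matching pymongo queries, assuming the stored documents carry "loc" as a legacy [x, y] pair and "geoloc" as a GeoJSON geometry:

# Hypothetical geo queries against the indexes created above.
def find_near_xy(collection, x, y, max_results=10):
    # 2d index on "loc": documents nearest to (x, y)
    return collection.find({"loc": {"$near": [x, y]}}).limit(max_results)

def find_near_point(collection, lon, lat, max_metres=100.0):
    # 2dsphere index on "geoloc": documents within max_metres of a GeoJSON point
    return collection.find({"geoloc": {"$nearSphere": {
        "$geometry": {"type": "Point", "coordinates": [lon, lat]},
        "$maxDistance": max_metres}}})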
Example #8
    def update_ros_srv(self, req):
        """
        Updates a msg in the store
        """
        collection = self._mongo_client[req.database][req.collection]

        # build the query doc
        obj_query = self.to_query_dict(req.message_query, req.meta_query)

        # restrict results to have the type asked for
        obj_query["_meta.stored_type"] = req.message.type

        # TODO start using some string constants!

        rospy.logdebug("update spec document: %s", obj_query)

        # deserialize data into object
        obj = dc_util.deserialise_message(req.message)

        meta = dc_util.string_pair_list_to_dictionary(req.meta)
        meta['last_updated_at'] = datetime.utcfromtimestamp(
            rospy.get_rostime().to_sec())
        meta['last_updated_by'] = req._connection_header['callerid']

        (obj_id, altered) = dc_util.update_message(collection, obj_query, obj,
                                                   meta, req.upsert)

        if self.replicate_on_write:
            # also do update to extra datacentres
            for extra_client in self.extra_clients:
                extra_collection = extra_client[req.database][req.collection]
                dc_util.update_message(extra_collection, obj_query, obj, meta,
                                       req.upsert)

        return str(obj_id), altered
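On the client side these services are usually reached through mongodb_store's MessageStoreProxy rather than by calling them directly. A hedged sketch along the lines of the mongodb_store tutorials (verify the proxy API against the installed version):

# Hedged client-side sketch using the MessageStoreProxy wrapper.
import rospy
from geometry_msgs.msg import Pose
from mongodb_store.message_store import MessageStoreProxy

rospy.init_node('message_store_client_example')
msg_store = MessageStoreProxy()

p = Pose()
p_id = msg_store.insert_named("example pose", p)  # goes through the insert service
p.position.x = 1.0
msg_store.update_named("example pose", p)         # goes through the update service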