def to_query_dict(self, message_query, meta_query):
    """
    Decode the two StringPairList queries and merge them into a single
    MongoDB query dict; meta constraints are prefixed with "_meta." so
    they address the stored meta sub-document.
    """
    query = dc_util.string_pair_list_to_dictionary(message_query)
    meta_dict = dc_util.string_pair_list_to_dictionary(meta_query)
    for key, value in iteritems(meta_dict):
        query["_meta." + key] = value
    return query
def to_query_dict(self, message_query, meta_query):
    """
    Decode and combine the given StringPairList queries into a single
    MongoDB query dict.

    Meta constraints are stored under the "_meta" sub-document, so each
    meta key is prefixed with "_meta." to use MongoDB's dotted-field
    query syntax.
    """
    obj_query = dc_util.string_pair_list_to_dictionary(message_query)
    bare_meta_query = dc_util.string_pair_list_to_dictionary(meta_query)
    # .items() replaces the Python-2-only .iteritems(), which was
    # removed in Python 3 and would raise AttributeError there
    for k, v in bare_meta_query.items():
        obj_query["_meta." + k] = v
    return obj_query
def update_ros_srv(self, req):
    """
    Update a message already in the store, matching it by the combined
    message/meta query and its stored type.

    Returns a pair: the object id as a string, and whether the stored
    document was altered.
    """
    target = self._mongo_client[req.database][req.collection]
    # combine message/meta queries and pin down the stored type
    spec = self.to_query_dict(req.message_query, req.meta_query)
    spec["_meta.stored_type"] = req.message.type
    rospy.logdebug("update spec document: %s", spec)
    new_obj = dc_util.deserialise_message(req.message)
    new_meta = dc_util.string_pair_list_to_dictionary(req.meta)
    new_meta['last_updated_at'] = datetime.utcfromtimestamp(rospy.get_rostime().to_sec())
    new_meta['last_updated_by'] = req._connection_header['callerid']
    obj_id, altered = dc_util.update_message(target, spec, new_obj, new_meta, req.upsert)
    if self.replicate_on_write:
        # mirror the update into every extra datacentre
        for client in self.extra_clients:
            dc_util.update_message(client[req.database][req.collection],
                                   spec, new_obj, new_meta, req.upsert)
    return str(obj_id), altered
def insert_ros_srv(self, req):
    """
    Insert the serialised message from *req* into the requested
    database/collection, stamping the meta document with insertion
    time and caller id.

    Returns the new document's ObjectId as a string.
    """
    payload = dc_util.deserialise_message(req.message)
    doc_meta = dc_util.string_pair_list_to_dictionary(req.meta)
    target = self._mongo_client[req.database][req.collection]
    doc_meta['inserted_at'] = datetime.utcfromtimestamp(rospy.get_rostime().to_sec())
    doc_meta['inserted_by'] = req._connection_header['callerid']
    doc_id = dc_util.store_message(target, payload, doc_meta)
    if self.replicate_on_write:
        # reuse the same object id so every datacentre stays consistent
        for client in self.extra_clients:
            dc_util.store_message(client[req.database][req.collection],
                                  payload, doc_meta, doc_id)
    return str(doc_id)
def insert_ros_srv(self, req):
    """
    Store the message carried by *req* in the requested collection
    (created on first use), recording when and by whom it was inserted.

    Returns the inserted document's ObjectId as a string.
    """
    destination = self._mongo_client[req.database][req.collection]
    message_obj = dc_util.deserialise_message(req.message)
    message_meta = dc_util.string_pair_list_to_dictionary(req.meta)
    now = rospy.get_rostime().to_sec()
    message_meta['inserted_at'] = datetime.utcfromtimestamp(now)
    message_meta['inserted_by'] = req._connection_header['callerid']
    new_id = dc_util.store_message(destination, message_obj, message_meta)
    if self.replicate_on_write:
        # replicate under the identical object id
        for remote in self.extra_clients:
            remote_collection = remote[req.database][req.collection]
            dc_util.store_message(remote_collection, message_obj,
                                  message_meta, new_id)
    return str(new_id)
def query_messages_ros_srv(self, req):
    """
    Query the given collection for stored messages of type req.type.

    Builds a MongoDB query from the message/meta queries in *req* and
    returns [serialised_messages, metas], where each meta is a
    StringPairList carrying the JSON-encoded "_meta" document
    (augmented with the document's ObjectId under "_id").
    """
    collection = self._mongo_client[req.database][req.collection]
    # build the query doc
    obj_query = self.to_query_dict(req.message_query, req.meta_query)
    # restrict results to have the type asked for
    obj_query["_meta.stored_type"] = req.type
    rospy.logdebug("query document: %s", obj_query)
    # convert the sort spec into pymongo (field, direction) tuples;
    # numeric strings become ints (e.g. "1"/"-1"), others pass through
    sort_query_dict = dc_util.string_pair_list_to_dictionary(req.sort_query)
    sort_query_tuples = []
    # .items() replaces the Python-2-only .iteritems() (removed in Py3)
    for k, v in sort_query_dict.items():
        try:
            sort_query_tuples.append((k, int(v)))
        except ValueError:
            sort_query_tuples.append((k, v))
    entries = dc_util.query_message(collection, obj_query,
                                    sort_query_tuples, req.single, req.limit)
    # keep trying extra clients until we find an answer
    for extra_client in self.extra_clients:
        if len(entries) == 0:
            extra_collection = extra_client[req.database][req.collection]
            entries = dc_util.query_message(extra_collection, obj_query,
                                            sort_query_tuples, req.single,
                                            req.limit)
            if len(entries) > 0:
                rospy.loginfo("found result in extra datacentre")
        else:
            break
    serialised_messages = ()
    metas = ()
    for entry in entries:
        # load the class object for this type
        # TODO this should be the same for every item in the list, so could reuse
        cls = dc_util.load_class(entry["_meta"]["stored_class"])
        # instantiate the ROS message object from the dictionary retrieved from the db
        message = dc_util.dictionary_to_message(entry, cls)
        # then serialise this object in order to be sent in a generic form
        serialised_messages = serialised_messages + (dc_util.serialise_message(message), )
        # add ObjectID into meta as it might be useful later
        entry["_meta"]["_id"] = entry["_id"]
        # serialise meta
        metas = metas + (StringPairList([StringPair(dc_srv.MongoQueryMsgRequest.JSON_QUERY, json.dumps(entry["_meta"], default=json_util.default))]), )
    return [serialised_messages, metas]
def insert_ros_srv(self, req): """ Receives a """ # deserialize data into object obj = dc_util.deserialise_message(req.message) # convert input tuple to dict meta = dc_util.string_pair_list_to_dictionary(req.meta) # get requested collection from the db, creating if necessary collection = self._mongo_client[req.database][req.collection] #check if the object has the location attribute if hasattr(obj, 'pose'): # if it does create a location index collection.create_index([("loc", pymongo.GEO2D)]) #check if the object has the location attribute if hasattr(obj, 'geotype'): # if it does create a location index collection.create_index([("geoloc", pymongo.GEOSPHERE)]) #check if the object has the timestamp attribute TODO ?? really necessary #if hasattr(obj, 'logtimestamp'): # if it does create a location index # collection.create_index([("datetime", pymongo.GEO2D)]) # try: stamp = rospy.get_rostime() meta['inserted_at'] = datetime.utcfromtimestamp(stamp.to_sec()) meta['inserted_by'] = req._connection_header['callerid'] if hasattr(obj, "header") and hasattr(obj.header, "stamp") and\ isinstance(obj.header.stamp, genpy.Time): stamp = obj.header.stamp elif isinstance(obj, TFMessage): if obj.transforms: transforms = sorted(obj.transforms, key=lambda m: m.header.stamp, reverse=True) stamp = transforms[0].header.stamp meta['published_at'] = datetime.utcfromtimestamp(stamp.to_sec()) meta['timestamp'] = stamp.to_nsec() obj_id = dc_util.store_message(collection, obj, meta) if self.replicate_on_write: # also do insert to extra datacentres, making sure object ids are consistent for extra_client in self.extra_clients: extra_collection = extra_client[req.database][req.collection] dc_util.store_message(extra_collection, obj, meta, obj_id) return str(obj_id)
def insert_msg(message, database='message_store', collection='message_store'):
    """
    Insert *message* into the given database/collection with an empty
    meta document (plus insertion time and a fixed caller id), and
    return the new document's ObjectId as a string.
    """
    empty_meta_pairs = (StringPair(MongoQueryMsgRequest.JSON_QUERY,
                                   json.dumps({}, default=json_util.default)),)
    # round-trip through (de)serialisation to obtain a storable copy
    stored_obj = dc_util.deserialise_message(dc_util.serialise_message(message))
    meta = dc_util.string_pair_list_to_dictionary(StringPairList(empty_meta_pairs))
    target = _mongo_client[database][collection]
    meta['inserted_at'] = datetime.utcfromtimestamp(rospy.get_rostime().to_sec())
    # NOTE(review): placeholder caller id kept byte-for-byte for parity
    meta['inserted_by'] = 'asdf'
    return str(dc_util.store_message(target, stored_obj, meta))
def insert_ros_srv(self, req):
    """
    Insert the message from *req* into the requested collection,
    creating geospatial indexes when the message exposes location
    attributes, and return the ObjectId of the new document as a string.
    """
    msg_obj = dc_util.deserialise_message(req.message)
    msg_meta = dc_util.string_pair_list_to_dictionary(req.meta)
    store = self._mongo_client[req.database][req.collection]
    # a 'pose' attribute gets a 2d geospatial index on "loc"
    if hasattr(msg_obj, 'pose'):
        store.create_index([("loc", pymongo.GEO2D)])
    # a 'geotype' attribute gets a spherical index on "geoloc"
    if hasattr(msg_obj, 'geotype'):
        store.create_index([("geoloc", pymongo.GEOSPHERE)])
    msg_meta['inserted_at'] = datetime.utcfromtimestamp(rospy.get_rostime().to_sec())
    msg_meta['inserted_by'] = req._connection_header['callerid']
    inserted_id = dc_util.store_message(store, msg_obj, msg_meta)
    if self.replicate_on_write:
        # keep object ids identical across all datacentres
        for extra in self.extra_clients:
            dc_util.store_message(extra[req.database][req.collection],
                                  msg_obj, msg_meta, inserted_id)
    return str(inserted_id)
def insert_ros_srv(self, req):
    """
    Store the message from *req* in the requested collection and return
    its ObjectId as a string; geospatial indexes are created on demand
    for messages carrying 'pose' or 'geotype' attributes.
    """
    record = dc_util.deserialise_message(req.message)
    record_meta = dc_util.string_pair_list_to_dictionary(req.meta)
    coll = self._mongo_client[req.database][req.collection]
    # index creation driven by the attributes the message exposes
    for attr, index_spec in (('pose', [("loc", pymongo.GEO2D)]),
                             ('geotype', [("geoloc", pymongo.GEOSPHERE)])):
        if hasattr(record, attr):
            coll.create_index(index_spec)
    record_meta['inserted_at'] = datetime.utcfromtimestamp(rospy.get_rostime().to_sec())
    record_meta['inserted_by'] = req._connection_header['callerid']
    record_id = dc_util.store_message(coll, record, record_meta)
    if self.replicate_on_write:
        # propagate to extra datacentres under the same object id
        for extra_client in self.extra_clients:
            dc_util.store_message(extra_client[req.database][req.collection],
                                  record, record_meta, record_id)
    return str(record_id)
def update_ros_srv(self, req):
    """
    Update (optionally upserting) a stored message selected by the
    combined message/meta query and its stored type.

    Returns the object id as a string together with the altered flag
    reported by the datastore.
    """
    # build the selector first: combined query plus the stored type
    selector = self.to_query_dict(req.message_query, req.meta_query)
    selector["_meta.stored_type"] = req.message.type
    rospy.logdebug("update spec document: %s", selector)
    message = dc_util.deserialise_message(req.message)
    meta = dc_util.string_pair_list_to_dictionary(req.meta)
    seconds = rospy.get_rostime().to_sec()
    meta['last_updated_at'] = datetime.utcfromtimestamp(seconds)
    meta['last_updated_by'] = req._connection_header['callerid']
    primary = self._mongo_client[req.database][req.collection]
    obj_id, altered = dc_util.update_message(primary, selector, message,
                                             meta, req.upsert)
    if self.replicate_on_write:
        # apply the same update to every extra datacentre
        for extra_client in self.extra_clients:
            extra_collection = extra_client[req.database][req.collection]
            dc_util.update_message(extra_collection, selector, message,
                                   meta, req.upsert)
    return str(obj_id), altered
def query_messages_ros_srv(self, req):
    """
    Query the given collection for stored messages of type req.type,
    honouring the sort and projection specs in *req*.

    Returns [serialised_messages, metas]: a tuple of serialised ROS
    messages and a matching tuple of StringPairList metas, each carrying
    the JSON-encoded "_meta" document with the ObjectId added as "_id".
    """
    collection = self._mongo_client[req.database][req.collection]
    # build the query doc
    obj_query = self.to_query_dict(req.message_query, req.meta_query)
    # restrict results to have the type asked for
    obj_query["_meta.stored_type"] = req.type
    # TODO start using some string constants!
    rospy.logdebug("query document: %s", obj_query)
    # convert the sort spec into pymongo (field, direction) tuples;
    # numeric strings become ints, anything else passes through as-is
    sort_query_dict = dc_util.string_pair_list_to_dictionary(
        req.sort_query)
    sort_query_tuples = []
    for k, v in iteritems(sort_query_dict):
        try:
            sort_query_tuples.append((k, int(v)))
        except ValueError:
            sort_query_tuples.append((k, v))
    # projection limiting the fields returned for each entry
    projection_query_dict = dc_util.string_pair_list_to_dictionary(
        req.projection_query)
    # companion projection that fetches only "_meta", used to recover
    # the ObjectId when a user projection excludes it
    projection_meta_dict = dict()
    projection_meta_dict["_meta"] = 1
    entries = dc_util.query_message(collection, obj_query,
                                    sort_query_tuples,
                                    projection_query_dict, req.single,
                                    req.limit)
    if projection_query_dict:
        meta_entries = dc_util.query_message(collection, obj_query,
                                             sort_query_tuples,
                                             projection_meta_dict,
                                             req.single, req.limit)
    # keep trying clients until we find an answer
    if self.replicate_on_write:
        for extra_client in self.extra_clients:
            if len(entries) == 0:
                extra_collection = extra_client[req.database][
                    req.collection]
                entries = dc_util.query_message(extra_collection,
                                                obj_query,
                                                sort_query_tuples,
                                                projection_query_dict,
                                                req.single, req.limit)
                if projection_query_dict:
                    meta_entries = dc_util.query_message(
                        extra_collection, obj_query, sort_query_tuples,
                        projection_meta_dict, req.single, req.limit)
                if len(entries) > 0:
                    rospy.loginfo("found result in extra datacentre")
            else:
                break
    serialised_messages = ()
    metas = ()
    for idx, entry in enumerate(entries):
        # load the class object for this type
        # TODO this should be the same for every item in the list, so could reuse
        cls = dc_util.load_class(entry["_meta"]["stored_class"])
        # instantiate the ROS message object from the dictionary retrieved from the db
        message = dc_util.dictionary_to_message(entry, cls)
        # then serialise this object in order to be sent in a generic form
        serialised_messages = serialised_messages + (
            dc_util.serialise_message(message), )
        # add ObjectID into meta as it might be useful later; with a
        # user projection, take it from the meta-only query results
        if projection_query_dict:
            entry["_meta"]["_id"] = meta_entries[idx]["_id"]
        else:
            entry["_meta"]["_id"] = entry["_id"]
        # serialise meta
        metas = metas + (StringPairList([
            StringPair(
                dc_srv.MongoQueryMsgRequest.JSON_QUERY,
                json.dumps(entry["_meta"], default=json_util.default))
        ]), )
    return [serialised_messages, metas]