def test_replication(self):
    """End-to-end check of replicator_client: move stored entries to the
    replica datacentre, then verify --delete-after-move removes them from
    the source.
    """
    replication_db = "replication_test"
    replication_col = "replication_test"
    # connect to destination for replication
    try:
        self.assertTrue(wait_for_mongo(ns="/datacentre2"),
                        "wait for mongodb server")
        # Destination mongod is expected on a fixed forwarded port.
        dst_client = import_MongoClient()("localhost", 49163)
        count = dst_client[replication_db][replication_col].count()
        self.assertEqual(count, 0, "No entry in destination")
    except pymongo.errors.ConnectionFailure:
        self.fail("Failed to connect to destination for replication")

    # insert an entry to move
    self.assertTrue(wait_for_mongo(), "wait for mongodb server")
    msg_store = MessageStoreProxy(
        database=replication_db, collection=replication_col)
    msg = Wrench()
    msg_name = "replication test message"
    self.assertIsNotNone(msg_store.insert_named(msg_name, msg),
                         "inserted message")

    # move entries; '--move-before 0' means "move everything inserted
    # before now"
    rospy.sleep(3)
    retcode = subprocess.check_call([
        get_script_path(), '--move-before', '0',
        replication_db, replication_col])
    self.assertEqual(retcode, 0, "replicator_client returns code 0")

    # check if replication was succeeded
    rospy.sleep(3)
    count = dst_client[replication_db][replication_col].count()
    self.assertGreater(count, 0, "entry moved to the destination")

    # test deletion after move
    data, meta = msg_store.query_named(msg_name, Wrench._type)
    self.assertIsNotNone(data, "entry is still in source")
    retcode = subprocess.check_call([
        get_script_path(), '--move-before', '0', '--delete-after-move',
        replication_db, replication_col])
    self.assertEqual(retcode, 0, "replicator_client returns code 0")
    # BUG FIX: query the same name that was inserted (msg_name). The
    # original queried "replication test", a name that never existed, so
    # the assertIsNone below passed vacuously and did not actually test
    # deletion.
    data, meta = msg_store.query_named(msg_name, Wrench._type)
    self.assertIsNone(data, "moved entry is deleted from source")
def __init__(self, db_name='jsk_robot_lifelog', col_name=None, ensure_index=True):
    """Set up the message store connection for a lifelog logger.

    Args:
        db_name: database name; used as the fallback when the
            '/robot/database' rosparam is not set.
        col_name: collection name; defaults to the '/robot/name' rosparam.
        ensure_index: when True, create indices on the metadata keys used
            by lifelog queries.
    """
    super(LoggerBase, self).__init__()
    # BUG FIX: honor the db_name argument. The original discarded it and
    # always fell back to the hard-coded 'jsk_robot_lifelog'; using it as
    # the rosparam default keeps behavior identical for callers that omit
    # db_name while making the parameter actually work.
    self.db_name = rospy.get_param('/robot/database', db_name)
    try:
        if col_name is None:
            self.col_name = rospy.get_param('/robot/name')
        else:
            self.col_name = col_name
    except KeyError as e:
        rospy.logerr(
            "please specify param \"/robot/name\" (e.g. pr1012, olive)")
        exit(1)
    self.task_id = None
    self.msg_store = MessageStoreProxy(database=self.db_name,
                                       collection=self.col_name)
    rospy.loginfo("connected to %s.%s" % (self.db_name, self.col_name))
    if ensure_index:
        # Best effort: index the metadata keys queried by lifelog tools.
        try:
            MongoClient = MU.import_MongoClient()
            host = rospy.get_param("/mongodb_host")
            port = rospy.get_param("/mongodb_port")
            client = MongoClient(host, port)
            c = client[self.db_name][self.col_name]
            # index_information() values hold [('key', direction)] pairs;
            # take the key name of each index's first field.
            indices = [
                i['key'][0][0] for i in c.index_information().values()
            ]
            keys = ["_meta.stored_type", "_meta.inserted_at"]
            for key in keys:
                if key not in indices:
                    rospy.loginfo("Creating index for key '%s'" % key)
                    c.ensure_index(key)
                    rospy.loginfo("Created index for key '%s'" % key)
            client.close()
        except Exception as e:
            # Indexing is an optimization; never abort logging over it.
            rospy.logerr("Failed to ensure index: %s" % e)
def test_replication_with_query(self):
    """Check that replicator_client honors --query and replicates only the
    entries matching the given stored-type filter.
    """
    replication_db = "replication_test_with_query"
    replication_col = "replication_test_with_query"
    # connect to destination for replication
    try:
        self.assertTrue(wait_for_mongo(ns="/datacentre2"),
                        "wait for mongodb server")
        dst_client = import_MongoClient()("localhost", 49163)
        dst_collection = dst_client[replication_db][replication_col]
        self.assertEqual(dst_collection.count(), 0, "No entry in destination")
    except pymongo.errors.ConnectionFailure:
        self.fail("Failed to connect to destination for replication")

    # insert an entry to move
    self.assertTrue(wait_for_mongo(), "wait for mongodb server")
    msg_store = MessageStoreProxy(
        database=replication_db, collection=replication_col)
    # Five Wrench and five Pose messages, interleaved per iteration.
    for idx in range(5):
        wrench_msg = Wrench()
        wrench_msg.force.x = idx
        msg_store.insert(wrench_msg)
        pose_msg = Pose()
        pose_msg.position.x = idx
        msg_store.insert(pose_msg)

    # move entries with query (only Pose-typed documents should match)
    rospy.sleep(3)
    stored_type_query = {'_meta.stored_type': Pose._type}
    retcode = subprocess.check_call([
        get_script_path(),
        '--move-before', '0',
        '--query', json_util.dumps(stored_type_query),
        replication_db, replication_col])
    self.assertEqual(retcode, 0, "replicator_client returns code 0")

    # check if replication was succeeded
    rospy.sleep(3)
    self.assertEqual(dst_collection.count(), 5,
                     "replicated entry exists in destination")
""" Bridges mongo to ros, providing services to query the database. Not efficient, so should be only used when no other option is available; if you can use a mongodb client library like pymongo, use it, if you need to query from client side javascript on a webpage with rosbridge/librosjs then use this. """ import rospy import mongodb_store.util as dc_util from mongodb_store.srv import * import pymongo import bson.json_util import json MongoClient = dc_util.import_MongoClient() class MongoBridge(object): def __init__(self): rospy.init_node("mongo_bridge") have_dc = dc_util.wait_for_mongo() if not have_dc: raise Exception("No Datacentre?") self._mongo_client=pymongo.MongoClient(rospy.get_param("mongodb_host"), rospy.get_param("mongodb_port") ) # advertise ros services for attr in dir(self): if attr.endswith("_ros_srv"):
#!/usr/bin/env python import rospy import mongodb_store_msgs.srv as dc_srv import mongodb_store.util as dc_util import pymongo import json from bson import json_util from mongodb_store_msgs.msg import StringPair, StringPairList from bson.objectid import ObjectId from datetime import * MongoClient = dc_util.import_MongoClient() if __name__ == "__main__": rospy.init_node("replacer") mongo_client = MongoClient(rospy.get_param("mongodb_host"), rospy.get_param("mongodb_port") ) # collection = mongo_client['message_store']['ws_observations'] collection = mongo_client['message_store']['soma'] # field = '_meta.stored_type' # old_value = 'ros_datacentre_msgs/SerialisedMessage' # new_value = 'mongodb_store/SerialisedMessage' # field = '_meta.stored_class' # old_value = 'ros_datacentre_msgs.msg._SerialisedMessage.SerialisedMessage' # new_value = 'mongodb_store.msg._SerialisedMessage.SerialisedMessage' field = 'map' old_value = '/home/gemignani/catkin_workspace/src/spatio-temporal-cues/strands_morse/sapienza/maps/map.yaml'
import rospy
import mongodb_store.util as mg_util
import sys
import time
import pymongo
from multiprocessing import Process
import calendar
import datetime
import threading
import multiprocessing
from rosgraph_msgs.msg import Clock
import signal
import Queue
from optparse import OptionParser

MongoClient = mg_util.import_MongoClient()

# Dot-delimited document path used to sort/query entries by message time.
TIME_KEY = 'header.stamp'


def max_time(collection):
    """Return the latest header.stamp stored in *collection*."""
    newest = collection.find_one(sort=[(TIME_KEY, pymongo.DESCENDING)])
    return newest['header']['stamp']


def min_time(collection):
    """Return the earliest header.stamp stored in *collection*."""
    oldest = collection.find_one(sort=[(TIME_KEY, pymongo.ASCENDING)])
    return oldest['header']['stamp']


def to_ros_time(dt):
    """Convert a UTC datetime.datetime into a rospy.Time."""
    secs = calendar.timegm(dt.utctimetuple())
    nsecs = dt.microsecond * 1000
    return rospy.Time(secs, nsecs)


def to_datetime(rt):
    """Convert a rospy.Time into a UTC datetime.datetime."""
    whole_seconds = datetime.datetime.utcfromtimestamp(rt.secs)
    return whole_seconds + datetime.timedelta(microseconds=rt.nsecs / 1000)
import rospy import mongodb_store.util as mg_util import sys import time import pymongo from multiprocessing import Process import calendar import datetime import threading import multiprocessing from rosgraph_msgs.msg import Clock import signal import Queue from optparse import OptionParser MongoClient = mg_util.import_MongoClient() TIME_KEY = 'header.stamp' def max_time(collection): return collection.find_one(sort=[(TIME_KEY, pymongo.DESCENDING)])['header']['stamp'] def min_time(collection): return collection.find_one(sort=[(TIME_KEY, pymongo.ASCENDING)])['header']['stamp'] def to_ros_time(dt):