def test_get_collection_metadata(self):
    """Check that the stored metadata of the cmd_vel collection is retrieved intact."""
    metadata = DBUtils.get_collection_metadata(db_name=self.test_db_name,
                                               collection_name='ros_ropod_cmd_vel')
    # the Mongo object ID is generated per insert and is not part of the fixture
    metadata.pop('_id')
    expected = {
        'collection_name': 'ros_ropod_cmd_vel',
        'ros': {
            'msg_type': 'geometry_msgs/Twist',
            'direct_msg_mapping': True,
            'topic_name': '/ropod/cmd_vel'
        }
    }
    self.assertDictEqual(metadata, expected)
def __init__(self, **kwargs):
    """Set up per-topic publisher processes and a clock synchronizer for
    replaying logged black-box data.

    Keyword Args:
        db_name (str): name of the black-box database (default 'logs')
        sync_time (bool): whether publishing is synchronized with the
            global replay clock (default True)
        time_step (float): clock increment per synchronization step
            (default 1.0)
        sleep_duration (float): sleep between clock increments
            (default 0.5)
        start_time (float): replay start timestamp; defaults to the
            oldest timestamp in the database
        stop_time (float): replay stop timestamp; defaults to the
            newest timestamp in the database
    """
    self.black_box_db_name = kwargs.get('db_name', 'logs')
    self.sync_time = kwargs.get('sync_time', True)
    self.time_step = kwargs.get('time_step', 1.0)
    self.sleep_duration = kwargs.get('sleep_duration', 0.5)

    actual_start_time = DBUtils.get_db_oldest_timestamp(self.black_box_db_name)
    actual_stop_time = DBUtils.get_db_newest_timestamp(self.black_box_db_name)
    self.start_timestamp = kwargs.get('start_time', actual_start_time)
    self.stop_timestamp = kwargs.get('stop_time', actual_stop_time)

    # fall back to the full logged duration if the requested window lies
    # outside the available data range or is inverted
    if actual_start_time > self.start_timestamp or \
            actual_stop_time < self.stop_timestamp or \
            self.start_timestamp > self.stop_timestamp:
        print("WARNING: Incorrect start or stop time. Using default duration")
        self.start_timestamp = actual_start_time
        self.stop_timestamp = actual_stop_time

    self.current_time = self.start_timestamp
    self.status = "PAUSED"
    self.topic_managers = []
    self.topic_manager_threads = []

    data_collections = DBUtils.get_data_collection_names(self.black_box_db_name)

    # one lock per topic for syncing access to the global replay clock
    self.locks = [multiprocessing.Lock() for _ in data_collections]

    # one queue per topic through which the synchronizer distributes the
    # global current time, plus one extra for the current (parent) process
    self.queues = [multiprocessing.Queue() for _ in data_collections]
    self.queues.append(multiprocessing.Queue())

    sync_pause_conn, self.rosbag_pause_conn = multiprocessing.Pipe(duplex=True)

    # the synchronizer maintains the global replay clock in its own process
    self.sync = Syncronizer(self.start_timestamp, self.locks, self.queues,
                            sync_pause_conn, time_step=self.time_step,
                            sleep_duration=self.sleep_duration)
    self.sync_thread = multiprocessing.Process(target=self.sync.increment_time,
                                               daemon=True)

    # create one publisher process per logged topic
    for i, collection in enumerate(data_collections):
        collection_metadata = DBUtils.get_collection_metadata(
            self.black_box_db_name, collection)
        topic_manager = TopicUtils(
            collection_metadata['ros']['topic_name'],
            collection_metadata['ros']['msg_type'],
            collection_metadata['ros']['direct_msg_mapping'])
        self.topic_managers.append(topic_manager)

        data_cursor = DBUtils.get_doc_cursor(self.black_box_db_name, collection,
                                             self.start_timestamp,
                                             self.stop_timestamp)
        data_thread = multiprocessing.Process(
            target=topic_manager.publish_data,
            kwargs={'dict_msgs': data_cursor,
                    'sync_time': self.sync_time,
                    'global_clock_start': self.start_timestamp,
                    'lock': self.locks[i],
                    'queue': self.queues[i]},
            daemon=True)
        self.topic_manager_threads.append(data_thread)

    for data_thread in self.topic_manager_threads:
        data_thread.start()
    self.sync_thread.start()