def test_get_doc_cursor(self):
    '''Checks that get_doc_cursor yields exactly the documents whose
    timestamps lie inside the queried [start_time, stop_time] window.
    '''
    query_start = 1544441409.88607
    query_stop = 1544441411.28594
    cursor = DBUtils.get_doc_cursor(db_name=self.test_db_name,
                                    collection_name='ros_ropod_cmd_vel',
                                    start_time=query_start,
                                    stop_time=query_stop)
    retrieved_docs = list(cursor)
    self.assertEqual(len(retrieved_docs), 9)
    for retrieved_doc in retrieved_docs:
        self.assertLessEqual(retrieved_doc['timestamp'], query_stop)
        self.assertGreaterEqual(retrieved_doc['timestamp'], query_start)
# Example #2
    def get_data(self,
                 collection_name,
                 variable_names,
                 start_time=-1,
                 end_time=-1):
        '''Collects the values of the given variables from the given collection.

        Returns a dictionary mapping each full variable name (a string of the
        form "<collection_name>/<variable_name>", where the variable name is a
        flattened version of a variable stored in the collection) to a list of
        strings of the form "[<timestamp>, <value>]", one entry per document in
        the queried time window; string entries are used so that the values
        may be of arbitrary type.

        Keyword arguments:
        @param collection_name -- name of a collection in the log database
        @param variable_names -- list of variable names to retrieve from the collection
        @param start_time -- UNIX timestamp in seconds lower-bounding the
                             timestamps of the retrieved data; -1 means no
                             lower bound (default -1)
        @param end_time -- UNIX timestamp in seconds upper-bounding the
                           timestamps of the retrieved data; -1 means no
                           upper bound (default -1)

        '''
        # map each short variable name to its full "<collection>/<name>" form
        # and prepare an empty result list per full name
        full_names = {var_name: '{0}/{1}'.format(collection_name, var_name)
                      for var_name in variable_names}
        collected_data = {full_name: [] for full_name in full_names.values()}

        doc_cursor = DBUtils.get_doc_cursor(self.db_name, collection_name,
                                            start_time, end_time)
        for document in doc_cursor:
            timestamp = document['timestamp']
            for var_name in variable_names:
                value = DataUtils.get_var_value(document, var_name)
                collected_data[full_names[var_name]].append(
                    '[{0}, {1}]'.format(timestamp, value))
        return collected_data
    def __init__(self, **kwargs):
        '''Prepares playback of logged black box data: determines the playback
        window, creates a synchronizer process maintaining a global playback
        clock, and spawns one publisher process per data collection in the
        database. All publisher processes are started immediately, followed by
        the synchronizer process.

        Keyword arguments:
        @param db_name -- name of the log database (default "logs")
        @param sync_time -- forwarded to each topic manager's publish_data
                            (default True); presumably ties publishing to the
                            global clock -- confirm against TopicUtils
        @param time_step -- clock increment passed to the synchronizer (default 1.0)
        @param sleep_duration -- sleep duration passed to the synchronizer (default 0.5)
        @param start_time -- UNIX timestamp at which playback starts
                             (default: oldest timestamp in the database)
        @param stop_time -- UNIX timestamp at which playback stops
                            (default: newest timestamp in the database)

        '''
        self.black_box_db_name = kwargs.get('db_name', 'logs')
        self.sync_time = kwargs.get('sync_time', True)
        self.time_step = kwargs.get('time_step', 1.0)
        self.sleep_duration = kwargs.get('sleep_duration', 0.5)
        # the timestamps recorded in the database bound the playback window
        actual_start_time = DBUtils.get_db_oldest_timestamp(
            self.black_box_db_name)
        actual_stop_time = DBUtils.get_db_newest_timestamp(
            self.black_box_db_name)
        self.start_timestamp = kwargs.get('start_time', actual_start_time)
        self.stop_timestamp = kwargs.get('stop_time', actual_stop_time)
        # fall back to the full recorded span if the requested window lies
        # (partially) outside the recorded data or is inverted
        if actual_start_time > self.start_timestamp or \
                actual_stop_time < self.stop_timestamp or \
                self.start_timestamp > self.stop_timestamp :
            print(
                "WARNING: Incorrect start or stop time. Using default duration"
            )
            self.start_timestamp = actual_start_time
            self.stop_timestamp = actual_stop_time
        # the global playback clock starts at the beginning of the window
        self.current_time = self.start_timestamp

        # playback state; presumably changed elsewhere in the class
        # (e.g. on a play command) -- confirm
        self.status = "PAUSED"

        self.topic_managers = []
        self.topic_manager_threads = []

        data_collections = DBUtils.get_data_collection_names(
            self.black_box_db_name)

        # create list of locks for syncing (each for one topic)
        self.locks = [
            multiprocessing.Lock() for collection in data_collections
        ]
        # create list of queues to access the global current time
        # (maintained by the synchronizer), one per topic
        self.queues = [
            multiprocessing.Queue() for collection in data_collections
        ]
        self.queues.append(
            multiprocessing.Queue())  # for current (parent) process

        # duplex pipe for pause/resume signalling between this process
        # and the synchronizer
        sync_pause_conn, self.rosbag_pause_conn = multiprocessing.Pipe(
            duplex=True)

        # create synchronizer object and assign it to a daemon process
        # that increments the global playback clock
        self.sync = Syncronizer(self.start_timestamp,
                                self.locks,
                                self.queues,
                                sync_pause_conn,
                                time_step=self.time_step,
                                sleep_duration=self.sleep_duration)
        self.sync_thread = multiprocessing.Process(
            target=self.sync.increment_time, daemon=True)

        # create a topic manager and a publisher process for each collection
        for i, collection in enumerate(data_collections):
            collection_metadata = DBUtils.get_collection_metadata(
                self.black_box_db_name, collection)
            topic_manager = TopicUtils(
                collection_metadata['ros']['topic_name'],
                collection_metadata['ros']['msg_type'],
                collection_metadata['ros']['direct_msg_mapping'])
            self.topic_managers.append(topic_manager)

            # cursor over this collection's documents within the playback window
            data_cursor = DBUtils.get_doc_cursor(self.black_box_db_name,
                                                 collection,
                                                 self.start_timestamp,
                                                 self.stop_timestamp)
            data_thread = multiprocessing.Process(
                target=topic_manager.publish_data,
                kwargs={
                    'dict_msgs': data_cursor,
                    'sync_time': self.sync_time,
                    'global_clock_start': self.start_timestamp,
                    'lock': self.locks[i],
                    'queue': self.queues[i]
                },
                daemon=True)
            self.topic_manager_threads.append(data_thread)
        # start all publisher processes first, then the playback clock
        for data_thread in self.topic_manager_threads:
            data_thread.start()
        self.sync_thread.start()