Code Example #1
import os

import matplotlib.pyplot as plt
import numpy as np
from rosbag import Bag


def get_info(bag_file, topic_filter=None):
    bag = Bag(bag_file)
    topics = bag.get_type_and_topic_info().topics
    for topic in topics:
        if topic_filter and topics[topic].msg_type not in topic_filter:
            continue
        print("{}: {} Hz".format(topic, round(topics[topic].frequency, 3)))
        print(topics[topic].message_count)
        times = np.ones(shape=bag.get_message_count(topic_filters=topic))
        n = 0
        for _, msg, t in bag.read_messages(topics=topic):
            times[n] = msg.header.stamp.to_sec()
            n += 1
        # Convert header stamps to instantaneous rates (Hz) between consecutive messages
        times = 1 / np.gradient(times)
        # Keep only rates between the 10th and 90th percentiles to drop outliers
        times = times[np.where((times > np.percentile(times, 10)) & (times < np.percentile(times, 90)))]
        print("mean: {}, median: {}".format(np.mean(times), np.median(times)))
        print("min: {}, max: {}".format(np.min(times), np.max(times)))
        # plt.scatter(times, np.gradient(times))
        plt.hist(times)
        plt.yscale("log")
        plt.title("{}: {}".format(os.path.basename(bag_file), topic))
        if not os.path.exists("images"):
            os.makedirs("images")
        plt.savefig(os.path.join("images/", "{}.{}.png".format(os.path.basename(bag_file), topic.replace("/", ""))))
        plt.cla()
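
A minimal usage sketch (the bag path and the message-type filter below are hypothetical):

if __name__ == "__main__":
    # Report and plot message rates only for IMU and image topics of one bag.
    get_info("example.bag", topic_filter=["sensor_msgs/Imu", "sensor_msgs/Image"])
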
Code Example #2
from typing import List, Tuple

import rosbag


def d8n_get_all_images_topic_bag(bag: rosbag.Bag, min_messages: int = 0) -> List[Tuple[str, str]]:
    """
    Returns the (name, type) of all topics that look like images
    and that have nonzero message count.
    """
    tat = bag.get_type_and_topic_info()
    consider_images = [
        "sensor_msgs/Image",
        "sensor_msgs/CompressedImage",
    ]
    all_types = set()
    found = []
    topics = tat.topics
    for t, v in list(topics.items()):
        msg_type = v.msg_type
        all_types.add(msg_type)
        message_count = v.message_count
        if msg_type in consider_images:

            # quick fix: ignore image_raw if we have image_compressed version
            if "raw" in t:
                other = t.replace("raw", "compressed")

                if other in topics:
                    continue

            if message_count < min_messages:
                # print('ignoring topic %r because message_count = 0' % t)
                continue

            found.append((t, msg_type))
    return found
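
A minimal usage sketch (the bag path is hypothetical):

import rosbag

with rosbag.Bag("example.bag") as bag:
    # List every image topic that carries at least one message.
    for topic, msg_type in d8n_get_all_images_topic_bag(bag, min_messages=1):
        print(topic, msg_type)
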
Code Example #3
File: agent.py  Project: tuuzdu/de_robonomics
        def incoming_ask(ask_msg):
            rospy.loginfo('Incoming ask:\n' + str(ask_msg))
            if ask_msg.model == self.model and ask_msg.token == self.token:
                rospy.loginfo('Incoming ask with right model and token')

                with TemporaryDirectory() as tmpdir:
                    rospy.logdebug('Temporary directory created: %s', tmpdir)
                    os.chdir(tmpdir)
                    rospy.loginfo('Waiting for objective file from IPFS...')
                    try:
                        self.ipfs.get(ask_msg.objective, timeout=60)
                        rospy.logdebug('Objective is written to %s', tmpdir + '/' + ask_msg.objective)
                        bag = Bag(str(tmpdir + '/' + ask_msg.objective))
                        msg_type = list(bag.get_type_and_topic_info()[1].values())[0][0]
                        if msg_type != Mission()._type:
                            rospy.logwarn('Wrong message type in objective topic')
                            return
                    except ipfsapi.exceptions.TimeoutError:
                        rospy.logwarn('Timeout of waiting objective file: ' + str(ask_msg.objective))
                        rospy.logwarn('Skip incoming ask')
                        return

                self.make_bid(ask_msg)
            else:
                rospy.logwarn('Incoming ask with wrong model or token, skip')
Code Example #4
File: test_snapshot.py  Project: ros/rosbag_snapshot
    def _assert_bag_valid(self, filename, topics=None, start_time=None, stop_time=None):
        '''
        Open the bagfile at the specified filename and read it to ensure topic limits were
        enforced and the optional topic list and start/stop times are also enforced.
        '''
        bag = Bag(filename)
        topics_dict = bag.get_type_and_topic_info()[1]
        bag_topics = set(topics_dict.keys())
        param_topics = set(self.topic_limits.keys())
        if topics:
            self.assertEqual(bag_topics, set(topics))
        self.assertTrue(bag_topics.issubset(param_topics))
        for topic in topics_dict:
            # Calculate stored message size as each message is 8 bytes
            size = topics_dict[topic].message_count * 8
            count = topics_dict[topic].message_count
            gen = bag.read_messages(topics=topic)
            _, _, first_time = next(gen)
            last_time = first_time  # in case the next for loop does not execute
            if start_time:
                self.assertGreaterEqual(first_time, start_time)
            for _, _, last_time in gen:  # Read through all messages so last_time is valid
                pass
            if stop_time:
                self.assertLessEqual(last_time, stop_time)
            duration = last_time - first_time
            self._assert_limits_enforced(topic, duration, size, count)
Code Example #5
def process_bag(bag_in_fn, bag_out_fn, conf_file_fn):
    # Bag is rosbag.Bag; read_rules, get_begin_end, get_topics and FilterRule are
    # helpers defined elsewhere in the same project.
    bag_in = Bag(bag_in_fn)
    bag_out = Bag(bag_out_fn, 'w')

    include_rules, exclude_rules, time_rules = read_rules(conf_file_fn)
    topic_rules = include_rules + exclude_rules

    # Set each rule's begin time to the UNIX time at the start of the bag file
    for r in topic_rules:
        r.set_begin_time(bag_in.get_start_time())
    for r in time_rules:
        r.set_begin_time(bag_in.get_start_time())

    # Find start and end times that are actually required
    t_start, t_end = get_begin_end(time_rules, bag_in)
    # Find topics that are actually required
    bag_topics = bag_in.get_type_and_topic_info().topics.keys()
    topics = get_topics(topic_rules, bag_topics)

    messages = bag_in.read_messages(topics=topics,
                                    start_time=t_start,
                                    end_time=t_end,
                                    return_connection_header=True)
    for topic, msg, t, conn_header in messages:
        # Check default enforcement for this message
        if FilterRule.is_tf_topic(topic):
            default = FilterRule.DEFAULT_ENFORCEMENT_TF
        else:
            default = FilterRule.DEFAULT_ENFORCEMENT_TOPIC

        # When default is to include, only check whether the exclusion
        # rules are satisfied, and if all of them are ok, write it out
        if default == FilterRule.INCLUDE:
            # Check exclusions
            ok = True
            for r in exclude_rules:
                if r.msg_match(topic, msg, t):
                    ok = False
        # When default is to exclude, check if the message matches any
        # of the inclusion rules and write it out if it does
        else:
            # Check inclusions
            ok = False
            for r in include_rules:
                if r.msg_match(topic, msg, t):
                    ok = True

        # Time rules
        time_ok = True
        for r in time_rules:
            if not r.is_ok_with(topic, msg, t):
                time_ok = False

        # Write to file
        if ok and time_ok:
            bag_out.write(topic, msg, t, connection_header=conn_header)

    bag_out.close()
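
A minimal usage sketch (all three paths are hypothetical; the rules-file format is whatever the project's read_rules helper expects):

process_bag("input.bag", "filtered.bag", "rules.yaml")
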
Code Example #6
File: rosbag_.py  Project: zhhongsh/fdw-li3ds
    def import_schema(self, schema, srv_options, options, restriction_type,
                      restricts):
        Bag = import_bag(srv_options)
        pcid_str = options.pop('pcid', srv_options.pop('pcid', 0))
        pcid = int(pcid_str)
        patch_column = options.pop('patch_column',
                                   srv_options.pop('patch_column', 'points'))
        patch_columns = options.pop('patch_columns', '*').strip()
        patch_columns = [
            col.strip() for col in patch_columns.split(',') if col.strip()
        ]
        filename = srv_options.pop('rosbag_path', "") + options.pop(
            'rosbag_path', "") + schema
        bag = Bag(filename, 'r')

        tablecols = []
        topics = bag.get_type_and_topic_info().topics
        pcid_for_topic = {k: pcid + 1 + i for i, k in enumerate(topics.keys())}
        pointcloud_formats = True
        if restriction_type == 'limit':
            topics = {k: v for k, v in topics.items() if k in restricts}
            pointcloud_formats = 'pointcloud_formats' in restricts
        elif restriction_type == 'except':
            topics = {k: v for k, v in topics.items() if k not in restricts}
            pointcloud_formats = 'pointcloud_formats' not in restricts

        tabledefs = []
        if pointcloud_formats:
            tablecols = [
                ColumnDefinition('pcid', type_name='integer'),
                ColumnDefinition('srid', type_name='integer'),
                ColumnDefinition('schema', type_name='text'),
                ColumnDefinition('format', type_name='text'),
                ColumnDefinition('rostype', type_name='text'),
                ColumnDefinition('columns', type_name='text[]'),
                ColumnDefinition('ply_header', type_name='text'),
            ]
            tableopts = {
                'metadata': 'true',
                'rosbag': schema,
                'pcid': pcid_str
            }
            tabledefs.append(
                TableDefinition("pointcloud_formats",
                                columns=tablecols,
                                options=tableopts))

        for topic, infos in topics.items():
            pcid = pcid_for_topic[topic]
            columns, _, _, _, _, _ = get_columns(bag, topic, infos, pcid,
                                                 patch_column, patch_columns)
            tablecols = [get_column_def(k, *v) for k, v in columns.items()]
            tableopts = {'topic': topic, 'rosbag': schema, 'pcid': str(pcid)}
            tabledefs.append(
                TableDefinition(topic, columns=tablecols, options=tableopts))
        return tabledefs
Code Example #7
    def process_bag(self, bag: rosbag.Bag) -> None:
        info = bag.get_type_and_topic_info()
        bag_topics = set(info.topics.keys())

        print('Bag Name: {}'.format(bag.filename))
        size = len(max(self.topic_set.keys(), key=len)) + 1
        for mode, topics in self.topic_set.items():
            if bag_topics.issubset(topics):
                print(('{:<' + str(size) + '} --SUBSET by {} topics').format(
                    mode + ':', len(topics - bag_topics), size=size))
            elif bag_topics.issuperset(topics):
                print(('{:<' + str(size) + '} ++SUPERSET').format(mode + ':',
                                                                  size=size))
            else:
                print(('{:<' + str(size) + '} off by {}').format(
                    mode + ':', len(topics - bag_topics), size=size))
        print('-------------------')
Code Example #8
File: rosbag_.py  Project: LI3DS/fdw-pointcloud
    def import_schema(self, schema, srv_options, options,
                      restriction_type, restricts):
        Bag = import_bag(srv_options)
        pcid_str = options.pop('pcid', srv_options.pop('pcid', 0))
        pcid = int(pcid_str)
        patch_column = options.pop('patch_column', srv_options.pop('patch_column', 'points'))
        patch_columns = options.pop('patch_columns', '*').strip()
        patch_columns = [col.strip() for col in patch_columns.split(',') if col.strip()]
        filename = srv_options.pop('rosbag_path', "") + options.pop('rosbag_path', "") + schema
        bag = Bag(filename, 'r')

        tablecols = []
        topics = bag.get_type_and_topic_info().topics
        pcid_for_topic = {k: pcid+1+i for i, k in enumerate(topics.keys())}
        pointcloud_formats = True
        if restriction_type == 'limit':
            topics = {k: v for k, v in topics.items() if k in restricts}
            pointcloud_formats = 'pointcloud_formats' in restricts
        elif restriction_type == 'except':
            topics = {k: v for k, v in topics.items() if k not in restricts}
            pointcloud_formats = 'pointcloud_formats' not in restricts

        tabledefs = []
        if pointcloud_formats:
            tablecols = [
                ColumnDefinition('pcid', type_name='integer'),
                ColumnDefinition('srid', type_name='integer'),
                ColumnDefinition('schema', type_name='text'),
                ColumnDefinition('format', type_name='text'),
                ColumnDefinition('rostype', type_name='text'),
                ColumnDefinition('columns', type_name='text[]'),
                ColumnDefinition('ply_header', type_name='text'),
            ]
            tableopts = {'metadata': 'true', 'rosbag': schema, 'pcid': pcid_str}
            tabledefs.append(TableDefinition("pointcloud_formats", columns=tablecols,
                                             options=tableopts))

        for topic, infos in topics.items():
            pcid = pcid_for_topic[topic]
            columns, _, _, _, _, _ = get_columns(bag, topic, infos, pcid,
                                                 patch_column, patch_columns)
            tablecols = [get_column_def(k, *v) for k, v in columns.items()]
            tableopts = {'topic': topic, 'rosbag': schema, 'pcid': str(pcid)}
            tabledefs.append(TableDefinition(topic, columns=tablecols, options=tableopts))
        return tabledefs
Code Example #9
    def _read_info(self):
        """Read some metadata from inside this rosbag."""
        from rosbag import Bag, ROSBagUnindexedException, ROSBagException
        try:
            b = Bag(self.path)
        except ROSBagUnindexedException:
            b = Bag(self.path, allow_unindexed=True)
            print('Reindexing', self.filename)
            b.reindex()

        try:
            duration = b.get_end_time() - b.get_start_time()
        except ROSBagException:
            duration = 0
        return {
            'n_messages': b.get_message_count(),
            'duration': duration,
            'topic_list': b.get_type_and_topic_info()[1].keys()
        }
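
The index [1] above is the topics field of the tuple returned by get_type_and_topic_info(); a sketch of the equivalent, more readable access via the named field (the bag path is hypothetical):

from rosbag import Bag

with Bag("example.bag") as b:
    # The returned namedtuple has fields (msg_types, topics); .topics maps each
    # topic name to its metadata (msg_type, message_count, frequency, ...).
    topic_names = list(b.get_type_and_topic_info().topics.keys())
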
Code Example #10
File: rosbag_.py  Project: LI3DS/fdw-pointcloud
class Rosbag(ForeignDataWrapper):
    def __init__(self, options, columns=None):
        super(Rosbag, self).__init__(options, columns)
        Bag = import_bag(options)
        self.filename = options.pop('rosbag_path', "") + options.pop('rosbag')
        self.topic = options.pop('topic', None)
        pointcloud_formats = strtobool(options.pop('metadata', 'false'))

        self.patch_column = options.pop('patch_column', 'points').strip()
        self.patch_columns = options.pop('patch_columns', '*').strip()
        self.patch_columns = [col.strip() for col in self.patch_columns.split(',') if col.strip()]

        self.patch_count_default = int(options.pop('patch_count_default', 1000))
        # 0 => 1 patch per message
        self.patch_count_pointcloud = int(options.pop('patch_count_pointcloud', 0))
        assert(self.patch_count_default > 0)
        assert(self.patch_count_pointcloud >= 0)
        self.pcid = int(options.pop('pcid', 0))
        self.bag = Bag(self.filename, 'r')
        self.topics = self.bag.get_type_and_topic_info().topics
        self.pointcloud_formats = None

        if pointcloud_formats:
            self.pointcloud_formats = []
            topics = self.topic.split(',') if self.topic else self.topics
            for i, topic in enumerate(self.topics):
                if topic not in topics:
                    continue
                infos = self.topics[topic]
                columns, patch_schema, patch_ply_header, _, patch_columns, patch_srid = \
                    get_columns(self.bag, topic, infos, self.pcid+i+1, self.patch_column,
                                self.patch_columns)
                self.pointcloud_formats.append({
                    'pcid': self.pcid+i+1,
                    'srid': patch_srid,
                    'schema': patch_schema,
                    'format': columns[self.patch_column][3],
                    'rostype': infos.msg_type,
                    'columns': patch_columns,
                    'ply_header': patch_ply_header,
                })
            return

        if not self.topic:
            log_to_postgres('"topic" option is required', ERROR)

        self.infos = self.topics[self.topic]
        (self.columns, self.patch_schema, self.patch_ply_header, self.endianness,
         self.patch_columns, self.patch_srid) = \
            get_columns(self.bag, self.topic, self.infos, self.pcid, self.patch_column,
                        self.patch_columns)

        if columns:
            missing = set(columns) - set(self.columns.keys())
            columns = list(c for c in self.columns.keys() if c in columns)
            if missing:
                missing = ", ".join(sorted(missing))
                support = ", ".join(sorted(self.columns.keys()))
                log_to_postgres(
                    "extra unsupported columns : {}".format(missing), WARNING,
                    hint="supported columns : {}".format(support))
            self.columns = {col: self.columns[col] for col in columns}

        if options:
            log_to_postgres("extra unsupported options : {}".format(
                options.keys()), WARNING)

    @classmethod
    def import_schema(self, schema, srv_options, options,
                      restriction_type, restricts):
        Bag = import_bag(srv_options)
        pcid_str = options.pop('pcid', srv_options.pop('pcid', 0))
        pcid = int(pcid_str)
        patch_column = options.pop('patch_column', srv_options.pop('patch_column', 'points'))
        patch_columns = options.pop('patch_columns', '*').strip()
        patch_columns = [col.strip() for col in patch_columns.split(',') if col.strip()]
        filename = srv_options.pop('rosbag_path', "") + options.pop('rosbag_path', "") + schema
        bag = Bag(filename, 'r')

        tablecols = []
        topics = bag.get_type_and_topic_info().topics
        pcid_for_topic = {k: pcid+1+i for i, k in enumerate(topics.keys())}
        pointcloud_formats = True
        if restriction_type == 'limit':
            topics = {k: v for k, v in topics.items() if k in restricts}
            pointcloud_formats = 'pointcloud_formats' in restricts
        elif restriction_type == 'except':
            topics = {k: v for k, v in topics.items() if k not in restricts}
            pointcloud_formats = 'pointcloud_formats' not in restricts

        tabledefs = []
        if pointcloud_formats:
            tablecols = [
                ColumnDefinition('pcid', type_name='integer'),
                ColumnDefinition('srid', type_name='integer'),
                ColumnDefinition('schema', type_name='text'),
                ColumnDefinition('format', type_name='text'),
                ColumnDefinition('rostype', type_name='text'),
                ColumnDefinition('columns', type_name='text[]'),
                ColumnDefinition('ply_header', type_name='text'),
            ]
            tableopts = {'metadata': 'true', 'rosbag': schema, 'pcid': pcid_str}
            tabledefs.append(TableDefinition("pointcloud_formats", columns=tablecols,
                                             options=tableopts))

        for topic, infos in topics.items():
            pcid = pcid_for_topic[topic]
            columns, _, _, _, _, _ = get_columns(bag, topic, infos, pcid,
                                                 patch_column, patch_columns)
            tablecols = [get_column_def(k, *v) for k, v in columns.items()]
            tableopts = {'topic': topic, 'rosbag': schema, 'pcid': str(pcid)}
            tabledefs.append(TableDefinition(topic, columns=tablecols, options=tableopts))
        return tabledefs

    def execute(self, quals, columns):
        if self.pointcloud_formats is not None:
            for f in self.pointcloud_formats:
                yield f
            return
        self.patch_data = ''
        from rospy.rostime import Time
        tmin = None
        tmax = None
        for qual in quals:
            if qual.field_name == "time":
                t = int(qual.value)
                t = Time(t / 1000000000, t % 1000000000)
                if qual.operator in ['=', '>', '>=']:
                    tmin = t
                if qual.operator in ['=', '<', '<=']:
                    tmax = t
        for topic, msg, t in self.bag.read_messages(
                topics=self.topic, start_time=tmin, end_time=tmax):
            for row in self.get_rows(topic, msg, t, columns):
                yield row

        # flush leftover patch data
        if self.patch_data and self.last_row:
            count = int((len(self.patch_data) / self.point_size))
            # in replicating mode, a single leftover point must not be reported
            if count > 1 or self.patch_step_size == self.patch_size:
                res = self.last_row
                if self.patch_column in columns:
                    res[self.patch_column] = hexlify(
                            pack('=b3I', self.endianness, self.pcid, 0, count) + self.patch_data)
                if self.patch_ply_header and 'ply' in columns:
                    self.ply_info['count'] = count
                    res['ply'] = self.patch_ply_header.format(**self.ply_info) + self.patch_data
                yield res

    def get_rows(self, topic, msg, t, columns, toplevel=True):
        if toplevel and len(msg.__slots__) == 1:
            attr = getattr(msg, msg.__slots__[0])
            if isinstance(attr, list):
                for msg in attr:
                    for row in self.get_rows(topic, msg, t, columns, False):
                        yield row
                return
        res = {}
        data_columns = set(columns)
        if self.patch_column in columns:
            data_columns = data_columns.union(self.patch_columns) - set([self.patch_column])
        if "filename" in data_columns:
            res["filename"] = self.filename
        if "topic" in data_columns:
            res["topic"] = topic
        if "time" in data_columns:
            res["time"] = t.to_nsec()
        if self.infos.msg_type == 'sensor_msgs/PointCloud2':
            self.patch_count = self.patch_count_pointcloud or (msg.width*msg.height)
            self.point_size = msg.point_step
            self.patch_size = self.patch_count * self.point_size
            self.patch_step_size = self.patch_size
            self.endianness = 0 if msg.is_bigendian else 1
            data_columns = data_columns - set(['ply', self.patch_column])
            self.patch_data += msg.data

        data_columns = data_columns - set(res.keys())
        for column in data_columns:
            attr = msg
            for col in column.split('.'):
                if isinstance(attr, list):
                    attr = tuple(getattr(a, col) for a in attr)
                else:
                    attr = getattr(attr, col)
            if hasattr(attr, "to_nsec"):
                attr = attr.to_nsec()
            elif hasattr(attr, "x"):
                if hasattr(attr, "w"):
                    attr = (attr.x, attr.y, attr.z, attr.w)
                else:
                    attr = (attr.x, attr.y, attr.z)
            elif isinstance(attr, str):
                fmt = self.columns[column][3]
                if fmt:
                    attr = unpack(fmt, attr)
            res[column] = attr

        if self.patch_column in columns and not self.infos.msg_type == 'sensor_msgs/PointCloud2':
            fmt = self.columns[self.patch_column][3]
            self.patch_count = self.patch_count_default
            self.point_size = calcsize(fmt)
            self.patch_size = self.patch_count * self.point_size
            self.patch_step_size = self.patch_size - self.point_size
            self.patch_data += get_point_data(res, self.patch_columns, fmt)
            res = {k: v for k, v in res.items() if k not in self.patch_columns}

        if not self.patch_data:
            yield res
        else:
            # todo: ensure current res and previous res are equal if there is some leftover
            # patch_data
            while len(self.patch_data) >= self.patch_size:
                data = self.patch_data[0:self.patch_size]
                count = int(self.patch_size / self.point_size)
                res[self.patch_column] = hexlify(
                        pack('=b3I', self.endianness, self.pcid, 0, count) + data)
                if self.patch_ply_header and 'ply' in columns:
                    self.ply_info = {
                        'endianness': 'big' if self.endianness else 'little',
                        'filename': self.filename,
                        'topic': self.topic,
                        'count': count
                    }
                    res['ply'] = self.patch_ply_header.format(**self.ply_info) + data
                self.patch_data = self.patch_data[self.patch_step_size:]
                yield res
            self.last_row = res
Code Example #11
File: rosbag_.py  Project: zhhongsh/fdw-li3ds
class Rosbag(ForeignDataWrapper):
    def __init__(self, options, columns=None):
        super(Rosbag, self).__init__(options, columns)
        Bag = import_bag(options)
        self.filename = options.pop('rosbag_path', "") + options.pop('rosbag')
        self.topic = options.pop('topic', None)
        pointcloud_formats = strtobool(options.pop('metadata', 'false'))

        self.patch_column = options.pop('patch_column', 'points').strip()
        self.patch_columns = options.pop('patch_columns', '*').strip()
        self.patch_columns = [
            col.strip() for col in self.patch_columns.split(',')
            if col.strip()
        ]

        self.patch_count_default = int(options.pop('patch_count_default',
                                                   1000))
        # 0 => 1 patch per message
        self.patch_count_pointcloud = int(
            options.pop('patch_count_pointcloud', 0))
        assert (self.patch_count_default > 0)
        assert (self.patch_count_pointcloud >= 0)
        self.pcid = int(options.pop('pcid', 0))
        self.bag = Bag(self.filename, 'r')
        self.topics = self.bag.get_type_and_topic_info().topics
        self.pointcloud_formats = None

        if pointcloud_formats:
            self.pointcloud_formats = []
            topics = self.topic.split(',') if self.topic else self.topics
            for i, topic in enumerate(self.topics):
                if topic not in topics:
                    continue
                infos = self.topics[topic]
                columns, patch_schema, patch_ply_header, _, patch_columns, patch_srid = \
                    get_columns(self.bag, topic, infos, self.pcid+i+1, self.patch_column,
                                self.patch_columns)
                self.pointcloud_formats.append({
                    'pcid': self.pcid + i + 1,
                    'srid': patch_srid,
                    'schema': patch_schema,
                    'format': columns[self.patch_column][3],
                    'rostype': infos.msg_type,
                    'columns': patch_columns,
                    'ply_header': patch_ply_header,
                })
            return

        if not self.topic:
            log_to_postgres('"topic" option is required', ERROR)

        self.infos = self.topics[self.topic]
        (self.columns, self.patch_schema, self.patch_ply_header, self.endianness,
         self.patch_columns, self.patch_srid) = \
            get_columns(self.bag, self.topic, self.infos, self.pcid, self.patch_column,
                        self.patch_columns)

        if columns:
            missing = set(columns) - set(self.columns.keys())
            columns = list(c for c in self.columns.keys() if c in columns)
            if missing:
                missing = ", ".join(sorted(missing))
                support = ", ".join(sorted(self.columns.keys()))
                log_to_postgres(
                    "extra unsupported columns : {}".format(missing),
                    WARNING,
                    hint="supported columns : {}".format(support))
            self.columns = {col: self.columns[col] for col in columns}

        if options:
            log_to_postgres(
                "extra unsupported options : {}".format(options.keys()),
                WARNING)

    @classmethod
    def import_schema(self, schema, srv_options, options, restriction_type,
                      restricts):
        Bag = import_bag(srv_options)
        pcid_str = options.pop('pcid', srv_options.pop('pcid', 0))
        pcid = int(pcid_str)
        patch_column = options.pop('patch_column',
                                   srv_options.pop('patch_column', 'points'))
        patch_columns = options.pop('patch_columns', '*').strip()
        patch_columns = [
            col.strip() for col in patch_columns.split(',') if col.strip()
        ]
        filename = srv_options.pop('rosbag_path', "") + options.pop(
            'rosbag_path', "") + schema
        bag = Bag(filename, 'r')

        tablecols = []
        topics = bag.get_type_and_topic_info().topics
        pcid_for_topic = {k: pcid + 1 + i for i, k in enumerate(topics.keys())}
        pointcloud_formats = True
        if restriction_type == 'limit':
            topics = {k: v for k, v in topics.items() if k in restricts}
            pointcloud_formats = 'pointcloud_formats' in restricts
        elif restriction_type == 'except':
            topics = {k: v for k, v in topics.items() if k not in restricts}
            pointcloud_formats = 'pointcloud_formats' not in restricts

        tabledefs = []
        if pointcloud_formats:
            tablecols = [
                ColumnDefinition('pcid', type_name='integer'),
                ColumnDefinition('srid', type_name='integer'),
                ColumnDefinition('schema', type_name='text'),
                ColumnDefinition('format', type_name='text'),
                ColumnDefinition('rostype', type_name='text'),
                ColumnDefinition('columns', type_name='text[]'),
                ColumnDefinition('ply_header', type_name='text'),
            ]
            tableopts = {
                'metadata': 'true',
                'rosbag': schema,
                'pcid': pcid_str
            }
            tabledefs.append(
                TableDefinition("pointcloud_formats",
                                columns=tablecols,
                                options=tableopts))

        for topic, infos in topics.items():
            pcid = pcid_for_topic[topic]
            columns, _, _, _, _, _ = get_columns(bag, topic, infos, pcid,
                                                 patch_column, patch_columns)
            tablecols = [get_column_def(k, *v) for k, v in columns.items()]
            tableopts = {'topic': topic, 'rosbag': schema, 'pcid': str(pcid)}
            tabledefs.append(
                TableDefinition(topic, columns=tablecols, options=tableopts))
        return tabledefs

    def execute(self, quals, columns):
        if self.pointcloud_formats is not None:
            for f in self.pointcloud_formats:
                yield f
            return
        self.patch_data = ''
        from rospy.rostime import Time
        tmin = None
        tmax = None
        for qual in quals:
            if qual.field_name == "time":
                t = int(qual.value)
                t = Time(t / 1000000000, t % 1000000000)
                if qual.operator in ['=', '>', '>=']:
                    tmin = t
                if qual.operator in ['=', '<', '<=']:
                    tmax = t
        for topic, msg, t in self.bag.read_messages(topics=self.topic,
                                                    start_time=tmin,
                                                    end_time=tmax):
            for row in self.get_rows(topic, msg, t, columns):
                yield row

        # flush leftover patch data
        if self.patch_data and self.last_row:
            count = int((len(self.patch_data) / self.point_size))
            # in replicating mode, a single leftover point must not be reported
            if count > 1 or self.patch_step_size == self.patch_size:
                res = self.last_row
                if self.patch_column in columns:
                    res[self.patch_column] = hexlify(
                        pack('=b3I', self.endianness, self.pcid, 0, count) +
                        self.patch_data)
                if self.patch_ply_header and 'ply' in columns:
                    self.ply_info['count'] = count
                    res['ply'] = self.patch_ply_header.format(
                        **self.ply_info) + self.patch_data
                yield res

    def get_rows(self, topic, msg, t, columns, toplevel=True):
        if toplevel and len(msg.__slots__) == 1:
            attr = getattr(msg, msg.__slots__[0])
            if isinstance(attr, list):
                for msg in attr:
                    for row in self.get_rows(topic, msg, t, columns, False):
                        yield row
                return
        res = {}
        data_columns = set(columns)
        if self.patch_column in columns:
            data_columns = data_columns.union(self.patch_columns) - set(
                [self.patch_column])
        if "filename" in data_columns:
            res["filename"] = self.filename
        if "topic" in data_columns:
            res["topic"] = topic
        if "time" in data_columns:
            res["time"] = t.to_nsec()
        if self.infos.msg_type == 'sensor_msgs/PointCloud2':
            self.patch_count = self.patch_count_pointcloud or (msg.width *
                                                               msg.height)
            self.point_size = msg.point_step
            self.patch_size = self.patch_count * self.point_size
            self.patch_step_size = self.patch_size
            self.endianness = 0 if msg.is_bigendian else 1
            data_columns = data_columns - set(['ply', self.patch_column])
            self.patch_data += msg.data

        data_columns = data_columns - set(res.keys())
        for column in data_columns:
            attr = msg
            for col in column.split('.'):
                if isinstance(attr, list):
                    attr = tuple(getattr(a, col) for a in attr)
                else:
                    attr = getattr(attr, col)
            if hasattr(attr, "to_nsec"):
                attr = attr.to_nsec()
            elif hasattr(attr, "x"):
                if hasattr(attr, "w"):
                    attr = (attr.x, attr.y, attr.z, attr.w)
                else:
                    attr = (attr.x, attr.y, attr.z)
            elif isinstance(attr, str):
                fmt = self.columns[column][3]
                if fmt:
                    attr = unpack(fmt, attr)
            res[column] = attr

        if self.patch_column in columns and not self.infos.msg_type == 'sensor_msgs/PointCloud2':
            fmt = self.columns[self.patch_column][3]
            self.patch_count = self.patch_count_default
            self.point_size = calcsize(fmt)
            self.patch_size = self.patch_count * self.point_size
            self.patch_step_size = self.patch_size - self.point_size
            self.patch_data += get_point_data(res, self.patch_columns, fmt)
            res = {k: v for k, v in res.items() if k not in self.patch_columns}

        if not self.patch_data:
            yield res
        else:
            # todo: ensure current res and previous res are equal if there is some leftover
            # patch_data
            while len(self.patch_data) >= self.patch_size:
                data = self.patch_data[0:self.patch_size]
                count = int(self.patch_size / self.point_size)
                res[self.patch_column] = hexlify(
                    pack('=b3I', self.endianness, self.pcid, 0, count) + data)
                if self.patch_ply_header and 'ply' in columns:
                    self.ply_info = {
                        'endianness': 'big' if self.endianness else 'little',
                        'filename': self.filename,
                        'topic': self.topic,
                        'count': count
                    }
                    res['ply'] = self.patch_ply_header.format(
                        **self.ply_info) + data
                self.patch_data = self.patch_data[self.patch_step_size:]
                yield res
            self.last_row = res