Example #1
def main():

    parser = argparse.ArgumentParser(
        description=
        'Merge one or more bag files with the possibilities of filtering topics.'
    )
    parser.add_argument('outputbag',
                        help='Output bag file with topics merged.')
    parser.add_argument('inputbag', nargs='+', help='Input bag files.')
    parser.add_argument('-v',
                        '--verbose',
                        action="store_true",
                        default=False,
                        help='Verbose output.')
    parser.add_argument(
        '-t',
        '--topics',
        default="*",
        help=
        'String interpreted as a list of topics (wildcards \'*\' and \'?\' allowed) to include in the merged bag file.'
    )

    args = parser.parse_args()

    topics = args.topics.split(' ')

    total_included_count = 0
    total_skipped_count = 0

    if args.verbose:
        print("Writing bag file: " + args.outputbag)
        print("Matching topics against patterns: '%s'" % ' '.join(topics))

    with Bag(args.outputbag, 'w') as o:
        for ifile in args.inputbag:
            matchedtopics = []
            included_count = 0
            skipped_count = 0
            if args.verbose:
                print("> Reading bag file: " + ifile)
            with Bag(ifile, 'r') as ib:
                for topic, msg, t in ib:
                    if any(fnmatchcase(topic, pattern) for pattern in topics):
                        if topic not in matchedtopics:
                            matchedtopics.append(topic)
                            if args.verbose:
                                print("Including matched topic '%s'" % topic)
                        o.write(topic, msg, t)
                        included_count += 1
                    else:
                        skipped_count += 1
            total_included_count += included_count
            total_skipped_count += skipped_count
            if args.verbose:
                print("< Included %d messages and skipped %d" %
                      (included_count, skipped_count))

    if args.verbose:
        print("Total: Included %d messages and skipped %d" %
              (total_included_count, total_skipped_count))
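The snippet omits its imports and entry point; a minimal header sketch that makes it runnable as a standalone script (assuming the standard rosbag Python API):

import argparse
from fnmatch import fnmatchcase

from rosbag import Bag

if __name__ == '__main__':
    main()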
Example #2
def process_bag(bag_in_fn, bag_out_fn, conf_file_fn):
    bag_in = Bag(bag_in_fn)
    bag_out = Bag(bag_out_fn, 'w')

    include_rules, exclude_rules, time_rules = read_rules(conf_file_fn)
    topic_rules = include_rules + exclude_rules

    # Set the UNIX time at the start of the bag file
    for r in topic_rules:
        r.set_begin_time(bag_in.get_start_time())
    for r in time_rules:
        r.set_begin_time(bag_in.get_start_time())

    # Find start and end times that are actually required
    t_start, t_end = get_begin_end(time_rules, bag_in)
    # Find topics that are actually required
    bag_topics = bag_in.get_type_and_topic_info().topics.keys()
    topics = get_topics(topic_rules, bag_topics)

    messages = bag_in.read_messages(topics=topics,
                                    start_time=t_start,
                                    end_time=t_end,
                                    return_connection_header=True)
    for topic, msg, t, conn_header in messages:
        # Check default enforcement for this message
        if FilterRule.is_tf_topic(topic):
            default = FilterRule.DEFAULT_ENFORCEMENT_TF
        else:
            default = FilterRule.DEFAULT_ENFORCEMENT_TOPIC

        # When default is to include, only check whether the exclusion
        # rules are satisfied, and if all of them are ok, write it out
        if default == FilterRule.INCLUDE:
            # Check exclusions
            ok = True
            for r in exclude_rules:
                if r.msg_match(topic, msg, t):
                    ok = False
        # When default is to exclude, check if the message matches any
        # of the inclusion rules and write it out if it does
        else:
            # Check inclusions
            ok = False
            for r in include_rules:
                if r.msg_match(topic, msg, t):
                    ok = True

        # Time rules
        time_ok = True
        for r in time_rules:
            if not r.is_ok_with(topic, msg, t):
                time_ok = False

        # Write to file
        if ok and time_ok:
            bag_out.write(topic, msg, t, connection_header=conn_header)

    bag_out.close()
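The rule loops above never break early; an equivalent, short-circuiting formulation of the same checks (a sketch, not part of the original):

        # Equivalent rule checks using any()/all(), which stop at the first match.
        if default == FilterRule.INCLUDE:
            ok = not any(r.msg_match(topic, msg, t) for r in exclude_rules)
        else:
            ok = any(r.msg_match(topic, msg, t) for r in include_rules)
        time_ok = all(r.is_ok_with(topic, msg, t) for r in time_rules)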
Example #3
def merge():

	# Setting arguments
	parser = argparse.ArgumentParser(description='Merge one or more bag files with the possibilities of filtering topics.')
	parser.add_argument('outputbag', help='output bag file with topics merged')
	parser.add_argument('inputbag', nargs='+', help='input bag files')
	parser.add_argument('-v', '--verbose', action="store_true", default=False, help='verbose output')
	parser.add_argument('-t', '--topics', default="*", help='string interpreted as a list of topics (wildcards \'*\' and \'?\' allowed) to include in the merged/filtered bag file')
	parser.add_argument('-nt', '--new_topic', default=False, help='if set, write every matched message under this single "new_topic" name in the merged/filtered bag file')
	args = parser.parse_args()

	# Variable initialisation
	topics = args.topics.split(' ')
	total_included_count = 0
	total_skipped_count = 0

	if args.verbose:
		print("Writing bag file: " + args.outputbag)
		print("Matching topics against patterns: '%s'" % ' '.join(topics))

	# Start the process
	with Bag(args.outputbag, 'w') as o:
		for ifile in args.inputbag:
			matchedtopics = []
			included_count = 0
			skipped_count = 0

			if args.verbose:
				print("> Reading bag file: " + ifile)

			# Read each potentially available bag file
			with Bag(ifile, 'r') as ib:
				for topic, msg, t in ib:
					if any(fnmatchcase(topic, pattern) for pattern in topics):
					if topic not in matchedtopics:
						matchedtopics.append(topic)
						if args.verbose:
								print("Including matched topic '%s'" % topic)

						if args.new_topic: # If required, store with new topic name
							o.write(args.new_topic, msg, t)
						else: # otherwise, continue as it is
							o.write(topic, msg, t)
						included_count += 1
					else:
						skipped_count += 1

			total_included_count += included_count
			total_skipped_count += skipped_count
			if args.verbose:
				print("< Included %d messages and skipped %d" % (included_count, skipped_count))

	if args.verbose:
		print("Total: Included %d messages and skipped %d" % (total_included_count, total_skipped_count))
Example #4
def remap(input_bag_path, output_bag_path):
    """
    Remaps the topics of a single bag
    :param input_bag_path: the bag with the topic that should be renamed
    :param output_bag_path: the path where the output bag should be created
    """
    with Bag(output_bag_path, 'w') as output_bag:  # create output bag
        # write all topics into the output bag and rename the specified one:
        for topic, msg, t in Bag(input_bag_path):
            if topic == '/image_raw':
                output_bag.write('/camera/image_proc', msg, t)
            else:
                output_bag.write(topic, msg, t)
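The topic names here are hard-coded; the same pattern generalizes to an arbitrary rename table (a sketch, with the mapping supplied by the caller):

from rosbag import Bag

def remap_topics(input_bag_path, output_bag_path, topic_map):
    """Copy a bag, renaming any topic found in topic_map (old name -> new name)."""
    with Bag(output_bag_path, 'w') as output_bag:
        for topic, msg, t in Bag(input_bag_path):
            output_bag.write(topic_map.get(topic, topic), msg, t)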
Example #5
def RenameTopics(bag_file, location, collect_type, car_name):
    print("Renaming topics...")
    new_bag_file = bag_file + "_" + location + "_" + collect_type + "_" + car_name + ".bag"

    velodyne_name = '/vlp16_1/velodyne_points'
    new_velodyne_name = '/points_raw'

    with Bag(new_bag_file, 'w') as Y:
        for topic, msg, t in Bag(bag_file + ".bag"):
            if topic == velodyne_name:
                Y.write(new_velodyne_name, msg, t)
            else:
                Y.write(topic, msg, t)
Example #6
def main():

    ofile = './mvsec_data_bags/outdoor_day2_events.bag'
    write_bag = Bag(ofile, 'w')
    print('write bag opened')

    ifile = './mvsec_data_bags/outdoor_day2_data.bag'
    read_bag = Bag(ifile, 'r')
    print('read bag opened')

    iteration = 1
    for topic, msg, t in read_bag:
        if 'events' in topic:
            write_bag.write(topic, msg, t)
            iteration = iteration + 1
            print(iteration, topic)

    read_bag.close()
    write_bag.close()
Example #7
        def incoming_ask(ask_msg):
            rospy.loginfo('Incoming ask:\n' + str(ask_msg))
            if ask_msg.model == self.model and ask_msg.token == self.token:
                rospy.loginfo('Incoming ask with right model and token')

                with TemporaryDirectory() as tmpdir:
                    rospy.logdebug('Temporary directory created: %s', tmpdir)
                    os.chdir(tmpdir)
                    rospy.loginfo('Waiting for objective file from IPFS...')
                    try:
                        self.ipfs.get(ask_msg.objective, timeout=60)
                        rospy.logdebug('Objective is written to %s', tmpdir + '/' + ask_msg.objective)
                        bag = Bag(str(tmpdir + '/' + ask_msg.objective))
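                        # get_type_and_topic_info()[1] is the topics dict;
                        # [0][0] picks the msg_type of its first topic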
                        msg_type = list(bag.get_type_and_topic_info()[1].values())[0][0]
                        if msg_type != Mission()._type:
                            rospy.logwarn('Wrong message type in objective topic')
                            return
                    except ipfsapi.exceptions.TimeoutError:
                        rospy.logwarn('Timeout of waiting objective file: ' + str(ask_msg.objective))
                        rospy.logwarn('Skip incoming ask')
                        return

                self.make_bid(ask_msg)
            else:
                rospy.logwarn('Incoming ask with wrong model and token, skip')
Example #8
    def _assert_bag_valid(self,
                          filename,
                          topics=None,
                          start_time=None,
                          stop_time=None):
        '''
        Open the bagfile at the specified filename and read it to ensure topic limits were
        enforced and the optional topic list and start/stop times are also enforced.
        '''
        bag = Bag(filename)
        topics_dict = bag.get_type_and_topic_info()[1]
        bag_topics = set(topics_dict.keys())
        param_topics = set(self.topic_limits.keys())
        if topics:
            self.assertEqual(bag_topics, set(topics))
        self.assertTrue(bag_topics.issubset(param_topics))
        for topic in topics_dict:
            # Calculate stored message size as each message is 8 bytes
            size = topics_dict[topic].message_count * 8
            count = topics_dict[topic].message_count
            gen = bag.read_messages(topics=topic)
            _, _, first_time = next(gen)
            last_time = first_time  # in case the next for loop does not execute
            if start_time:
                self.assertGreaterEqual(first_time, start_time)
            for _, _, last_time in gen:  # Read through all messages so last_time is valid
                pass
            if stop_time:
                self.assertLessEqual(last_time, stop_time)
            duration = last_time - first_time
            self._assert_limits_enforced(topic, duration, size, count)
Example #9
def load_trajectory_from_bag(file, fast=True, fast_only=False):
    traj = []
    fast_traj = []

    bag = Bag(file)

    normal_topics = ['raven_state', 'raven_state/array']
    normal_topics = normal_topics + ['/' + topic for topic in normal_topics]

    fast_topic = ['raven_state/1000Hz']
    #fast_topic = fast_topic + ['/' + topic for topic in fast_topic]

    if fast_only:
        for topic, msg, t in bag.read_messages(topics=fast_topic):
            fast_traj.append(msg)
        return get_trajectory(fast_traj)

    for topic, msg, t in bag.read_messages(topics=normal_topics):
        traj.append(msg)

    if fast:
        for topic, msg, t in bag.read_messages(topics=fast_topic):
            fast_traj.append(msg)

    if fast_traj:
        return get_trajectory(fast_traj, traj)
    else:
        return get_trajectory(traj)
Example #10
def process_bag_file(file: Union[str, BinaryIO],
                     topic_to_cb: Mapping[str, Callable[[ROSMsgT], DataPoint]]) \
        -> DistTrace:
    """
    Process messages from bag files

    Notes
    -----
    Only the timestamp of messages from the same topic is guaranteed to be monotonically increasing.
    Be careful when merging messages from different topics.
    """
    ret_dist_trace = {topic: []
                      for topic in topic_to_cb.keys()
                      }  # type: Dict[str, List[StampedDataPoint]]
    with Bag(file) as bag:
        for topic, msg, t in bag.read_messages(topics=topic_to_cb.keys()):
            if hasattr(msg, "header"):
                t = msg.header.stamp  # Use sender's timestamp if available
            t_nsec = t.to_nsec()  # type: int
            quo, rem = divmod(t_nsec, (5**6))
            if t_nsec > MAX_PRECISE_INT or rem != 0:
                raise RuntimeWarning(
                    "Timestamp %s nano-sec may lose precision when converted "
                    "to milli-sec in floating point." % str(t))
            t_msec_f = float(quo) / (2**6)

            if topic in topic_to_cb:
                dp = topic_to_cb[topic](msg)
                ret_dist_trace[topic].append(StampedDataPoint(t_msec_f, dp))
            else:
                continue  # Ignore other topics

    return ret_dist_trace
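The divmod by 5**6 above is a precision trick: 10**6 == 2**6 * 5**6, so once the factor of 5**6 divides the nanosecond stamp exactly, the remaining division by 2**6 only shifts the binary exponent and the millisecond value is exact in floating point. A small illustration (hypothetical timestamp):

t_nsec = 1_234_000_000            # a whole number of milliseconds, in ns
quo, rem = divmod(t_nsec, 5**6)   # strip the factor 5**6 exactly
assert rem == 0
t_msec = float(quo) / 2**6        # exact: only the binary exponent changes
assert t_msec == 1234.0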
Example #11
def get_info(bag_file, topic_filter=None):
    bag = Bag(bag_file)
    topics = bag.get_type_and_topic_info().topics
    for topic in topics:
        if topic_filter and topics[topic].msg_type not in topic_filter:
            continue
        print("{}: {} Hz".format(topic, round(topics[topic].frequency, 3)))
        print(topics[topic].message_count)
        times = np.ones(shape=bag.get_message_count(topic_filters=topic))
        n = 0
        for _, msg, t in bag.read_messages(topics=topic):
            times[n] = msg.header.stamp.to_sec()
            n += 1
        times = 1 / np.gradient(times)
        times = times[np.where((times > np.percentile(times, 10)) & (times < np.percentile(times, 90)))]
        print("mean: {}, median: {}".format(np.mean(times), np.median(times)))
        print("min: {}, max: {}".format(np.min(times), np.max(times)))
        # plt.scatter(times, np.gradient(times))
        plt.hist(times)
        plt.yscale("log")
        plt.title("{}: {}".format(os.path.basename(bag_file), topic))
        if not os.path.exists("images"):
            os.makedirs("images")
        plt.savefig(os.path.join("images/", "{}.{}.png".format(os.path.basename(bag_file), topic.replace("/", ""))))
        plt.cla()
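Note that np.gradient uses central differences, so the per-message rates are smoothed relative to simple successive differences; a minimal comparison (made-up stamps):

import numpy as np
stamps = np.array([0.0, 0.1, 0.2, 0.4])   # hypothetical header times in seconds
print(1 / np.gradient(stamps))            # central differences: [10. 10. 6.67 5.]
print(1 / np.diff(stamps))                # successive differences: [10. 10. 5.]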
Example #12
def main():
    parser = argparse.ArgumentParser()
    parser.add_argument("--bag",
                        dest="bag",
                        help="Path to ROS bag.",
                        required=True)
    parser.add_argument("--left_image_0",
                        help="Path to 'left_image_00000.png'")
    parser.add_argument("--right_image_0",
                        help="Path to 'right_image_00000.png'")
    parser.add_argument("--start_time",
                        help="A start time to check",
                        type=float,
                        default=None)
    args = parser.parse_args()

    bridge = CvBridge()
    bag = Bag(args.bag)
    t_start = bag.get_start_time()
    n_msgs = 0

    if args.left_image_0 is None and args.right_image_0 is None:
        raise ValueError(
            "You must provide either left_image_0 or right_image_0")
    isLeft = args.left_image_0 is not None
    ref_img = args.left_image_0 if isLeft else args.right_image_0
    print("Using ref image %s" % ref_img)
    ref_image_0 = cv2.imread(ref_img)[:, :, 0]
    assert ref_image_0 is not None

    topic = '/davis/left/image_raw' if isLeft else '/davis/right/image_raw'
    if args.start_time:
        topic, msg, t = next(
            bag.read_messages(topics=[topic],
                              start_time=rospy.Time(args.start_time +
                                                    t_start)))
        width = msg.width
        height = msg.height
        image = np.asarray(bridge.imgmsg_to_cv2(msg, msg.encoding))
        image = np.reshape(image, (height, width))

        print("Correct? {}".format(bool(np.all(np.isclose(image,
                                                          ref_image_0)))))

    else:
        for topic, msg, t in bag.read_messages(topics=[topic]):
            n_msgs += 1
            width = msg.width
            height = msg.height
            image = np.asarray(bridge.imgmsg_to_cv2(msg, msg.encoding))
            image = np.reshape(image, (height, width))

            if bool(np.all(np.isclose(image, ref_image_0))):
                print("Ref image found at {} ({} from start)".format(
                    t.to_sec(),
                    t.to_sec() - t_start))
                return

        print("Processed {} images, ref not found!".format(n_msgs))
Example #13
def process_bag(bag_in_fn, bag_out_fn, conf_file_fn):
    bag_in = Bag(bag_in_fn)
    bag_out = Bag(bag_out_fn, 'w')
    topic_rules, tf_rules = RenameRule.parse(conf_file_fn)

    messages = bag_in.read_messages(return_connection_header=True)
    for topic, msg, t, conn_header in messages:
        if topic == '/tf' or topic == '/tf_static':
            new_topic = topic
            new_msg = modify_tf(msg, tf_rules)
        else:
            new_topic = modify_topic(topic, topic_rules)
            # Modify the frame_id in header if header exists
            new_msg = modify_msg(msg, tf_rules)
        bag_out.write(new_topic, new_msg, t, connection_header=conn_header)

    bag_in.close()
    bag_out.close()
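Forwarding the connection header on both read and write preserves per-connection metadata (such as latching) in the output bag; the same pattern gives a metadata-faithful plain copy (a sketch using the same rosbag calls as above):

from rosbag import Bag

def copy_bag(bag_in_fn, bag_out_fn):
    """Copy a bag unchanged while keeping connection metadata intact."""
    with Bag(bag_in_fn) as bag_in, Bag(bag_out_fn, 'w') as bag_out:
        for topic, msg, t, conn_header in bag_in.read_messages(
                return_connection_header=True):
            bag_out.write(topic, msg, t, connection_header=conn_header)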
Example #14
    def import_schema(self, schema, srv_options, options, restriction_type,
                      restricts):
        Bag = import_bag(srv_options)
        pcid_str = options.pop('pcid', srv_options.pop('pcid', 0))
        pcid = int(pcid_str)
        patch_column = options.pop('patch_column',
                                   srv_options.pop('patch_column', 'points'))
        patch_columns = options.pop('patch_columns', '*').strip()
        patch_columns = [
            col.strip() for col in patch_columns.split(',') if col.strip()
        ]
        filename = srv_options.pop('rosbag_path', "") + options.pop(
            'rosbag_path', "") + schema
        bag = Bag(filename, 'r')

        tablecols = []
        topics = bag.get_type_and_topic_info().topics
        pcid_for_topic = {k: pcid + 1 + i for i, k in enumerate(topics.keys())}
        pointcloud_formats = True
        if restriction_type == 'limit':
            topics = {k: v for k, v in topics.items() if k in restricts}
            pointcloud_formats = 'pointcloud_formats' in restricts
        elif restriction_type == 'except':
            topics = {k: v for k, v in topics.items() if k not in restricts}
            pointcloud_formats = 'pointcloud_formats' not in restricts

        tabledefs = []
        if pointcloud_formats:
            tablecols = [
                ColumnDefinition('pcid', type_name='integer'),
                ColumnDefinition('srid', type_name='integer'),
                ColumnDefinition('schema', type_name='text'),
                ColumnDefinition('format', type_name='text'),
                ColumnDefinition('rostype', type_name='text'),
                ColumnDefinition('columns', type_name='text[]'),
                ColumnDefinition('ply_header', type_name='text'),
            ]
            tableopts = {
                'metadata': 'true',
                'rosbag': schema,
                'pcid': pcid_str
            }
            tabledefs.append(
                TableDefinition("pointcloud_formats",
                                columns=tablecols,
                                options=tableopts))

        for topic, infos in topics.items():
            pcid = pcid_for_topic[topic]
            columns, _, _, _, _, _ = get_columns(bag, topic, infos, pcid,
                                                 patch_column, patch_columns)
            tablecols = [get_column_def(k, *v) for k, v in columns.items()]
            tableopts = {'topic': topic, 'rosbag': schema, 'pcid': str(pcid)}
            tabledefs.append(
                TableDefinition(topic, columns=tablecols, options=tableopts))
        return tabledefs
Example #15
    def _read_info(self):
        """Read some metadata from inside this rosbag."""
        from rosbag import Bag, ROSBagUnindexedException, ROSBagException
        try:
            b = Bag(self.path)
        except ROSBagUnindexedException:
            b = Bag(self.path, allow_unindexed=True)
            print('Reindexing', self.filename)
            b.reindex()

        try:
            duration = b.get_end_time() - b.get_start_time()
        except ROSBagException:
            duration = 0
        return {
            'n_messages': b.get_message_count(),
            'duration': duration,
            'topic_list': b.get_type_and_topic_info()[1].keys()
        }
Example #16
def ipfs_download(multihash):
    rospy.wait_for_service('/liability/infochan/ipfs/get_file')
    download = rospy.ServiceProxy('/liability/infochan/ipfs/get_file',
                                  IpfsDownloadFile)
    with NamedTemporaryFile(delete=False) as tmpfile:
        res = download(multihash, Filepath(tmpfile.name))
        if not res.success:
            raise Exception(res.error_msg)
        messages = {}
        for topic, msg, timestamp in Bag(tmpfile.name, 'r').read_messages():
            messages[topic] = msg
        return messages
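Note that messages[topic] = msg keeps only the last message seen on each topic; to keep all of them, accumulate into lists as Example #23 below does (a one-line variant):

            messages.setdefault(topic, []).append(msg)  # list-per-topic variant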
Example #17
    def _get_objective(self, lia: Liability) -> dict:
        mhash = lia.objective.multihash
        tempdir = gettempdir()
        os.chdir(tempdir)

        # temp_obj = NamedTemporaryFile(delete=False)
        self.ipfs.get(mhash)

        messages = {}
        for topic, msg, timestamp in Bag(mhash, 'r').read_messages():
            messages[topic] = msg
        return messages
Example #18
def ipfs_download(multihash):
    tempdir = gettempdir()
    os.chdir(tempdir)

    temp_obj = NamedTemporaryFile(delete=False)

    res = ipfs_download_file(connect(), multihash.multihash, temp_obj.name)
    if not res:
        raise Exception("Can't download objective")
    messages = {}
    for topic, msg, timestamp in Bag(temp_obj.name, 'r').read_messages():
        messages[topic] = msg
    return messages
Example #19
def ipfs_upload(messages):
    rospy.wait_for_service('/liability/infochan/ipfs/add_file')
    upload = rospy.ServiceProxy('/liability/infochan/ipfs/add_file',
                                IpfsUploadFile)
    with NamedTemporaryFile(delete=False) as tmpfile:
        recorder = Bag(tmpfile.name, 'w')
        for topic in messages:
            recorder.write('/{}'.format(topic), messages[topic])
        recorder.close()
        res = upload(Filepath(tmpfile.name))
        if not res.success:
            raise Exception(res.error_msg)
        return res.ipfs_address
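A hypothetical invocation (topic names gain a leading slash when written, so 'period' is stored as '/period'):

from std_msgs.msg import String

ipfs_hash = ipfs_upload({'period': String(data='30'),
                         'email': String(data='user@example.com')})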
Example #20
def main():

    parser = argparse.ArgumentParser(
        description=
        'Align timestamps of several topics to that of a specified topic.')
    parser.add_argument('inputbag', help='input bag file')
    parser.add_argument('outputbag', help='output bag file')
    parser.add_argument('correct_timestamp_topic',
                        help='topic whose timestamps we wish to align to')
    parser.add_argument('-ta',
                        '--topics_to_align',
                        nargs='+',
                        help='topics to align')

    args = parser.parse_args()

    print(args.topics_to_align)

    count = 0
    correct_timestamps = []
    with Bag(args.outputbag, 'w') as ob:
        for topic, msg, t in Bag(args.inputbag, 'r').read_messages():
            if topic == args.correct_timestamp_topic:
                correct_timestamps.append(t)
            # for topic_unaligned in topics_to_align:
            print("Reading", topic, t)

        for topic, msg, t in Bag(args.inputbag, 'r').read_messages():
            # print("Writing", topic, t)
            if topic in args.topics_to_align:
                print("Writing", topic, t, " aligning timestamp from",
                      msg.header.stamp, " to", correct_timestamps[count])
                msg.header.stamp = correct_timestamps[count]
                ob.write(topic, msg, correct_timestamps[count])
                count += 1
            else:
                ob.write(topic, msg, t)
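The second pass consumes one collected timestamp per aligned message, so the two counts must match or the indexing drifts (or raises IndexError); a cheap pre-check between the passes (an addition, not in the original):

    n_aligned = sum(1 for topic, _, _ in Bag(args.inputbag, 'r').read_messages()
                    if topic in args.topics_to_align)
    assert n_aligned == len(correct_timestamps), \
        'aligned message count must match reference topic message count'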
Example #21
def create_objective(period: str, stream_id: str, email: str) -> str:
    ipfs = connect()
    tempdir = gettempdir()
    os.chdir(tempdir)

    with NamedTemporaryFile(delete=False) as f:
        bag = Bag(f.name, 'w')
        bag.write('/period', String(period))
        bag.write('/stream_id', String(stream_id))
        bag.write('/email', String(email))
        bag.close()

        res = ipfs.add(f.name)
        rospy.loginfo("Hash of the objective is {}".format(res['Hash']))
        return res['Hash']
Example #22
def analyze_time_stamps(bag_name, freq):
    print('finding adjust time and bad time stamps ...')
    ts = defaultdict(list)

    with Bag(bag_name, 'r') as inbag:
        min_time_diff = rospy.Duration(1e10)
        t0 = time.time()
        start_time = inbag.get_start_time()
        end_time = inbag.get_end_time()
        total_time = end_time - start_time
        last_percent = 0
        print "total time span to be processed: %.2fs" % total_time
        for topic, msg, t in inbag.read_messages():
            percent_done = (t.to_sec() - start_time) / total_time * 100
            if (percent_done - last_percent > 5.0):
                last_percent = percent_done
                t1 = time.time()
                time_left = (100 - percent_done) * (t1 - t0) / percent_done
                print ("percent done: %5.0f, expected time " + \
                       "remaining: %5.0fs") % (percent_done, time_left)
            if hasattr(msg, 'header'):
                ts[topic].append(msg.header.stamp)
                dt = t - msg.header.stamp
                if dt < min_time_diff:
                    min_time_diff = dt
                    print('min time diff: ', min_time_diff.to_sec())
            if rospy.is_shutdown():
                break
    print('time shift: ', min_time_diff.to_sec())
    bad_stamps = []
    bad_diffs = []
    for tp in ('/fla/ovc_node/left/image_raw',
               '/fla/ovc_node/right/image_raw'):
        tss = np.asarray(ts[tp])
        tsd = np.diff(tss)
        idx = np.where((tsd > rospy.Duration(1.5 * 1.0 / freq))
                       | (tsd < rospy.Duration(0.1 * 1.0 / freq)))
        # add 1 to index to skip *subsequent* frame
        idxp = [i + 1 for i in idx]
        bad_stamps = bad_stamps + (tss[idxp]).tolist()
        bad_diffs = bad_diffs + (tsd[idx]).tolist()

    for i in range(0, len(bad_stamps)):
        print('bad time stamp: %f   dt = %f' % (bad_stamps[i].to_sec(),
                                                bad_diffs[i].to_sec()))

    return min_time_diff, sorted(bad_stamps)
Example #23
def ipfs_download(multihash: Multihash) -> (dict, Bag):
    rospy.wait_for_service('/ipfs/get_file')
    download = rospy.ServiceProxy('/ipfs/get_file', IpfsDownloadFile)
    tmpfile = NamedTemporaryFile(delete=False)
    res = download(multihash, Filepath(tmpfile.name))
    tmpfile.close()
    if not res.success:
        raise Exception(res.error_msg)
    messages = {}
    bag = Bag(tmpfile.name, 'r')
    for topic, msg, timestamp in bag.read_messages():
        if topic not in messages:
            messages[topic] = [msg]
        else:
            messages[topic].append(msg)
    os.unlink(tmpfile.name)
    return (messages, bag)
Example #24
def writepc2frombag2file(input_bagfile, pc2_topic, out_file_path,start_time=None, end_time=None, msg_count=None):
    try:
        output_file_name = get_output_file_name(input_bagfile) + '_xyz.txt'
        if out_file_path.endswith('/'):
            output_file = out_file_path + output_file_name
        else:
            output_file = out_file_path+'/' +  output_file_name
        
        output_file_fh = open(output_file,'w')
        print('writing output_file at', output_file)

        # check start and end time condition 
        if start_time is not None and end_time is not None:
            assert (end_time - start_time) > 0, "end_time must be greater than start_time"

        if msg_count is None:
            use_msg_count = False
        else:
            use_msg_count = True
            # msg_count was already cast to int during arg parsing; check again for sanity and in case this function is reused individually
            msg_count = int(msg_count)
            assert msg_count > 0, "should have positive msg_count"
        
        input_bag = Bag(input_bagfile,'r')
        print('bag load success')
        
        count = 0
        for topic, msg, ts in input_bag.read_messages(topics=[pc2_topic],start_time=start_time, end_time=end_time):

            output_file_fh.write('msg_timestamp: %f\n' % msg.header.stamp.to_sec())
            output_file_fh.write('%d\n' %msg.width)
            for data_pts in pc2.read_points(msg, field_names=("x", "y", "z", "intensity"), skip_nans=True):
                output_file_fh.write('%f %f %f %d\n' % (data_pts[0], data_pts[1], data_pts[2], data_pts[3]))
            
            count += 1
            print(count, msg_count)
            if use_msg_count and count >= msg_count:
                break

        input_bag.close()
        output_file_fh.close()

    except Exception as e_init:
        print(traceback.format_exc())
Example #25
def pinata_rosbag(ipfs_hash: str) -> (dict, Bag):

    data = requests.get(f'https://gateway.pinata.cloud/ipfs/{ipfs_hash}')
    rospy.loginfo(data.status_code)
    if data.status_code != 200:
        rospy.loginfo(f'Error downloading rosbag! {data.content}')
        return

    else:
        tmpfile = NamedTemporaryFile(delete=False)
        tmpfile.write(data.content)
        tmpfile.close()  # flush to disk before rosbag opens the file
        bag = Bag(tmpfile.name)

        messages = {}
        for topic, msg, timestamp in bag.read_messages():
            if topic not in messages:
                messages[topic] = [msg]
            else:
                messages[topic].append(msg)

        os.unlink(tmpfile.name)
    return (messages, bag)
Example #26
#!/usr/bin/env python
#------------------------------------------------------------------------------
#
# append one bag to another one. This is faster than merge, but riskier
#

import rospy
import argparse
from rosbag import Bag

if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='append bag2 to bag1.')
    parser.add_argument('bagfile1')
    parser.add_argument('bagfile2')

    args = parser.parse_args(rospy.myargv()[1:])

    with Bag(args.bagfile1, 'a') as inbag1:
        with Bag(args.bagfile2, 'r') as inbag2:
            for topic, msg, t in inbag2.read_messages():
                inbag1.write(topic, msg, t)
Example #27
                        action='store',
                        default=None,
                        type=int,
                        help='chunk threshold in bytes.')

    parser.add_argument('bagfile')

    args = parser.parse_args()

    rospy.init_node('compute_point_cloud')
    depth_topic = args.sensor_topic + '/image_depth'
    points_topic = args.sensor_topic + '/points'
    camera_info = make_caminfo_msg.from_ros(read_yaml(args.camera_info))
    adj = PointCloudAdjuster(camera_info)

    with Bag(args.bagfile, 'r') as inbag:
        cthresh = args.chunk_threshold \
                  if args.chunk_threshold else inbag.chunk_threshold
        print "using chunk threshold: ", cthresh
        with Bag(args.outbag, mode='w', chunk_threshold=cthresh) as outbag:
            t0 = time.time()
            start_time = inbag.get_start_time()
            end_time = inbag.get_end_time()
            total_time = end_time - start_time
            time_to_data = defaultdict(dict)
            last_percent = 0
            print "total time span to be processed: %.2fs" % total_time
            for topic, msg, t in inbag.read_messages():
                percent_done = (t.to_sec() - start_time) / total_time * 100
                if (percent_done - last_percent > 5.0):
                    last_percent = percent_done
Example #28
        nargs='+',
        help='A list of input bag files',
    )
    # add an argument for the optional output file
    PARSER.add_argument(
        '--output_file',
        '-o',
        type=str,
        help='An optional output file to dump to instead of the command line.',
        default=None,
        required=False,
    )
    try:
        # get the arguments from the argument parser
        ARGS = PARSER.parse_args()
        # open the input bag with an automatically closing context
        with Bag(ARGS.input_bag, 'r') as input_bag:
            # if there is an output file path, open the file
            if ARGS.output_file is not None:
                ARGS.output_file = open(ARGS.output_file, 'w')
            # stream the input bag to the output bag
            dump(input_bag, ARGS.topics, ARGS.output_file)
            # if there was an output file, close it
            if ARGS.output_file is not None:
                ARGS.output_file.close()
    except KeyboardInterrupt:
        pass

# explicitly define the outward facing API of this module
__all__ = [dump.__name__]
Example #29
    def on_enter(self, userdata):
        self._failed = False
        try:
            # Initialization

            # userdata
            self._bag_filename = userdata.bag_filename
            self._trajectories_command = userdata.trajectories_command

            # joint definitions
            l_arm_range = range(16, 23)
            r_arm_range = range(23, 30)
            atlasJointNames = [
                'back_bkz', 'back_bky', 'back_bkx', 'neck_ry', 'l_leg_hpz',
                'l_leg_hpx', 'l_leg_hpy', 'l_leg_kny', 'l_leg_aky',
                'l_leg_akx', 'r_leg_hpz', 'r_leg_hpx', 'r_leg_hpy',
                'r_leg_kny', 'r_leg_aky', 'r_leg_akx', 'l_arm_shz',
                'l_arm_shx', 'l_arm_ely', 'l_arm_elx', 'l_arm_wry',
                'l_arm_wrx', 'l_arm_wry2', 'r_arm_shz', 'r_arm_shx',
                'r_arm_ely', 'r_arm_elx', 'r_arm_wry', 'r_arm_wrx',
                'r_arm_wry2'
            ]
            joint_position_cmd = [0] * len(l_arm_range)
            # starting and stopping time of measurement data to be taken into account for calibration
            time_start = 0
            time_end = 0
            time_tf = 0

            # gravity vector in world frame
            g_world = np.resize(np.array([0, 0, -9.81]), (3, 1))

            # take this number of data points for each pose.
            max_data_points_per_pose = 100

            # rotation of the force torque sensor in world frame (quaternions)
            ft_rotation = [0, 0, 0, 0]

            # loop over all ft sensors to calibrate

            for chain in self._calibration_chain:
                tf_chain = ['/pelvis', 'ltorso', 'mtorso', 'utorso']
                if chain == 'left_arm':
                    joint_range = l_arm_range
                    tfname = 'l_hand'
                    tf_chain.extend([
                        'l_clav', 'l_scap', 'l_uarm', 'l_larm', 'l_ufarm',
                        'l_lfarm', 'l_hand'
                    ])
                elif chain == 'right_arm':
                    joint_range = r_arm_range
                    tf_chain.extend([
                        'r_clav', 'r_scap', 'r_uarm', 'r_larm', 'r_ufarm',
                        'r_lfarm', 'r_hand'
                    ])

                    tfname = 'r_hand'
                else:
                    Logger.logwarn(
                        'CalculateForceTorqueCalibration: Undefined chain %s',
                        chain)
                    continue

                # initialize transformations
                tf_data = []  # frame to frame transformation with numerical indexes from the tf_chain
                tf_data_cum = []  # world to frame transformation with numerical indexes from the tf_chain

                for i in range(len(tf_chain)):
                    tf_data.append([0, 0, 0, 0])
                    tf_data_cum.append([0, 0, 0, 0])

                # get number of poses from the commanded trajectories. 2 Points per Pose
                number_of_poses = len(
                    self._trajectories_command[chain].points) // 2
                # define information matrix for calibration
                InfMat = np.zeros(
                    (6 * max_data_points_per_pose * number_of_poses, 10))
                InfMat_i = 0  # Index in this Matrix
                # define measurement vector for calibration
                MeasVec = np.zeros(
                    (6 * max_data_points_per_pose * number_of_poses, 1))

                # commanded joint trajectory for current chain
                # all commanded positions in this trajectory
                current_traj_cmd = self._trajectories_command[chain]

                # flag if the time interval was already set:
                timesetflag = False

                point_index_cmd = 0  # start with index 0
                transformation_available = False

                # read time series from bag file
                # check in desired input trajectory, at which time a position is reached
                # take the data in a defined time period after the commanded new position
                bag_from_robot = Bag(os.path.expanduser(
                    self._bag_filename))  # open bag file
                Logger.loginfo(
                    'CalculateForceTorqueCalibration: Calibrate %s from %s. Using %d different positions from trajectory command. Expecting 2 commanded positions per pose'
                    %
                    (chain, self._bag_filename, len(current_traj_cmd.points)))
                for topic, msg, t in bag_from_robot.read_messages(topics=[
                        '/flor/controller/atlas_state', '/tf',
                        '/flor/controller/joint_command'
                ]):
                    # Check if all desired poses have been reached
                    if point_index_cmd > len(current_traj_cmd.points) - 1:
                        break
                    current_position_cmd = current_traj_cmd.points[
                        point_index_cmd].positions
                    ####################################################
                    # Check tf message: remember last transformation from world to ft sensor
                    if topic == '/tf':
                        # loop over all transformations inside this tf message
                        for j in range(len(msg.transforms)):
                            data = msg.transforms[j]
                            tr = data.transform  # current transformation
                            header = data.header
                            # check if frame matches one of the saved frames
                            for i in range(len(tf_chain)):
                                if data.child_frame_id == tf_chain[i]:
                                    tf_data[i] = np.array([
                                        tr.rotation.x, tr.rotation.y,
                                        tr.rotation.z, tr.rotation.w
                                    ])

                        # calculate the newest transformation to the force torque sensor
                        tf_data_cum[0] = tf_data[0]
                        for i in range(1, len(tf_chain)):
                            tf_data_cum[i] = tf.transformations.quaternion_multiply(
                                tf_data_cum[i - 1], tf_data[i])
                            if i == len(tf_chain) - 1:
                                time_tf = msg.transforms[0].header.stamp.to_sec()
                        # nothing to do with tf message
                        # check if all transformations are available. (up to the last link)
                        if np.any(tf_data_cum[-1]):  # real part of quaternion unequal to zero: data exists
                            transformation_available = True
                        continue
                    else:
                        time_msg = msg.header.stamp.to_sec()

                    ####################################################
                    # check if the timestamp lies in the evaluation interval; if not, look for when the commanded pose is reached
                    if not (time_msg > time_start
                            and time_msg < time_end) and not timesetflag:
                        # the timestamp is not in evaluation interval. Look for reaching of the pose
                        # record data (see below)
                        if topic == '/flor/controller/joint_command':
                            # get the commanded position
                            for i in range(len(joint_range)):
                                osrf_ndx = joint_range[i]
                                joint_position_cmd[i] = msg.position[osrf_ndx]

                            # Check if position command matches the currently expected commanded position
                            pos_reached = True

                            for i in range(len(joint_range)):
                                if abs(joint_position_cmd[i] -
                                       current_position_cmd[i]) > 0.001:
                                    pos_reached = False
                                    break
                            if pos_reached:
                                # end time of the values regarded for calibration data: take the latest time possible  for this pose
                                time_end = msg.header.stamp.to_sec() + self._settlingtime
                                # starting time for calibration: Take 100ms
                                time_start = time_end - 0.1
                                data_points_for_this_pose = 0
                                timesetflag = True
                                # take the next point next time. Each pose consists of two trajectory points (one for reaching, one for settling).
                                # take the second one
                                point_index_cmd = point_index_cmd + 2
                            if not pos_reached:
                                # The commanded position has not been reached. Skip
                                continue
                        continue  # continue here, because data acquisition is triggered by the time_start, time_end

                    if time_msg > time_end:
                        timesetflag = False  # prepare for new evaluation interval

                    ####################################################
                    # check if enough datapoints for this pose have been collected
                    if data_points_for_this_pose > max_data_points_per_pose:
                        # already enough data points for this pose
                        continue

                    ####################################################
                    # Check if message is atlas_state
                    # If this is the case, fill information matrix
                    if topic != '/flor/controller/atlas_state':
                        continue

                    # Extract measured force and torque
                    if chain == 'left_arm':
                        FT = [
                            msg.l_hand.force.x, msg.l_hand.force.y,
                            msg.l_hand.force.z, msg.l_hand.torque.x,
                            msg.l_hand.torque.y, msg.l_hand.torque.z
                        ]
                    elif chain == 'right_arm':
                        FT = [
                            msg.r_hand.force.x, msg.r_hand.force.y,
                            msg.r_hand.force.z, msg.r_hand.torque.x,
                            msg.r_hand.torque.y, msg.r_hand.torque.z
                        ]

                    # calculate gravitation vector
                    if not transformation_available:
                        Logger.logwarn(
                            'No tf messages available at time %1.4f.' %
                            time_msg)
                        continue

                    R = tf.transformations.quaternion_matrix(tf_data_cum[-1])
                    g = np.dot((R[0:3, 0:3]).transpose(), g_world)

                    gx = g[0]
                    gy = g[1]
                    gz = g[2]

                    # fill information matrix for this data point. Source: [1], equ. (7)
                    M = np.zeros((6, 10))
                    M[0, 0] = gx
                    M[1, 0] = gy
                    M[2, 0] = gz

                    M[3, 2] = gz
                    M[3, 3] = -gy
                    M[4, 1] = -gz
                    M[4, 3] = gx
                    M[5, 1] = gy
                    M[5, 2] = -gx
                    M[0, 4] = 1.0
                    M[1, 5] = 1.0
                    M[2, 6] = 1.0
                    M[3, 7] = 1.0
                    M[4, 8] = 1.0
                    M[5, 9] = 1.0

                    # fill big information matrix and vector (stack small information matrices)
                    for i in range(6):
                        for j in range(10):
                            InfMat[InfMat_i * 6 + i, j] = M[i, j]
                        MeasVec[InfMat_i * 6 + i, 0] = FT[i]

                    InfMat_i = InfMat_i + 1  # increase index
                    data_points_for_this_pose = data_points_for_this_pose + 1

                # shorten big information matrix
                if InfMat_i < max_data_points_per_pose * number_of_poses:
                    InfMat_calc = InfMat[0:(6 * InfMat_i) - 1, :]
                    MeasVec_calc = MeasVec[0:(6 * InfMat_i - 1), :]
                else:
                    InfMat_calc = InfMat
                    MeasVec_calc = MeasVec

                # calculate calibration data
                # calculate calibration with given static parameters
                if chain in self._static_calibration_data:
                    # bring columns with the first parameters to the other side of the equation
                    if len(self._static_calibration_data[chain]) != 4:
                        Logger.logwarn(
                            "CalculateForceTorqueCalibration: Given static calibration data for %s has length %d. Required 4 entries."
                            % (chain, len(self._static_calibration_data[chain])))
                        self._failed = True
                        return
                    # convert physical parameters to identification parameters (mass, 1st moment)
                    m = self._static_calibration_data[chain][0]
                    if m == 0:
                        mom_x = 0
                        mom_y = 0
                        mom_z = 0
                    elif m > 0:
                        mom_x = self._static_calibration_data[chain][1] / m
                        mom_y = self._static_calibration_data[chain][2] / m
                        mom_z = self._static_calibration_data[chain][3] / m
                    else:
                        Logger.logwarn(
                            "CalculateForceTorqueCalibration: Negative mass (%f) for calibration requested. Abort."
                            % m)
                        self._failed = True
                        return
                    k_fix = np.resize(np.array([m, mom_x, mom_y, mom_z]),
                                      (4, 1))
                    Logger.loginfo(
                        "CalculateForceTorqueCalibration:static calibration data for %s given: %s. Reduce equation system"
                        % (chain, str(self._static_calibration_data[chain])))
                    MeasVec_calc_corr = np.subtract(
                        np.array(MeasVec_calc),
                        np.dot(InfMat_calc[:, 0:4], k_fix))
                    InfMat_calc_corr = InfMat_calc[:, 4:10]  # only the last 6 columns, which correspond to the sensor offsets
                    k = np.linalg.lstsq(InfMat_calc_corr,
                                        MeasVec_calc_corr)[0]  # solve reduced equation system
                    k_calibration = self._static_calibration_data[chain]
                    k_calibration.extend(k)
                else:  # calculate normally with all parameters unknown
                    k = np.linalg.lstsq(InfMat_calc, MeasVec_calc)[0]
                    # convert to physical parameters (first moment -> mass)
                    if k[0] > 0:
                        k_calibration = [
                            k[0], k[1] / k[0], k[2] / k[0], k[3] / k[0], k[4],
                            k[5], k[6], k[7], k[8], k[9]
                        ]
                    else:
                        Logger.loginfo(
                            "CalculateForceTorqueCalibration:Calibration brought negative mass %f"
                            % k[0])
                        k_calibration = [
                            0.0, 0.0, 0.0, 0.0, k[4], k[5], k[6], k[7], k[8],
                            k[9]
                        ]

                Logger.loginfo(
                    "CalculateForceTorqueCalibration:calibration data for %s" %
                    chain)
                Logger.loginfo("CalculateForceTorqueCalibration:mass: %f" %
                               float(k_calibration[0]))
                Logger.loginfo(
                    "CalculateForceTorqueCalibration:center of mass: %f %f %f"
                    % (k_calibration[1], k_calibration[2], k_calibration[3]))
                Logger.loginfo(
                    "CalculateForceTorqueCalibration:F offset: %f %f %f" %
                    (k_calibration[4], k_calibration[5], k_calibration[6]))
                Logger.loginfo(
                    "CalculateForceTorqueCalibration:M offset: %f %f %f" %
                    (k_calibration[7], k_calibration[8], k_calibration[9]))
                self._ft_calib_data[chain] = k_calibration

                bag_from_robot.close()

            userdata.ft_calib_data = self._ft_calib_data
            Logger.loginfo(
                'CalculateForceTorqueCalibration:Calibration finished')
            self._done = True

        except Exception as e:
            Logger.logwarn(
                'CalculateForceTorqueCalibration:Unable to calculate calibration:\n%s'
                % str(e))
            self._failed = True
Example #30
def main():
    parser = argparse.ArgumentParser(
        description=("Extracts grayscale and event images from a ROS bag and "
                     "saves them as TFRecords for training in TensorFlow."))
    parser.add_argument("--bag", dest="bag",
                        help="Path to ROS bag.",
                        required=True)
    parser.add_argument("--prefix", dest="prefix",
                        help="Output file prefix.",
                        required=True)
    parser.add_argument("--output_folder", dest="output_folder",
                        help="Output folder.",
                        required=True)
    parser.add_argument("--max_aug", dest="max_aug",
                        help="Maximum number of images to combine for augmentation.",
                        type=int,
                        default=6)
    parser.add_argument("--n_skip", dest="n_skip",
                        help="Maximum number of images to combine for augmentation.",
                        type=int,
                        default=1)
    parser.add_argument("--start_time", dest="start_time",
                        help="Time to start in the bag.",
                        type=float,
                        default=0.0)
    parser.add_argument("--end_time", dest="end_time",
                        help="Time to end in the bag.",
                        type=float,
                        default=-1.0)
    parser.add_argument("--save_rgb_images", default=True,
                        const=False, nargs="?")
    parser.add_argument("--debug", default=False,
                        const=True, nargs="?")
    parser.add_argument("--whitelist_imageids_txt",
                        type=str,
                        default=None)

    args = parser.parse_args()

    bridge = CvBridge()

    n_msgs = 0
    left_start_event_offset = 0
    right_start_event_offset = 0

    left_event_image_iter = 0
    right_event_image_iter = 0
    left_image_iter = 0
    right_image_iter = 0
    first_left_image_time = -1
    first_right_image_time = -1

    left_events = []
    right_events = []
    left_images = []
    right_images = []
    left_image_times = []
    right_image_times = []
    left_event_count_images = []
    left_event_time_images = []
    left_event_image_times = []

    right_event_count_images = []
    right_event_time_images = []
    right_event_image_times = []

    left_image_event_start_idx = []
    left_image_event_end_idx = []
    right_image_event_start_idx = []
    right_image_event_end_idx = []

    whitelist_imageids = None
    if args.whitelist_imageids_txt is not None:
        with open(args.whitelist_imageids_txt, 'r') as fp:
            whitelist_imageids = fp.read().splitlines()
        whitelist_imageids = [int(l) for l in whitelist_imageids]

    cols = 346
    rows = 260
    print("Processing bag")
    bag = Bag(args.bag)
    h5_left, h5_right = None, None
    if args.debug:
        import h5py
        h5_file = h5py.File(args.bag[:-len("bag")] + "hdf5", 'r')
        h5_left = h5_file['davis']['left']['events']
        h5_right = h5_file['davis']['right']['events']

    options = tf.python_io.TFRecordOptions(tf.python_io.TFRecordCompressionType.GZIP)
    left_tf_writer = tf.python_io.TFRecordWriter(
        os.path.join(args.output_folder, args.prefix, "left_event_images.tfrecord"),
        options=options)
    right_tf_writer = tf.python_io.TFRecordWriter(
        os.path.join(args.output_folder, args.prefix, "right_event_images.tfrecord"),
        options=options)

    # Get actual time for the start of the bag.
    t_start = bag.get_start_time()
    t_start_ros = rospy.Time(t_start)
    # Set the time at which the bag reading should end.
    if args.end_time == -1.0:
        t_end = bag.get_end_time()
    else:
        t_end = t_start + args.end_time

    for topic, msg, t in bag.read_messages(
            topics=['/davis/left/image_raw',
                    '/davis/right/image_raw',
                    '/davis/left/events',
                    '/davis/right/events'],
            # **** MOD **** #
            # NOTE: we always start reading from the start in order
            #       to count the number of events that have to be
            #       discarded in the HDF5 file
            # start_time=rospy.Time(args.start_time + t_start),
            end_time=rospy.Time(t_end)):
        # Check to make sure we're working with stereo messages.
        if not ('left' in topic or 'right' in topic):
            print('ERROR: topic {} does not contain left or right, is this stereo? '
                  'If not, you will need to modify the topic names in the code.'
                  .format(topic))
            return

        n_msgs += 1
        if n_msgs % 500 == 0:
            print("Processed {} msgs, {} images, time is {}.".format(n_msgs,
                                                                     left_event_image_iter,
                                                                     t.to_sec() - t_start))

        isLeft = 'left' in topic
        # **** MOD **** # /*start
        # If we are still not reading the part
        # we are interested in, we just count
        # the number of events
        if t.to_sec() < args.start_time + t_start:
            if 'events' in topic and msg.events:
                if isLeft:
                    left_start_event_offset += len(msg.events)
                else:
                    right_start_event_offset += len(msg.events)
            continue  # read the next msg
        # **** MOD **** # end*/

        if 'image' in topic:
            width = msg.width
            height = msg.height
            if width != cols or height != rows:
                print("Image dimensions are not what we expected: set: ({} {}) vs  got:({} {})"
                      .format(cols, rows, width, height))
                return
            time = msg.header.stamp
            image = np.asarray(bridge.imgmsg_to_cv2(msg, msg.encoding))
            image = np.reshape(image, (height, width))

            if isLeft:
                if whitelist_imageids is None or left_image_iter in whitelist_imageids:
                    if args.save_rgb_images:
                        cv2.imwrite(os.path.join(args.output_folder,
                                                 args.prefix,
                                                 "left_image{:05d}.png".format(left_image_iter)),
                                    image)
                    if left_image_iter > 0:
                        left_image_times.append(time)
                    else:
                        first_left_image_time = time
                        left_event_image_times.append(time.to_sec())
                left_image_iter += 1
            else:
                if whitelist_imageids is None or right_image_iter in whitelist_imageids:
                    if args.save_rgb_images:
                        cv2.imwrite(os.path.join(args.output_folder,
                                                 args.prefix,
                                                 "right_image{:05d}.png".format(right_image_iter)),
                                    image)
                    if right_image_iter > 0:
                        right_image_times.append(time)
                    else:
                        first_right_image_time = time
                        right_event_image_times.append(time.to_sec())
                right_image_iter += 1
        elif 'events' in topic and msg.events:
            for event in msg.events:
                ts = event.ts
                event = [event.x,
                         event.y,
                         (ts - t_start_ros).to_sec(),
                         (float(event.polarity) - 0.5) * 2]
                if isLeft:
                    if first_left_image_time != -1 and ts > first_left_image_time:
                        left_events.append(event)
                    else:
                        left_start_event_offset += 1
                else:
                    if first_right_image_time != -1 and ts > first_right_image_time:
                        right_events.append(event)
                    else:
                        right_start_event_offset += 1
            if isLeft:
                if len(left_image_times) >= args.max_aug and \
                        left_events[-1][2] > (left_image_times[args.max_aug - 1] - t_start_ros).to_sec():
                    left_event_image_iter, consumed = _save_events(left_events,
                                                                   left_image_times,
                                                                   left_event_count_images,
                                                                   left_event_time_images,
                                                                   left_event_image_times,
                                                                   left_start_event_offset,
                                                                   left_image_event_start_idx,
                                                                   left_image_event_end_idx,
                                                                   rows,
                                                                   cols,
                                                                   args.max_aug,
                                                                   args.n_skip,
                                                                   left_event_image_iter,
                                                                   args.prefix,
                                                                   'left',
                                                                   left_tf_writer,
                                                                   t_start_ros,
                                                                   h5_left,
                                                                   whitelist_imageids)
                    left_start_event_offset += consumed
            else:
                if len(right_image_times) >= args.max_aug and \
                        right_events[-1][2] > (right_image_times[args.max_aug - 1] - t_start_ros).to_sec():
                    right_event_image_iter, consumed = _save_events(right_events,
                                                                    right_image_times,
                                                                    right_event_count_images,
                                                                    right_event_time_images,
                                                                    right_event_image_times,
                                                                    right_start_event_offset,
                                                                    right_image_event_start_idx,
                                                                    right_image_event_end_idx,
                                                                    rows,
                                                                    cols,
                                                                    args.max_aug,
                                                                    args.n_skip,
                                                                    right_event_image_iter,
                                                                    args.prefix,
                                                                    'right',
                                                                    right_tf_writer,
                                                                    t_start_ros,
                                                                    h5_right,
                                                                    whitelist_imageids)
                    right_start_event_offset += consumed

    left_tf_writer.close()
    right_tf_writer.close()

    image_counter_file = open(os.path.join(args.output_folder, args.prefix, "n_images.txt"), 'w')
    image_counter_file.write("{} {}".format(left_event_image_iter, right_event_image_iter))
    image_counter_file.close()