Example #1
import rosbag
from std_msgs.msg import String

bag = rosbag.Bag("objective.bag", "w")

# QmevVxGMRbLgv6grkQxYYSy4NzYnkpojm48QD7ag15uaok - ipfs hash of log file
bag.write("/log", String("QmevVxGMRbLgv6grkQxYYSy4NzYnkpojm48QD7ag15uaok"))
bag.close()
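
# A minimal read-back sketch (not part of the original example): print the IPFS
# hash that the snippet above stored on the /log topic of objective.bag.
with rosbag.Bag("objective.bag", "r") as bag:
    for topic, msg, t in bag.read_messages(topics=["/log"]):
        print(msg.data)  # the std_msgs/String payload, i.e. the IPFS hash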

    def convert(self, inputfile, outputfile):

        #TODO : deal with exceptions
        try:
            inCompleteName = os.path.expanduser(inputfile)
            self.f = open(inCompleteName, "r")
        except (IOError, ValueError):
            rospy.logerr("Couldn't open %s", inputfile)
            self.print_help()
            exit(-1)

        try:
            outCompleteName = os.path.expanduser(outputfile)
            self.bag = rosbag.Bag(outCompleteName, "w")
        except (IOError, ValueError):
            rospy.logerr("Couldn't open %s", outputfile)
            self.print_help()
            exit(-1)

        rospy.loginfo("Reading data from: %s", inCompleteName)
        rospy.loginfo("Writing rosbag to: %s", outCompleteName)
        rospy.loginfo("Starting conversion to rosbag")

        self.counter = 0

        for line in self.f:
            words = line.split()

            # Block while playback is paused
            while self.pause:
                rospy.sleep(1.0 / self.rate)

            if words[0] in self.LASER_MESSAGE_DEFINED:

                self.fillUpLaserMessage(words)

                topic = self.topics[words[0]]
                self.bag.write(topic, self.laser_msg,
                               self.laser_msg.header.stamp)

                self.laser_msg.header.seq = self.laser_msg.header.seq + 1

            elif words[0] in self.OLD_LASER_MESSAGE_DEFINED:

                self.fillUpOldLaserMessage(words)

                topic = self.topics[words[0]]

                if not self.laser_msg.header.stamp.secs == 0:
                    self.bag.write(topic, self.laser_msg,
                                   self.laser_msg.header.stamp)

                    if self.publish_corrected:

                        topic = self.topics["TF"]
                        self.bag.write(topic, self.tf2_msg,
                                       self.laser_msg.header.stamp)

                        topic = self.topics["ODOM"]
                        self.bag.write(topic, self.pose_msg,
                                       self.pose_msg.header.stamp)

                    self.laser_msg.header.seq = self.laser_msg.header.seq + 1
                    self.pose_msg.header.seq = self.pose_msg.header.seq + 1
                    self.tf_odom_robot_msg.header.seq = self.tf_odom_robot_msg.header.seq + 1

            elif words[0] in self.ROBOT_LASER_MESSAGE_DEFINED:

                self.fillUpRobotLaserMessage(words)

                topic = self.topics[words[0]]
                self.bag.write(topic, self.laser_msg,
                               self.laser_msg.header.stamp)

                if self.publish_corrected:
                    topic = self.topics["TF"]
                    self.bag.write(topic, self.tf2_msg,
                                   self.laser_msg.header.stamp)

                    topic = self.topics["ODOM"]
                    self.bag.write(topic, self.pose_msg,
                                   self.pose_msg.header.stamp)

                    self.pose_msg.header.seq = self.pose_msg.header.seq + 1
                    self.tf_laser_robot_msg.header.seq = self.tf_laser_robot_msg.header.seq + 1
                    self.tf_odom_robot_msg.header.seq = self.tf_odom_robot_msg.header.seq + 1

                self.laser_msg.header.seq = self.laser_msg.header.seq + 1

            elif self.ODOM_DEFINED == words[0]:

                self.fillUpOdomMessage(words)

                if not self.pose_msg.header.stamp.secs == 0 and not self.publish_corrected:
                    topic = self.topics["ODOM"]
                    self.bag.write(topic, self.pose_msg,
                                   self.pose_msg.header.stamp)

                    topic = self.topics["TF"]
                    self.bag.write(topic, self.tf2_msg,
                                   self.pose_msg.header.stamp)

                    self.pose_msg.header.seq = self.pose_msg.header.seq + 1
                    self.tf_odom_robot_msg.header.seq = self.tf_odom_robot_msg.header.seq + 1

            elif self.TRUEPOS_DEFINED == words[0]:

                self.fillUpTruePoseMessage(words)

                topic = self.topics[words[0]]
                self.bag.write(topic, self.true_pose_msg,
                               self.true_pose_msg.header.stamp)

                topic = self.topics["TF"]
                self.bag.write(topic, self.tf2_msg, self.tf_msg.header.stamp)

                self.true_pose_msg.header.seq = self.true_pose_msg.header.seq + 1

            elif self.PARAM_DEFINED == words[0]:

                self.param(words)

            else:
                if words[0] not in self.unknown_entries:
                    self.unknown_entries.append(words[0])
                    if words[0] != "#":
                        rospy.logerr("Unknown entry %s!", words[0])

            self.increment_stamp()
            self.tf2_msg = TFMessage()
Example #3
def saveTopicVideo(bag_path, topic_name, start_time, end_time, msg_num,
                   frame_width, frame_height, frame_rate, frame_encoding,
                   save_path):
    if "rgb8" in frame_encoding:
        target_encoding = "bgr8"
    else:
        target_encoding = frame_encoding

    fourcc = cv2.VideoWriter_fourcc(*'XVID')
    out_video = cv2.VideoWriter(save_path, fourcc, frame_rate,
                                (frame_width, frame_height))

    counter = 0

    if frame_encoding == "mono8":
        with rosbag.Bag(bag_path, 'r') as bag:
            bridge = CvBridge()
            for topic, msg, t in bag.read_messages():
                cur_time = msg.header.stamp.to_sec()
                if start_time <= cur_time <= end_time:
                    if topic == topic_name:
                        try:
                            cv_img = bridge.imgmsg_to_cv2(msg, target_encoding)
                            out_video.write(
                                cv2.cvtColor(cv_img, cv2.COLOR_GRAY2BGR))

                            timestr = "%.6f" % msg.header.stamp.to_sec()
                            counter += 1

                            print(
                                str(counter) + "/" + str(msg_num) +
                                ", " + timestr + "/" + str(end_time))
                        except CvBridgeError as e:
                            print(e)
    elif frame_encoding == "rgb8":
        with rosbag.Bag(bag_path, 'r') as bag:
            bridge = CvBridge()
            for topic, msg, t in bag.read_messages():
                cur_time = msg.header.stamp.to_sec()
                if start_time <= cur_time <= end_time:
                    if topic == topic_name:
                        try:
                            cv_img = bridge.imgmsg_to_cv2(msg, target_encoding)
                            out_video.write(cv_img)

                            timestr = "%.6f" % msg.header.stamp.to_sec()
                            counter += 1

                            print(
                                str(counter) + "/" + str(msg_num) +
                                ", " + timestr + "/" + str(end_time))
                        except CvBridgeError as e:
                            print(e)
    elif frame_encoding == "bgr8":
        with rosbag.Bag(bag_path, 'r') as bag:
            bridge = CvBridge()
            for topic, msg, t in bag.read_messages():
                cur_time = msg.header.stamp.to_sec()
                if start_time <= cur_time <= end_time:
                    if topic == topic_name:
                        try:
                            cv_img = bridge.imgmsg_to_cv2(msg, target_encoding)
                            # Frame is already BGR, which is what VideoWriter expects
                            out_video.write(cv_img)

                            timestr = "%.6f" % msg.header.stamp.to_sec()
                            counter += 1

                            print(
                                str(counter) + "/" + str(msg_num) +
                                ", " + timestr + "/" + str(end_time))
                        except CvBridgeError as e:
                            print(e)
    else:
        print("Unsupported data format: " + frame_encoding)

    out_video.release()
    print("Video file has been saved at:" + save_path)
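
# A hypothetical invocation of saveTopicVideo; bag path, topic, time window,
# resolution, frame rate and output path are placeholder assumptions, not
# values from the original.
saveTopicVideo("run1.bag", "/camera/image_raw",
               start_time=0.0, end_time=1e12, msg_num=1000,
               frame_width=640, frame_height=480, frame_rate=30.0,
               frame_encoding="bgr8", save_path="run1.avi")
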
def makePlot(test, save):

    XSqrPts = [0, 14, 14, 0, 0, 14, 14, 0, 0, 17, 17, -1, -2]
    YSqrPts = [0, 0, 4, 4, 8, 8, 12, 12, 16, 16, -2, -2, -3]

    dataSet = test

    data =  {'scircle_1_zero_1_fast': {'xoffset': 0,'yoffset':0,'scale':4.111672661,'file':"zeroDeg/scircle_1_zero_1_fast.bag"}, #scaling factor averaged
              #did not work. has a small straight line going in the wrong direction
             'scircle_1_zero_2_fast': {'xoffset': 0,'yoffset':0,'scale':4.111672661,'file':"zeroDeg/scircle_1_zero_2_fast.bag"}, #scaling factor averaged
              #did not work. has a small straight line going in the wrong direction
             'scircle_1_zero_3':      {'xoffset': 0,'yoffset':0,'scale':4.111672661,'file':"zeroDeg/scircle_1_zero_3.bag"}, #scaling factor averaged
             #did not work. has a weird line that does a uturn
             'scircle_3_zero_1':      {'xoffset': 0.07,'yoffset':7,'scale':-5.329780147,'file':"zeroDeg/scircle_3_zero_1.bag"},
             # worked fairly well, only slight drift.
             'scircle_3_zero_2':      {'xoffset': 0.1,'yoffset':7.136,'scale':-5.294506949,'file':"zeroDeg/scircle_3_zero_2.bag"}, 
             # worked fairly well, only slight drift.
             'scircle_3_zero_3':      {'xoffset': 0,'yoffset':0,'scale':-5.312143548,'file':"zeroDeg/scircle_3_zero_3.bag"}, #scaling factor averaged 
             #worked well until it completely failed. 
             'scorn_1_zero_1':        {'xoffset': (14-12.65),'yoffset':0,'scale':-4.101963082,'file':"zeroDeg/scorn_1_zero_1.bag"},
             'scorn_1_zero_2':        {'xoffset': (14 - 13.8388),'yoffset':(-0.0331184),'scale':-4.097161253,'file':"zeroDeg/scorn_1_zero_2.bag"}, 
             'scorn_1_zero_3':        {'xoffset': (14 - 13.5169),'yoffset':(-0.023974),'scale':-4.135893648,'file':"zeroDeg/scorn_1_zero_3.bag"},
             'scorn_3_zero_1':        {'xoffset': (14 - 11.6039),'yoffset':(-0.0548116),'scale':-6.021505376,'file':"zeroDeg/scorn_3_zero_1.bag"},
             'scorn_3_zero_2':        {'xoffset': (14 - 13.613),'yoffset':(-0.0566209),'scale':-6.113537118,'file':"zeroDeg/scorn_3_zero_2.bag"}, 
             'scorn_3_zero_3':        {'xoffset': (14 - 13.171),'yoffset':(-0.0503621),'scale':-6.024096386,'file':"zeroDeg/scorn_3_zero_3.bag"},
             'scircle_1_20deg_1':     {'xoffset': 0,'yoffset':0,'scale':1,'file':"20Deg/scircle_1_20deg_1.bag"}, 
             #did not complete and no scaling data
             'scircle_1_20deg_2':     {'xoffset': 0,'yoffset':0,'scale':1,'file':"20Deg/scircle_1_20deg_2.bag"},
             #did not complete and no scaling data
             'scircle_1_20deg_3':     {'xoffset': 0,'yoffset':0,'scale':1,'file':"20Deg/scircle_1_20deg_3.bag"},
             #did not complete and no scaling data
             'scircle_3_20deg_1':     {'xoffset': 0,'yoffset':7.4,'scale':-5.721212468,'file':"20Deg/scircle_3_20deg_1.bag"},
             #worked pretty well
             'scircle_3_20deg_2':     {'xoffset': 0,'yoffset':7.4,'scale':-5.739210285,'file':"20Deg/scircle_3_20deg_2.bag"}, 
             #worked pretty well
             'scircle_3_20deg_3':     {'xoffset': 0,'yoffset':7.45,'scale':-5.730211376,'file':"20Deg/scircle_3_20deg_3.bag"}, #scaling factor averaged
             # failed part way through but looks decent before that
             'scorn_1_20deg_1':       {'xoffset': 0,'yoffset':0,'scale':1,'file':"20Deg/scorn_1_20deg_1.bag"},
             'scorn_1_20deg_2':       {'xoffset': 0,'yoffset':0,'scale':1,'file':"20Deg/scorn_1_20deg_2.bag"}, 
             'scorn_1_20deg_3':       {'xoffset': 0,'yoffset':0,'scale':1,'file':"20Deg/scorn_1_20deg_3.bag"},
             'scorn_3_20deg_1':       {'xoffset': (14 - 13.56023),'yoffset':(-0.00795469),'scale':-6.636352755,'file':"20Deg/scorn_3_20deg_1.bag"},
             'scorn_3_20deg_2':       {'xoffset': (14 - 13.9493),'yoffset':(-0.0753021),'scale':-6.675518818,'file':"20Deg/scorn_3_20deg_2.bag"}, 
             'scorn_3_20deg_3':       {'xoffset': (14 - 13.103),'yoffset':(-0.028339),'scale':-6.706766744,'file':"20Deg/scorn_3_20deg_3.bag"},      
         }

    px = []
    py = []
    pz = []

    posex = []
    posey = []
    posez = []

    bag = rosbag.Bag(data[dataSet]['file'])

    for topic, msg, t in bag.read_messages(topics=['/svo/pose']):
        #print msg

        posex.append(msg.pose.pose.position.x)
        posey.append(msg.pose.pose.position.y)
        posez.append(msg.pose.pose.position.z)

    # for topic, msg, t in bag.read_messages(topics=['/svo/points']):
    #     #print msg

    #     px.append(msg.pose.position.x)
    #     py.append(msg.pose.position.y)
    #     pz.append(msg.pose.position.z)

    fig3 = plt.figure()
    ax = fig3.add_subplot(111, projection='3d')
    ax.scatter(posex, posey, posez, c='r', marker='o')
    plt.xlabel('x')
    plt.ylabel('y')


    fig4 = plt.figure()
    plt.plot( [x * abs(data[dataSet]['scale']) for x in posey], [x * abs(data[dataSet]['scale']) for x in posez], 'ro')
    offLineX = [3*2.747, -3*2.747] #x
    offLineY = [-3, 3] #y
    plt.plot(offLineX, offLineY, 'b')
    plt.xlabel('Inches')
    plt.ylabel('Inches')
    plt.axis('equal')

    if dataSet[-7:-2] == "20deg":
        print "adjusting angle"
        poseym =  [x/cos(0.34906585) for x in posey]
        posey = poseym

        posezm = [a + b*sin(0.34906585) for a, b in zip(posez, posey)]
        posez = posezm

    
    #ax.scatter(posex, posey, c='b', marker='o')
    fig1 = plt.figure()
    plt.plot( [x * data[dataSet]['scale'] + data[dataSet]['xoffset'] for x in posex], [x * data[dataSet]['scale'] + data[dataSet]['yoffset'] for x in posey], 'ro')


    if dataSet[:3] == "sco" and data[dataSet]['scale'] != 1:
        plt.plot(XSqrPts, YSqrPts, 'b')

    if dataSet[:3] == "sci" and data[dataSet]['scale'] != 1:
        x = linspace(0,2*pi, 200)
        plt.plot(8*cos(x)+8, 8*sin(x)+8,'b')
        offLineX = [-1, 0]
        offLineY = [-1, 8]
        plt.plot(offLineX, offLineY, 'b')


    plt.title(dataSet)
    plt.xlabel('Inches')
    plt.ylabel('Inches')
    #plot( 3*cos(an), 3*sin(an) )
    plt.axis('equal')

    fig2 = plt.figure()
    plt.plot( [x * abs(data[dataSet]['scale']) for x in posez], 'ro')
    plt.ylabel('Inches')


    #plt.zlabel('z')
    

    if save:
        fig1.savefig('Graphs/' + dataSet + '_xy.png', format="png")
        fig1.savefig('Graphs/' + dataSet + '_xy.eps', format="eps")
        fig2.savefig('Graphs/' + dataSet + '_z.png',  format="png")
        fig2.savefig('Graphs/' + dataSet + '_z.eps',  format="eps")
        print "saved"

    plt.show()
    bag.close()
Example #5
def plotBag(bag_file, teach, failed):
    global repeat_counter, fig, ax
    quiver_counter = 0

    x_ = []
    y_ = []
    yaw_ = []

    avg_x = 0.
    avg_y = 0.
    avg_yaw = 0.

    temp_x = []
    temp_y = []
    temp_yaw = []

    bag = rosbag.Bag(bag_file)
    for topic, msg, t in bag.read_messages(
            topics=['/vicon/bebop_vtr/bebop_vtr']):
        quaternion = (msg.transform.rotation.x, msg.transform.rotation.y,
                      msg.transform.rotation.z, msg.transform.rotation.w)
        euler = tf.transformations.euler_from_quaternion(quaternion)
        temp_x.append(msg.transform.translation.x)
        temp_y.append(msg.transform.translation.y)
        yaw = euler[2]
        yaw = yaw - .4  # vicon offset
        if (len(temp_yaw) < 3
                or abs(sum(temp_yaw) / float(len(temp_yaw)) - yaw) < 0.5):
            temp_yaw.append(yaw)
        if (quiver_counter < 1
                and len(temp_x) > 10) or (quiver_counter >= 1
                                          and len(temp_x) > 150):
            avg_x = sum(temp_x) / float(len(temp_x))
            avg_y = sum(temp_y) / float(len(temp_y))
            avg_yaw = sum(temp_yaw) / float(len(temp_yaw))

            if (quiver_counter < 1):
                r = 0.4
                u = r * cos(avg_yaw)
                v = r * sin(avg_yaw)
                # ax.quiver(avg_x, avg_y, u, v, angles='xy', scale_units='xy', scale=1.2)
                ax.quiver(avg_y,
                          avg_x,
                          v,
                          u,
                          angles='xy',
                          scale_units='xy',
                          scale=1.2)
                quiver_counter = quiver_counter + 1
            else:
                x_.append(avg_x)
                y_.append(avg_y)
                yaw_.append(avg_yaw)


#			x_.append(avg_x)
#			y_.append(avg_y)
#			yaw_.append(avg_yaw)

            temp_x = []
            temp_y = []
            temp_yaw = []

    bag.close()
    if teach == True:
        # plt.plot(x_, y_, label="Teaching Trail", linewidth=7.0)
        plt.plot(y_, x_, label="Teaching Trail", linewidth=7.0)
    else:
        #	repeat_counter = repeat_counter + 1
        if failed == True:
            plt.plot(y_, x_, linewidth=3.0)
        else:
            plt.plot(y_, x_, linewidth=1.0)
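
# A hypothetical driver for plotBag (bag file names are placeholders); it creates
# the module-level globals fig, ax and repeat_counter that plotBag expects.
import matplotlib.pyplot as plt

repeat_counter = 0
fig, ax = plt.subplots()

plotBag("teach_run.bag", teach=True, failed=False)      # teaching trail, thick line
plotBag("repeat_run_1.bag", teach=False, failed=False)  # successful repeat, thin line
plt.legend()
plt.show()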
Example #6
#!/usr/bin/env python
import rosbag
import sys, os
import rospy
import numpy as np
import matplotlib.pyplot as plt

bag = rosbag.Bag(sys.argv[1])
plot = True
speed = 5.0
pre_time = 0.5
time = 1.0e5

for topic, msg, t in bag.read_messages(topics='/command_pose'):
    start_time = msg.header.stamp
    break

d1x = []
d2x = []
d3x = []
d1y = []
d2y = []
d3y = []
d1z = []
d2z = []
d3z = []

t0 = []
x0 = []
y0 = []
z0 = []
Example #7
def run(args):
    import sys
    from evo.core import metrics
    from evo.tools import file_interface, settings

    # manually check bins and tols arguments to allow them to be in config files
    if not args.bins or not args.tols:
        logging.error(
            "the following arguments are required: -b/--bins, -t/--tols")
        sys.exit(1)

    settings.configure_logging(args.verbose, args.silent, args.debug)
    if args.debug:
        import pprint
        logging.debug(
            "main_parser config:\n" +
            pprint.pformat({arg: getattr(args, arg)
                            for arg in vars(args)}) + "\n")
    logging.debug(SEP)

    pose_relation = None
    if args.pose_relation == "trans_part":
        pose_relation = metrics.PoseRelation.translation_part
    elif args.pose_relation == "angle_deg":
        pose_relation = metrics.PoseRelation.rotation_angle_deg
    elif args.pose_relation == "angle_rad":
        pose_relation = metrics.PoseRelation.rotation_angle_rad

    traj_ref, traj_est, stamps_est = None, None, None
    ref_name, est_name = "", ""
    if args.subcommand == "tum":
        traj_ref, traj_est = file_interface.load_assoc_tum_trajectories(
            args.ref_file,
            args.est_file,
            args.t_max_diff,
            args.t_offset,
        )
        ref_name, est_name = args.ref_file, args.est_file
    elif args.subcommand == "kitti":
        traj_ref = file_interface.read_kitti_poses_file(args.ref_file)
        traj_est = file_interface.read_kitti_poses_file(args.est_file)
        ref_name, est_name = args.ref_file, args.est_file
    elif args.subcommand == "euroc":
        args.align = True
        logging.info(
            "forcing trajectory alignment implicitly (EuRoC ground truth is in IMU frame)"
        )
        logging.debug(SEP)
        traj_ref, traj_est = file_interface.load_assoc_euroc_trajectories(
            args.state_gt_csv,
            args.est_file,
            args.t_max_diff,
            args.t_offset,
        )
        ref_name, est_name = args.state_gt_csv, args.est_file
    elif args.subcommand == "bag":
        import rosbag
        logging.debug("opening bag file " + args.bag)
        bag = rosbag.Bag(args.bag, 'r')
        try:
            traj_ref, traj_est = file_interface.load_assoc_bag_trajectories(
                bag,
                args.ref_topic,
                args.est_topic,
                args.t_max_diff,
                args.t_offset,
            )
        finally:
            bag.close()
        ref_name, est_name = args.ref_topic, args.est_topic

    main_rpe_for_each(traj_ref,
                      traj_est,
                      pose_relation,
                      args.mode,
                      args.bins,
                      args.tols,
                      args.align,
                      args.correct_scale,
                      ref_name,
                      est_name,
                      args.plot,
                      args.save_plot,
                      args.save_results,
                      args.no_warnings,
                      serialize_plot=args.serialize_plot)
Example #8
import argparse
import rosbag

parser = argparse.ArgumentParser(description='Extract mp3 file from audio topic in ros bag.')
parser.add_argument('bagfile', help='ros bag file to use as input')
parser.add_argument('outfile', help='destination of output')
args = parser.parse_args()

bag = rosbag.Bag(args.bagfile)
output = open(args.outfile, 'wb')  # audio frames are binary data

for topic, msg, t in bag.read_messages(topics=['/audio/audio']):
    output.write(''.join(msg.data))

bag.close()
output.close()
bag_file = '/home/mayank_sati/Documents/2011_09_30_drive_18.bag'

# info_dict = yaml.load(Bag(bag_file, 'r')._get_yaml_info())
#
# # Print the information contained in the info_dict
# info_dict.keys()
# for topic in info_dict["topics"]:
#     print("-"*50)
#     for k,v in topic.items():
#         print(k.ljust(20), v)

import numpy as np  # used below for np.fromstring
import rosbag
import mayavi.mlab
import sensor_msgs.point_cloud2 as pc2

bag = rosbag.Bag(bag_file)
for topic, msg, t in bag.read_messages(topics=['velodyne_points']):
    # print (msg)
    # msgString = str(msg)
    # msgList = string.split(msgString, '\n')
    lidar = np.fromstring(msg.data, dtype=np.float32)
    lidar = lidar.reshape(-1, 4)
    a = pc2.read_points(msg)

    lidar = pc2.read_points(msg)
    lidar = np.array(list(lidar))
    # import mayavi.mlab

    fig = mayavi.mlab.figure(bgcolor=(0, 0, 0), size=(800, 500))

    mayavi.mlab.points3d(
def preprocess(path='./', new_thymio=False, distances=None):
    """Extracts data from the bag files in a given path, converts them into dataframes and saves them into an HDF5 binary file.

    Args:
        path: the path in which the bag files are.
        new_thymio: a string containing the number of the thymio for new thymios, or False for the old-version thymio.
        distances: list of distances used to build the target proximity columns; defaults to [0.0, 3.3, 6.6, 10.0].
    """
    if new_thymio:
        cam = '/thymio' + new_thymio + '/camera/image_raw/compressed'
        prox = '/thymio' + new_thymio + '/proximity/laser'
        odom = '/thymio' + new_thymio + '/odom'
    else:
        cam = '/camera/image_rect_color/compressed'
        prox = '/proximity/laser'
        odom = '/odom'

    if distances is None:
        distances = [0.0, 3.3, 6.6, 10.0]

    extractors = {
        cam: lambda m: {
            'camera': jpeg2np(m.data, (80, 64))
        },
        prox: lambda m: {
            'prox_lx': m.intensities[4],
            'prox_cl': m.intensities[3],
            'prox_cx': m.intensities[2],
            'prox_cr': m.intensities[1],
            'prox_rx': m.intensities[0]
        },
        odom: lambda m: {
            'pos_x': m.pose.pose.position.x,
            'pos_y': m.pose.pose.position.y,
            'theta': quaternion2yaw(m.pose.pose.orientation)
        }
    }

    files = [file[:-4] for file in os.listdir(path) if file[-4:] == '.bag']

    if not files:
        print('No bag files found!')
        return None

    h5f = h5py.File('data/' + str(datetime.now()) + '.h5', 'w')

    for index, file in enumerate(files):
        print('Found ' + path + file + '.bag')

        dfs = bag2dfs(rosbag.Bag(path + file + '.bag'), extractors)

        df = mergedfs(dfs)
        df = next_prox(df, distances)

        l = len(df)

        df.fillna(-1.0, inplace=True)
        df['camera'] = df['camera'].apply(lambda x: (x - x.mean()) / x.std())

        Xs = h5f.create_dataset('bag' + str(index) + '_x',
                                shape=(l, 64, 80, 3),
                                maxshape=(None, 64, 80, 3),
                                dtype=np.float,
                                data=None,
                                chunks=True)
        Ys = h5f.create_dataset('bag' + str(index) + '_y',
                                shape=(l, 5 * len(distances)),
                                maxshape=(None, 5 * len(distances)),
                                dtype=np.float,
                                data=None,
                                chunks=True)

        cols = [
            't_%.1f_prox_%s' % (d, sensor) for d in distances
            for sensor in ['lx', 'cl', 'cx', 'cr', 'rx']
        ]

        Xs[:] = np.stack(df['camera'].values)
        Ys[:] = df[cols].values

    h5f.close()
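
# A hypothetical call to preprocess (directory and Thymio id are placeholder
# assumptions); the data/ output directory used above is assumed to exist.
preprocess(path='./recordings/', new_thymio='10')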
Example #11
                    clusters[obj_count + 1] = []
                    for g in group:
                        predicted_obj_id[g] = obj_count + 1
                        clusters[obj_count + 1].append(g)
                    obj_count += 1

    t = time.time() - t
    comp_time.append(t)
    print('Scan #%3d: cur:%4d/%3d agg:%5d/%3d acc %.2f time %.3f' %
          (count_msg, len(update_list),
           len(set(obj_id_list[len(obj_id_list) - len(update_list):])),
           len(point_id_map), len(set(obj_id_list)), acc, t))
    count_msg += 1


bag = rosbag.Bag('data/%s_%s.bag' % (dataset, VAL_AREA), 'r')
poses = []
for topic, msg, t in bag.read_messages(topics=['slam_out_pose']):
    poses.append([msg.pose.position.x, msg.pose.position.y])
i = 0
for topic, msg, t in bag.read_messages(topics=['laser_cloud_surround']):
    process_cloud(msg, poses[i])
    i += 1
#	if i==1:
#		break

#ignore clutter in evaluation
valid_mask = numpy.array(obj_id_list) > 0
gt_obj_id = numpy.array(obj_id_list)[valid_mask]
predicted_obj_id = numpy.array(predicted_obj_id)[valid_mask]
gt_cls_id = numpy.array(cls_id_list)[valid_mask]
class singleFrame:
    def __init__(self, lmsg, rmsg, vmsg, frameN):
        self.left = cvb.imgmsg_to_cv2(lmsg)
        self.right = cvb.imgmsg_to_cv2(rmsg)
        self.viso = copy.deepcopy(vmsg)
        self.nNumber = frameN

    def getHomography(self):
        return deserialHomography(self.viso.homography)


inDir = sys.argv[1]

print("Reading from " + inDir)

inputBag = rosbag.Bag(inDir)

leftImages = []
rightImages = []
outputData = []

print("extracting Topic Data")

for topic, msg, t in inputBag.read_messages(
        topics=['/viso_extractor/output']):  # ,'/bumblebee/left/ROI','/bumblebee/right/ROI'
    if (topic == "/viso_extractor/output"):
        outputData.append(msg)
        print(len(outputData))
    # if(topic=="/bumblebee/left/ROI"):
    #     leftImages.append(msg)
Example #13
flip = False

# Drone topics and laps used to dynamically change what topics the image writes to in rosbag
# i.e. the first half ring of drone_1 is "drone_1_lap_1" in the rosbag, second half ring is "drone_1_lap_2" etc
# simplifies the extraction process
drone_topic = 'drone_'
drone_lap = 1
drone_1_topic = 'drone_1'
drone_2_topic = 'drone_2'
drone_1_lap = 1
drone_2_lap = 1

drone_image = Image()
drone_image_1 = Image()
drone_image_2 = Image()
bag = rosbag.Bag('image_bag', 'w') 

# Callbacks to set the drone's camera feed to a temp "drone_image" variable for one or double drones
# in order to write to rosbag
def image_callback(msg):

    global drone_image

    drone_image = msg

def image1_callback(msg):

    global drone_image_1

    drone_image_1 = msg
Example #14
import rosbag
from lfd import bag_proc

bag = rosbag.Bag("/home/joschu/Data/bags/test_rope.bag")
bag_proc.get_button_presses(bag)
Example #15
if __name__ == '__main__':
    rospy.init_node('sensor_model', anonymous=True)

    # get map
    rospy.wait_for_service('static_map')
    static_map = rospy.ServiceProxy('/static_map', GetMap)
    rospy.loginfo("Done waiting for map service.")
    map_resp = static_map()
    map_data_shaped = np.array(map_resp.map.data).reshape(
        (map_resp.map.info.height, map_resp.map.info.width))
    map_data_shaped = map_data_shaped >= 0

    # read laser scan from bag file
    bag_path = rospy.get_param("laser_scan_bag")
    scan_msg = list(rosbag.Bag(bag_path).read_messages())[0][1]

    # generate and plot heatmap
    heatmap = generate_heatmap(map_resp, scan_msg)
    plot_heatmap(map_data_shaped, heatmap)

    # plot discretization vs runtime
    #for theta in [3,5,7,10,15,20,50]:
    #  HEATMAP_NUM_THETA = theta
    #for step in [200, 100, 50, 20, 10, 5, 3, 1]:
    #  HEATMAP_XY_STEP = step
    #  start = time.time()
    #  heatmap = generate_heatmap(map_resp, scan_msg)
    #  plot_heatmap(map_data_shaped, heatmap)
    #  end = time.time()
Example #16
    return time

def get_next_point(data, ptCnt):
    if ptCnt >= data.width:
        point3fi = struct.pack("4f", 0,0,0,0)
    else:
        point3fi =  data.data[ptCnt*SIZE_OF_POINT+0:ptCnt*SIZE_OF_POINT+4]
        point3fi += data.data[ptCnt*SIZE_OF_POINT+4:ptCnt*SIZE_OF_POINT+8]
        point3fi += data.data[ptCnt*SIZE_OF_POINT+8:ptCnt*SIZE_OF_POINT+12]
        point3fi += data.data[ptCnt*SIZE_OF_POINT+16:ptCnt*SIZE_OF_POINT+20]
    ptCnt += 1
    return point3fi, ptCnt

# main
outf = open(DSV_PATH, "wb")
bag = rosbag.Bag(ROS_BAG_PATH)

for topic, msg, ts in bag.read_messages(topics = [NAV_TOPIC, VELO_TOPIC]):
    # nav/Odometry messages
    if topic == NAV_TOPIC:
        navMsg = msg
    # sensor_msgs/PointCloud2 message
    if topic == VELO_TOPIC:
        gx, gy, gz, roll, pitch, yaw = parse_nav_msg(navMsg)
        millisec = int(get_millisec(ts))
        sNav = struct.pack("6dq", roll, pitch, yaw, gx, gy, gz, millisec)
        print(millisec, roll, pitch, yaw)
        # parse velodyne points
        ptCnt = 0
        for i in range(BLOCK_NUM_PER_FRAME / VELO_HZ_RATIO):
            outf.write(sNav)
Example #17
    sys.exit(2)

info_dict = yaml.load(
    subprocess.Popen(['rosbag', 'info', '--yaml', infilename],
                     stdout=subprocess.PIPE).communicate()[0])

msg_count = 0
for topic in info_dict["topics"]:
    msg_count += topic["messages"]

print '\nSpeeding up bag file ' + infilename + ' consisting of ' + str(
    msg_count) + ' messages by a factor of ' + str(rate)

gotFirstTimestamp = False
msg_index = 0
outbag = rosbag.Bag(outfilename, 'w')
try:
    for topic, msg, timestamp in rosbag.Bag(infilename).read_messages():
        if not gotFirstTimestamp:
            firstTimestamp = timestamp
            gotFirstTimestamp = True

        # Calculate corrected timestamp
        correctedTimestamp = firstTimestamp + (timestamp -
                                               firstTimestamp) / rate

        # Fix header timestamps
        if msg._has_header:
            msg.header.stamp = correctedTimestamp

        # Fix TF transforms
Example #18
            break
        elif entry == 'n' or entry == 'N':
            sys.exit()

print "\nFormatting: .rosbag -> .aedat (This should take a couple of minutes)\n"

# open the file and write the headers
file = open(aedatFile, "w")
file.write('#!AER-DAT2.0\r\n')
file.write('# This is a raw AE data file created by saveaerdat.m\r\n')
file.write(
    '# Data format is int32 address, int32 timestamp (8 bytes total), repeated for each event\r\n'
)
file.write('# Timestamps tick is 1 us\r\n')

# open the rosbag file and process the events
bag = rosbag.Bag(bagFile)
for topic, msg, t in bag.read_messages(topics=['/dvs/events']):
    for e in msg.events:

        ts = int(e.ts.to_nsec() / 1000.0)
        x = '{0:07b}'.format(e.x)
        y = '{0:07b}'.format(e.y)
        p = '1' if e.polarity else '0'
        address = "0" + y + x + p

        # write the event using big endian format
        file.write("%s" % struct.pack('>I', int(address, 2)))
        file.write("%s" % struct.pack('>I', int(ts)))

bag.close()
    mingap = max(mingap, -mingap)
    for j in positions:
        gap = j.header.stamp.nsecs - nsec
        gap = max(gap, -gap)
        if gap < mingap:
            mingap = gap
            argmin = j
    img = bridge.imgmsg_to_cv2(msg, "bgr8")
    return [img, argmin.position, argmin.velocity, argmin.effort, nsec]


if __name__ == "__main__":
    imageName = sys.argv[1]
    jointName = sys.argv[2]
    saveName = sys.argv[3]
    imageBag = rosbag.Bag(imageName)
    jointBag = rosbag.Bag(jointName)
    bridge = CvBridge()
    cur = 0
    joints = []
    data = []
    for topic, msg, t in jointBag.read_messages(
            topics=['/robot/joint_states']):
        sectime = msg.header.stamp.secs
        if sectime > cur:
            cur = sectime
            joints.append([msg])
        else:
            joints[-1].append(msg)
    firstT = joints[0][0].header.stamp.secs
    for topic, msg, t in imageBag.read_messages(topics=['image_raw']):
Example #20
def bag_tf_poses(timed_pos_quat,
                 bagpath,
                 child_frame_id="vins",
                 red=1.0,
                 green=1.0,
                 blue=0.0):
    """Write a Path and a camera-frustum Marker for each pose to a rosbag.

    :param timed_pos_quat: iterable of entries (stamp, x, y, z, qx, qy, qz, qw)
    :param bagpath: output bag file; appended to if it already exists, otherwise created
    :param child_frame_id: frame id used in the output topic names
    :param red, green, blue: frustum marker colour
    :return: None
    """
    if os.path.isfile(bagpath):
        bag = rosbag.Bag(bagpath, "a")
    else:
        bag = rosbag.Bag(bagpath, "w")

    path = Path()
    for index, entry in enumerate(timed_pos_quat):
        odometry = Odometry()
        odometry.header.stamp = entry[0]
        odometry.header.seq = index
        odometry.header.frame_id = "world"
        odometry.child_frame_id = child_frame_id

        odometry.pose.pose.position.x = entry[1]
        odometry.pose.pose.position.y = entry[2]
        odometry.pose.pose.position.z = entry[3]
        odometry.pose.pose.orientation.x = entry[4]
        odometry.pose.pose.orientation.y = entry[5]
        odometry.pose.pose.orientation.z = entry[6]
        odometry.pose.pose.orientation.w = entry[7]

        odometry.twist.twist.linear.x = 0
        odometry.twist.twist.linear.y = 0
        odometry.twist.twist.linear.z = 0

        pose_stamped = PoseStamped()
        pose_stamped.header.stamp = entry[0]
        pose_stamped.header.seq = index
        pose_stamped.header.frame_id = "world"
        pose_stamped.pose = odometry.pose.pose

        path.header.stamp = entry[0]
        path.header.seq = index
        path.header.frame_id = "world"
        path.poses.append(pose_stamped)
        if len(path.poses) > 2:
            path.poses.pop(0)

        marker = rviz_camera_frustum.generate_frustum_marker(
            entry[1:], 1.0, entry[0], red, green, blue)
        marker.header.frame_id = "world"
        marker.header.stamp = entry[0]
        marker.header.seq = index

        # see https://wiki.ros.org/rviz/DisplayTypes/Marker#Triangle_List_.28TRIANGLE_LIST.3D11.29_.5B1.1.2B-.5D for drawing complex markers
        bag.write("/path_{}".format(child_frame_id), path, entry[0])
        bag.write("/odom_{}".format(child_frame_id), marker, entry[0])

    bag.close()
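
# A hypothetical call to bag_tf_poses; the entry layout (stamp, x, y, z, qx, qy, qz, qw)
# is inferred from the loop above, and the path and values are placeholders.
import rospy

timed_pos_quat = [
    (rospy.Time.from_sec(1.0), 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0),
    (rospy.Time.from_sec(2.0), 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0),
]
bag_tf_poses(timed_pos_quat, "/tmp/frustum_path.bag", child_frame_id="vins")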
Example #21
try:
    width = sys.argv[sys.argv.index('-w') + 1]
    height = sys.argv[sys.argv.index('-h') + 1]
    output_size = [int(width), int(height)]
except ValueError:
    print 'No output size specified, using default'
    output_size = (64, 48)

# Save the last read image for use as Y0 when command is read
Y_last = 0

# Keep track of the actual zoom for the last command and the current zoom
zoom_last = 250
zoom = 100

out_bag = rosbag.Bag(outfile, 'w')

# Define state integers for image capturing states
state_takeY0 = 0  # Waiting for first image
state_wait_for_move = 1  # Wait for camera to start moving
state_cam_moving = 2  # State for when camera is moving
state_takeY1 = 3  # Waiting for last image

next_state = -1
state_capture = 0
diff_treshold = 0.001

move_waits = 0
move_timeout = 9
move_time_timeout = 0.5
#time0 = rospy.get_time()
Example #22
def create_plots(
    bagfile,
    output_pdf_file,
    output_csv_file="results.csv",
    groundtruth_bagfile=None,
    rmse_rel_start_time=0,
    rmse_rel_end_time=-1,
):
    bag = rosbag.Bag(bagfile)
    groundtruth_bag = rosbag.Bag(
        groundtruth_bagfile) if groundtruth_bagfile else bag
    bag_start_time = bag.get_start_time()

    has_imu_augmented_graph_localization_state = has_topic(bag, "/gnc/ekf")
    has_imu_bias_tester_poses = has_topic(bag, "/imu_bias_tester/pose")
    sparse_mapping_poses = poses.Poses("Sparse Mapping",
                                       "/sparse_mapping/pose")
    ar_tag_poses = poses.Poses("AR Tag", "/ar_tag/pose")
    imu_bias_tester_poses = poses.Poses("Imu Bias Tester",
                                        "/imu_bias_tester/pose")
    vec_of_poses = [ar_tag_poses, imu_bias_tester_poses]
    load_pose_msgs(vec_of_poses, bag, bag_start_time)
    has_depth_odom = has_topic(bag, "/loc/depth/odom")
    depth_odom_relative_poses = poses.Poses("Depth Odom", "/loc/depth/odom")
    load_odometry_msgs([depth_odom_relative_poses], bag, bag_start_time)
    groundtruth_vec_of_poses = [sparse_mapping_poses]
    load_pose_msgs(groundtruth_vec_of_poses, groundtruth_bag, bag_start_time)

    graph_localization_states = loc_states.LocStates("Graph Localization",
                                                     "/graph_loc/state")
    imu_augmented_graph_localization_states = loc_states.LocStates(
        "Imu Augmented Graph Localization", "/gnc/ekf")
    vec_of_loc_states = [
        graph_localization_states,
        imu_augmented_graph_localization_states,
    ]
    load_loc_state_msgs(vec_of_loc_states, bag, bag_start_time)

    imu_bias_tester_velocities = velocities.Velocities(
        "Imu Bias Tester", "/imu_bias_tester/velocity")
    load_velocity_msgs(imu_bias_tester_velocities, bag, bag_start_time)

    bag.close()

    with PdfPages(output_pdf_file) as pdf:
        add_graph_plots(
            pdf,
            sparse_mapping_poses,
            ar_tag_poses,
            graph_localization_states,
            imu_augmented_graph_localization_states,
        )
        if has_imu_bias_tester_poses:
            add_imu_bias_tester_poses(pdf, imu_bias_tester_poses,
                                      sparse_mapping_poses)
            add_imu_bias_tester_velocities(pdf, imu_bias_tester_velocities)
        if has_imu_augmented_graph_localization_state:
            add_other_loc_plots(
                pdf,
                graph_localization_states,
                imu_augmented_graph_localization_states,
                sparse_mapping_poses,
                ar_tag_poses,
            )
        else:
            add_other_loc_plots(pdf, graph_localization_states,
                                graph_localization_states)
        if has_depth_odom:
            depth_odom_poses = utilities.make_absolute_poses_from_relative_poses(
                sparse_mapping_poses, depth_odom_relative_poses,
                "Depth Odometry")
            plot_poses(pdf, depth_odom_poses, sparse_mapping_poses,
                       ar_tag_poses)
            # Note that for the absolute time difference tolerance, depth images and
            # groundtruth use different sensor data and therefore have less similar
            # timestamps. This timestamp difference reduces the accuracy of the depth
            # odometry vs. groundtruth comparison.
            plot_loc_state_stats(
                pdf,
                depth_odom_poses,
                sparse_mapping_poses,
                output_csv_file,
                "depth_odometry_",
                0.01,
                False,
                rmse_rel_start_time=rmse_rel_start_time,
                rmse_rel_end_time=rmse_rel_end_time,
            )
            plot_covariances(
                pdf,
                depth_odom_relative_poses.times,
                depth_odom_relative_poses.covariances.position,
                "Depth Odometry Position",
            )
            plot_covariances(
                pdf,
                depth_odom_relative_poses.times,
                depth_odom_relative_poses.covariances.orientation,
                "Depth Odometry Orientation",
            )
        plot_loc_state_stats(
            pdf,
            graph_localization_states,
            sparse_mapping_poses,
            output_csv_file,
            rmse_rel_start_time=rmse_rel_start_time,
            rmse_rel_end_time=rmse_rel_end_time,
        )
        plot_loc_state_stats(
            pdf,
            imu_augmented_graph_localization_states,
            sparse_mapping_poses,
            output_csv_file,
            "imu_augmented_",
            0.01,
            rmse_rel_start_time=rmse_rel_start_time,
            rmse_rel_end_time=rmse_rel_end_time,
        )
        if has_imu_bias_tester_poses:
            plot_loc_state_stats(
                pdf,
                imu_bias_tester_poses,
                sparse_mapping_poses,
                output_csv_file,
                "imu_bias_tester_",
                0.01,
                False,
                rmse_rel_start_time=rmse_rel_start_time,
                rmse_rel_end_time=rmse_rel_end_time,
            )
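
# A hypothetical invocation of create_plots (file names are placeholders).
create_plots("localization_run.bag", "localization_run.pdf",
             output_csv_file="localization_run.csv")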
Example #23
        # armLog.EEF_pos.y = pos[1]
        # armLog.EEF_pos.z = pos[2]
        # armLog.f_ext = right_arm_ctrl.force_measured
        # bag.write('right_arm_log',armLog)

        pos, ori = left_arm_ctrl.get_pose_arm()
        armLog.robot_error_pos = sqrt((np.transpose(left_arm_ctrl.pos_error) *
                                       left_arm_ctrl.pos_error)[0, 0])
        armLog.robot_error_orient = sqrt(
            (np.transpose(left_arm_ctrl.orient_error) *
             left_arm_ctrl.orient_error)[0, 0])
        armLog.EEF_pos.x = pos[0]
        armLog.EEF_pos.y = pos[1]
        armLog.EEF_pos.z = pos[2]
        armLog.f_ext = left_arm_ctrl.force_measured
        bag.write('left_arm_log', armLog)

        rate.sleep()

if __name__ == '__main__':
    try:
        time_now = time.strftime("%H:%M:%S")
        date = time.strftime("%d-%m")
        bag = rosbag.Bag(
            'Kinematic_Control_Bag_' + date + '_' + time_now + '.bag', 'w')
        main(bag)
    except rospy.ROSInterruptException:
        pass
    finally:
        bag.close()
Example #24
def bag_to_dataframe(bag_name,
                     include=None,
                     exclude=None,
                     parse_header=False,
                     seconds=False):
    '''
    Read in a rosbag file and create a pandas data frame that
    is indexed by the time the message was recorded in the bag.

    :bag_name: String name for the bag file
    :include: None, String, or List. Topics to include in the dataframe.
              If None, all topics are added; if a string, it is used as a
              regular expression; if a list, that list is used.
    :exclude: None, String, or List. Topics to be removed from those added
              using the include option, using set difference. If None, no
              topics are removed; if a string, it is treated as a regular
              expression; a list removes those in the list.
    :parse_header: if True, message header fields are also parsed
    :seconds: if True, the time index is in seconds instead of nanoseconds

    :returns: a pandas dataframe object
    '''
    # get list of topics to parse
    yaml_info = get_bag_info(bag_name)
    bag_topics = get_topics(yaml_info)
    bag_topics = prune_topics(bag_topics, include, exclude)
    length = get_length(bag_topics, yaml_info)
    msgs_to_read, msg_type = get_msg_info(yaml_info, bag_topics, parse_header)

    bag = rosbag.Bag(bag_name)
    dmap = create_data_map(msgs_to_read)

    # create datastore
    datastore = {}
    for topic in dmap.keys():
        for f, key in dmap[topic].iteritems():
            t = msg_type[topic][f]
            if isinstance(t, int) or isinstance(t, float):
                arr = np.empty(length)
                arr.fill(np.NAN)
            elif isinstance(t, list):
                arr = np.empty(length)
                arr.fill(np.NAN)
                for i in range(len(t)):
                    key_i = '{0}{1}'.format(key, i)
                    datastore[key_i] = arr.copy()
                continue
            else:
                arr = np.empty(length, dtype=np.object)
            datastore[key] = arr

    # create the index
    index = np.empty(length)
    index.fill(np.NAN)

    # all of the data is loaded
    for idx, (topic, msg,
              mt) in enumerate(bag.read_messages(topics=bag_topics)):
        try:
            if seconds:
                index[idx] = msg.header.stamp.to_sec()
            else:
                index[idx] = msg.header.stamp.to_nsec()
        except:
            if seconds:
                index[idx] = mt.to_sec()
            else:
                index[idx] = mt.to_nsec()
        fields = dmap[topic]
        for f, key in fields.iteritems():
            try:
                d = get_message_data(msg, f)
                if isinstance(d, tuple):
                    for i, val in enumerate(d):
                        key_i = '{0}{1}'.format(key, i)
                        datastore[key_i][idx] = val
                else:
                    datastore[key][idx] = d
            except:
                pass

    bag.close()

    # convert the index
    if not seconds:
        index = pd.to_datetime(index, unit='ns')

    # now we have read all of the messages its time to assemble the dataframe
    return pd.DataFrame(data=datastore, index=index)
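
# A hypothetical call matching the docstring above (bag name and topic are
# placeholders): index /odom messages by time in seconds.
df = bag_to_dataframe('flight.bag', include=['/odom'], seconds=True)
print(df.head())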
Example #25
if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument("--bag-name", default="")
    parser.add_argument("-n", "--topic-list", nargs="+", default=default_topics)

    args = parser.parse_args()

    # Create topic dictionary
    freq_analyzer = Topic.initDictFromTopics(args.topic_list)
    # Analyse resource loads
    cpu_analyzer = CpuUsage()
    mem_analyzer = MemUsage()

    # Go through the messages in the bag file
    for topic_bag, msg, t in rosbag.Bag(args.bag_name).read_messages():
        # Add message to cpu analyser
        cpu_analyzer.addCpuMessage(topic_bag, msg)
        # Add message to mem analyser
        mem_analyzer.addMemMessage(topic_bag, msg)
        # Check if the topic matches
        for topic in args.topic_list:
            if topic_bag == topic:
                # Calculate hz
                freq_analyzer[topic].addTimestamp(msg.header.stamp.to_sec())

    # Plot the result
    cpu_analyzer.printResult()
    mem_analyzer.printResult()
    Topic.plotResults(freq_analyzer)
Example #26
#!/usr/bin/env python2
""" Print total cumulative serialized msg size per topic in a ROS Bag file """
import rosbag
import sys

# Calculate size of messages in bag file
topic_size_dict = {}
for topic, msg, time in rosbag.Bag(sys.argv[1], 'r').read_messages(raw=True):
    topic_size_dict[topic] = topic_size_dict.get(topic, 0) + len(msg[1])
topic_size = list(topic_size_dict.items())

# Print results from smallest to largest
topic_size.sort(key=lambda x: x[1])
print("TOPIC\t\t\tSIZE")
for topic, size in topic_size:
    # For more human readable results, determine scale of each value (Kilo, Mega, Giga, Tera)
    order = 0
    while size > 1000:
        size /= 1000
        order += 1
    order_str = ['', 'K', 'M', 'G', 'T'][order]
    print("%s  %d%s" % (topic, size, order_str))
    #         j+=1
    #     #     # print 'should be working'
    #     #     velocities.header.seq = data[i,2]
    #     #     velocities.vector.x = data[i,3]
    #     #     velocities.vector.y = data[i,4]
    #     #     velocities.vector.z = data[i,5]
    #     #     outbag.write('/velocities', velocities, velocities.header.stamp)
    #
    # tf_stat = TFMessage()
    # tf_stat = tf_data
    # for num in range(0,11):
    #     tf_stat.transforms[num].header.stamp.secs=data[0,0]
    #     tf_stat.transforms[num].header.stamp.nsecs=data[0,1]
    #     print data[0,0]
    #     print data[0,1]
    # # print tf_stat
    # outbag.write('/tf_static', tf_stat, tf_stat.transforms[0].header.stamp)


if __name__ == '__main__':
    outbag = rosbag.Bag('/home/jacob/6_5_t1_new.bag', 'w')
    inbag = '/home/jacob/6_5_t1.bag'
    # refbag = '/home/jacob/bags/bag_test.bag'
    # [tf_data, cinfo_data, dinfo_data] = load_ref_data(refbag)
    # [data,data2] = load_data(inbag)
    # bagwriter(inbag, outbag, data, data2, tf_data, cinfo_data, dinfo_data)
    bagwriter(inbag, outbag)

    print "done"
    outbag.close()
Example #28
            # Now append the reconstructed images to the copied rosbag
            stamps = np.loadtxt(
                join(reconstructed_images_folder, 'timestamps.txt'))
            if len(stamps.shape) == 2:
                stamps = stamps[:, 1]

            # list all images in the folder
            images = [
                f
                for f in glob.glob(join(reconstructed_images_folder, "*.png"))
            ]
            images = sorted(images)
            print('Found {} images'.format(len(images)))

            with rosbag.Bag(input_bag_filename, 'a') as outbag:

                for i, image_path in enumerate(images):

                    stamp = stamps[i]
                    img = cv2.imread(
                        join(reconstructed_images_folder, image_path), 0)
                    try:
                        img_msg = bridge.cv2_to_imgmsg(img, encoding='mono8')
                        stamp_ros = rospy.Time(stamp)
                        print(img.shape, stamp_ros)
                        img_msg.header.stamp = stamp_ros
                        img_msg.header.seq = i
                        outbag.write(args.image_topic, img_msg,
                                     img_msg.header.stamp)
Example #29
    def __init__(self,
                 bagname,
                 joints,
                 individual_waypoints_recorded,
                 seconds_for_each_waypoint,
                 frequency_to_downsample,
                 ignore_last_num_seconds=0.0,
                 js_topic='/joint_states'):
        """Load gesture from the bag name given and remove jerkiness by downsampling"""
        # get bag info
        self.info_bag = yaml.load(
            subprocess.Popen(['rosbag', 'info', '--yaml', bagname],
                             stdout=subprocess.PIPE).communicate()[0])

        # Fill up traj with real trajectory points
        traj = []
        # Also fill up downsampled traj
        downsampled_traj = []
        bag = rosbag.Bag(bagname)
        first_point = True
        num_msgs = 0
        num_downsampled_data_points = 0
        for topic, msg, t in bag.read_messages(topics=[js_topic]):
            rospy.logdebug("Processing new message from rosbag")
            num_msgs += 1
            # Process interesting joints here
            names, positions = self.getNamesAndMsgList(joints, msg)
            if individual_waypoints_recorded:
                # Take all waypoints
                num_downsampled_data_points += 1
                downsampled_traj.append(positions)
            else:
                # Downsample the number of points if recorded using continuous mode
                if num_msgs % frequency_to_downsample == 0:
                    num_downsampled_data_points += 1
                    downsampled_traj.append(positions)
            # Append interesting joints here
            traj.append(positions)
            if first_point:
                # Store first point
                self.gesture_x0 = positions
                first_point = False

        bag.close()
        # Store last point
        self.gesture_goal = positions

        print str(len(traj)) + " points in example traj"
        print "Downsampled traj has: " + str(len(downsampled_traj)) + " points"

        rospy.loginfo("Joints:" + str(joints))
        rospy.loginfo("Initial pose:" + str(self.gesture_x0))
        rospy.loginfo("Final pose: " + str(self.gesture_goal))

        self.motion_joints = joints
        self.motion_traj = downsampled_traj
        if individual_waypoints_recorded:
            self.motion_duration = num_downsampled_data_points * seconds_for_each_waypoint
        else:
            self.motion_duration = self.info_bag['duration']
        rospy.loginfo("Motion duration: " + str(self.motion_duration))
Example #30
def run(args):
    from evo.core import metrics
    from evo.tools import file_interface, log

    log.configure_logging(args.verbose, args.silent, args.debug)
    if args.debug:
        import pprint
        logger.debug(
            "main_parser config:\n" +
            pprint.pformat({arg: getattr(args, arg)
                            for arg in vars(args)}) + "\n")
    logger.debug(SEP)

    pose_relation = None
    if args.pose_relation == "full":
        pose_relation = metrics.PoseRelation.full_transformation
    elif args.pose_relation == "rot_part":
        pose_relation = metrics.PoseRelation.rotation_part
    elif args.pose_relation == "trans_part":
        pose_relation = metrics.PoseRelation.translation_part
    elif args.pose_relation == "angle_deg":
        pose_relation = metrics.PoseRelation.rotation_angle_deg
    elif args.pose_relation == "angle_rad":
        pose_relation = metrics.PoseRelation.rotation_angle_rad

    delta_unit = None
    if args.delta_unit == "f":
        delta_unit = metrics.Unit.frames
    elif args.delta_unit == "d":
        delta_unit = metrics.Unit.degrees
    elif args.delta_unit == "r":
        delta_unit = metrics.Unit.radians
    elif args.delta_unit == "m":
        delta_unit = metrics.Unit.meters

    traj_ref, traj_est, stamps_est = None, None, None
    ref_name, est_name = "", ""
    plot_mode = None  # no plot imports unless really needed (slow)
    if args.subcommand == "tum":
        traj_ref, traj_est = file_interface.load_assoc_tum_trajectories(
            args.ref_file,
            args.est_file,
            args.t_max_diff,
            args.t_offset,
        )
        ref_name, est_name = args.ref_file, args.est_file
        if args.plot or args.save_plot:
            from evo.tools.plot import PlotMode
            plot_mode = PlotMode.xyz if not args.plot_mode else PlotMode[
                args.plot_mode]
    elif args.subcommand == "kitti":
        traj_ref = file_interface.read_kitti_poses_file(args.ref_file)
        traj_est = file_interface.read_kitti_poses_file(args.est_file)
        ref_name, est_name = args.ref_file, args.est_file
        if args.plot or args.save_plot:
            from evo.tools.plot import PlotMode
            plot_mode = PlotMode.xz if not args.plot_mode else PlotMode[
                args.plot_mode]
    elif args.subcommand == "euroc":
        args.align = True
        logger.info(
            "forcing trajectory alignment implicitly (EuRoC ground truth is in IMU frame)"
        )
        logger.debug(SEP)
        traj_ref, traj_est = file_interface.load_assoc_euroc_trajectories(
            args.state_gt_csv,
            args.est_file,
            args.t_max_diff,
            args.t_offset,
        )
        ref_name, est_name = args.state_gt_csv, args.est_file
        if args.plot or args.save_plot:
            from evo.tools.plot import PlotMode
            plot_mode = PlotMode.xyz if not args.plot_mode else PlotMode[
                args.plot_mode]
    elif args.subcommand == "bag":
        import rosbag
        logger.debug("opening bag file " + args.bag)
        bag = rosbag.Bag(args.bag, 'r')
        try:
            traj_ref, traj_est = file_interface.load_assoc_bag_trajectories(
                bag,
                args.ref_topic,
                args.est_topic,
                args.t_max_diff,
                args.t_offset,
            )
        finally:
            bag.close()
        ref_name, est_name = args.ref_topic, args.est_topic
        if args.plot or args.save_plot:
            from evo.tools.plot import PlotMode
            plot_mode = PlotMode.xy if not args.plot_mode else PlotMode[
                args.plot_mode]

    main_rpe(traj_ref,
             traj_est,
             pose_relation,
             args.delta,
             delta_unit,
             args.delta_tol,
             args.all_pairs,
             args.align,
             args.correct_scale,
             ref_name,
             est_name,
             args.plot,
             args.save_plot,
             plot_mode,
             args.save_results,
             args.no_warnings,
             serialize_plot=args.serialize_plot)