def export(output_dir, bag_file):
    data = rosbag_pandas.bag_to_dataframe(bag_file)
    bag_name = os.path.splitext(os.path.basename(bag_file))[0]
    output_dir = os.path.abspath(output_dir)

    # for c in data.columns:
    #     print c

    for chart in CHARTS:
        # Filter series
        # Drop rows where all values are missing
        # Resample:
        # - reduces amount of data
        # - reduces gaps in data (gives a nicer chart)
        chart_df = data.filter(items=chart["series"]).dropna(axis=0, how="all").resample('100L', how="first")
        if chart_df.columns.size > 0:
            path = os.path.join(output_dir, bag_name, chart["name"])
            if not os.path.exists(path):
                os.makedirs(path)

            print "Exporting chart %s to %s" % (chart["name"], path)

            os.chdir(path)
            vis = bearcart.Chart(chart_df)
            vis.create_chart(html_path=HTML_FILE, data_path=DATA_FILE,
                             js_path=JS_FILE, css_path=CSS_FILE)
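Note: resample('100L', how="first") uses the older pandas resample signature; newer pandas releases drop the how= keyword in favour of calling an aggregation method on the resampler. A minimal, behaviour-equivalent sketch on synthetic data (the column name is illustrative, not taken from the bag above):

import pandas as pd

# Synthetic stand-in for the bag DataFrame.
idx = pd.date_range('2021-01-01', periods=1000, freq='10ms')
data = pd.DataFrame({'value': range(1000)}, index=idx)

# Old style:   data.resample('100L', how='first')
# Newer pandas ('100ms' and '100L' both mean 100 milliseconds):
chart_df = data.dropna(axis=0, how='all').resample('100ms').first()
print(chart_df.head())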
def plot_pid_performance(file):
    df = rpd.bag_to_dataframe(file)

    ax1 = plt.subplot(2, 2, 1)
    ax1.plot(df.index, df.current_velocity__twist_linear_x, '-o', df.index,
             df.twist_cmd__twist_linear_x, '-o')
    plt.ylabel('Vehicle Speed (m/s)')
    plt.legend(['Actual', 'Demand'])

    ax2 = plt.subplot(2, 2, 3, sharex=ax1)
    ax2.plot(df.index, df.vehicle_throttle_cmd__pedal_cmd, '-o', df.index,
             df.vehicle_brake_cmd__pedal_cmd, '-o')
    plt.ylabel('-')
    plt.legend(['Throttle', 'Brake'])

    ax3 = plt.subplot(2, 2, 2, sharex=ax1)
    ax3.plot(df.index, df.current_velocity__twist_angular_z, '-o', df.index,
             df.twist_cmd__twist_angular_z, '-o')
    plt.ylabel('Yaw Rate (rad/s)')
    plt.legend(['Actual', 'Demand'])

    # ax4 = plt.subplot(2, 2, 4, sharex=ax1)
    # ax4.plot(df.index, df.vehicle_steering_cmd__steering_wheel_angle_cmd, '-o')
    # plt.ylabel('Steering Angle (rad)')

    ax4 = plt.subplot(2, 2, 4, sharex=ax1)
    ax4.plot(df.index, df.speed_control_diag__error, '-o')
    plt.ylabel('Error')

    plt.show()
Example 3
def test_numbers_3_4():
    df = rosbag_pandas.bag_to_dataframe('data/rosout.bag')
    assert set(df) == {
        '/rosout/file', '/rosout/function', '/rosout/header/frame_id',
        '/rosout/header/seq', '/rosout/header/stamp/nsecs',
        '/rosout/header/stamp/secs', '/rosout/level', '/rosout/line',
        '/rosout/msg', '/rosout/name', '/rosout/topics/0'
    }
Example 4
    def update_df(self, image_keys):
        sample_freq = '{:0.4f}S'.format(1.0 / self.sample_freq_hz)
        df = rosbag_pandas.bag_to_dataframe(bag_name=self.bag_path,
                                            include=self.bag_topics)
        # Bin and resample image and drive command frames in order to gain
        # cleanly labeled data. Sample frequency should be less than
        # or equal to the most infrequent data sequence (e.g. images are
        # typically collected at ~30hz)
        self.df = df.resample(sample_freq).last().dropna()
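The comment above spells out the bin-and-label idea; here is a standalone sketch of the same resample-last-dropna pattern on synthetic data (the 20 Hz rate is an assumed example, not a value taken from the class):

import pandas as pd

sample_freq_hz = 20.0                                   # assumed example rate
sample_freq = '{:0.4f}S'.format(1.0 / sample_freq_hz)   # -> '0.0500S'

idx = pd.date_range('2021-01-01', periods=200, freq='10ms')
frames = pd.DataFrame({'steering': range(200)}, index=idx)

# Keep the last observation in each 50 ms bin and drop empty bins.
binned = frames.resample(sample_freq).last().dropna()
print(binned.head())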
Example 5
def bag2pickle(paths=None, defaultPath="/home/rhagoletis/catkin/src/World/bags/"):
    '''
    Load bag files to make into respective dataframes
    '''
    # defaultPath="/home/rhagoletis/catkin/src/World/bags/"
    if paths is None:
        paths = easygui.fileopenbox(title="Bags to Dataframes"
                                    , default=defaultPath,
                                    multiple=True, filetypes=["*traj.bag"])

    print paths, "\n"
    metadata = None
    tic()
    i = 1
    for path in paths:
        tic()
        print "starting analysis of file %s , %s / %s files" % (path.split('/')[-1], i, len(paths))
        try:
            df = rosbag_pandas.bag_to_dataframe(path, include=['/trajectory'])
        except Exception as e:
            print "Bag has some nonetype error", path, e
            df = None
            parameters = None
            picklepath = None
            continue
        bag = rosbag.Bag(path)

        try:
            for topic, msg, t in bag.read_messages(topics='/metadata'):
                a = msg
                #         parameters=json.loads(a.data)
                #         metadata={"meta":parameters}

            metadata = json.loads(a.data)
            parameters = metadata['parameters']

        except:
            print "no such file!, trying the pickle"
            try:
                metadata = depickler(paths[0].split('.bag')[0])
            except IOError:
                metadata = depickler(paths[0].split('.bag')[0] + '.pickle')

            parameters = metadata['parameters']

        obj = dict(df=df, metadata=metadata)

        picklepath = path + "_df.pickle"
        pickler(obj, picklepath)
        #     df.to_pickle(picklepath)

        i += 1
        toc()
    print "\nanalysis of %s files complete" % len(paths)
    return df, parameters, picklepath
Example 6
def parse_single_run(bag_file, csv_convert=False):
    yaml_info = rosbag_pandas.get_bag_info(bag_file)
    df = rosbag_pandas.bag_to_dataframe(bag_file)
    df['time'] = (df.index - df.index[0]) / np.timedelta64(1, 's')
    df.index = df['time']
    new_col_names = []
    for column in df.columns.values:
        new_col_names.append(column.split('bb_controller_status__')[-1])
    df.columns = new_col_names
    if csv_convert:
        csv_file_name = os.path.splitext(yaml_info["path"])[0] + '.csv'
        df.to_csv(csv_file_name)
    return df
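The elapsed-time conversion used above, (df.index - df.index[0]) / np.timedelta64(1, 's'), works on any DatetimeIndex; a minimal sketch with synthetic data:

import numpy as np
import pandas as pd

idx = pd.date_range('2021-01-01 12:00:00', periods=5, freq='500ms')
df = pd.DataFrame({'x': range(5)}, index=idx)

# Seconds since the first sample: 0.0, 0.5, 1.0, 1.5, 2.0
df['time'] = (df.index - df.index[0]) / np.timedelta64(1, 's')
df = df.set_index('time')
print(df)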
def main():

    global IS_BAG_TO_CSV
    global bag_index

    INFO_FLAG = True

    # ipdb.set_trace()
    rospy.init_node("topic_recorder", anonymous=True)
    rospy.Subscriber("/tag_multimodal", Tag_MultiModal, callback_multimodal)

    if not os.path.isdir(hardcoded_data.bag_save_path):
        os.makedirs(hardcoded_data.bag_save_path)

    csv_index = 1
    #ipdb.set_trace()
    while not rospy.is_shutdown():
        if TOPIC_START_FLAG and INFO_FLAG:
            print "Start to subscribe to topic /tag_multimodal"
            INFO_FLAG = False
        if IS_BAG_TO_CSV:
            print "HI"
            #ipdb.set_trace()
            csv_index = bag_index - 1
            dataframe = rosbag_pandas.bag_to_dataframe(
                os.path.join(hardcoded_data.bag_save_path, str(csv_index),
                             'tag_multimodal.bag'))
            df = dataframe[[
                'tag_multimodal__endpoint_state_pose_position_x',
                'tag_multimodal__endpoint_state_pose_position_y',
                'tag_multimodal__endpoint_state_pose_position_z',
                'tag_multimodal__endpoint_state_pose_orientation_x',
                'tag_multimodal__endpoint_state_pose_orientation_y',
                'tag_multimodal__endpoint_state_pose_orientation_z',
                'tag_multimodal__endpoint_state_pose_orientation_w',
                'tag_multimodal__wrench_stamped_wrench_force_x',
                'tag_multimodal__wrench_stamped_wrench_force_y',
                'tag_multimodal__wrench_stamped_wrench_force_z',
                'tag_multimodal__wrench_stamped_wrench_torque_x',
                'tag_multimodal__wrench_stamped_wrench_torque_y',
                'tag_multimodal__wrench_stamped_wrench_torque_z',
                'tag_multimodal__tag'
            ]]

            df.to_csv(
                os.path.join(hardcoded_data.bag_save_path, str(csv_index),
                             'tag_multimodal.csv'))
            rospy.loginfo("csv transform finished !!")
            IS_BAG_TO_CSV = False
Example 8
    def _load_bag_data(self, file):
        bag = rosbag_pandas.bag_to_dataframe(file)
        bag = bag.rename(columns={
            self.topic + '__data': 'data',
            self.topic + '__format': 'format'
        })

        df = bag[bag['format'].notnull()]
        self.image_data = df['data'].values
        self.num_images = self.image_data.size
        (self.width,
         self.height) = Image.open(BytesIO(self.image_data[0])).size

        assert self.width == 856 and self.height == 480, "Unexpected image dimensions (%d, %d)" % (
            self.width, self.height)
Example 9
def do_work(bag, include, exclude, output, fill, header):
    # convert a length-one value to a regex
    if include is not None and len(include) == 1:
        include = include[0]

    # convert a length-one value to a regex
    if exclude is not None and len(exclude) == 1:
        exclude = exclude[0]
    df = rosbag_pandas.bag_to_dataframe(bag, include=include, exclude=exclude, 
            parse_header=header)
    if fill:
        df = df.ffill().bfill()

    if output is None:
        base, _ = os.path.splitext(bag)
        output = base + '.csv'
    df = rosbag_pandas.clean_for_export(df)
    df.to_csv(output)
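A hypothetical call of do_work (the bag path and topic names are placeholders, not from the original script): with output=None the CSV is written next to the bag, and fill=True forward- then back-fills missing values.

# Placeholders for illustration only.
do_work(bag='flight.bag',
        include=['/imu/data', '/gps/fix'],
        exclude=None,
        output=None,    # -> 'flight.csv'
        fill=True,
        header=False)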
Example 10
def do_work(bag, include, exclude, output, fill, header):
    # convert a length-one value to a regex
    if include is not None and len(include) == 1:
        include = include[0]

    # convert a length-one value to a regex
    if exclude is not None and len(exclude) == 1:
        exclude = exclude[0]
    df = rosbag_pandas.bag_to_dataframe(bag, include=include, exclude=exclude, 
            parse_header=header)
    if fill:
        df = df.ffill().bfill()

    if output is None:
        base, _ = os.path.splitext(bag)
        output = base + '.csv'
    df = rosbag_pandas.clean_for_export(df)
    df.to_csv(output)
Example 11
def write_kalibr_yaml(filename, imu_topic, camera_file, camera_model):
    """
    Write a yaml file for kalibr based on imu_topic and camera_file
    """
    df = rosbag_pandas.bag_to_dataframe(filename, include=imu_topic,
                                        exclude=None, parse_header=True)
    df_mat = df.values

    imu_quaternion = df_mat[:, -4:]
    print imu_quaternion.shape

    imu_euler = ut.quat2euler(imu_quaternion)

    Ric = ut.euler2mat(*imu_euler[0])
    Rci = Ric.T

    T_cam_imu = np.zeros((4, 4))
    T_cam_imu[0:3, 0:3] = Rci
    T_cam_imu[3, 3] = 1

    print('The first IMU recording on the camera indicates the rotation of the '
          'imu when placed on the camera in the i frame:\n {}'.format(Ric))

    print('equivalently the imu in camera frame will be Rci:\n {}'.format(Rci))

    bag = rosbag.Bag(filename)

    sel_imgs_array, sel_imgs_index, timeshift = \
        bp.get_timeshift_imu_images(bag, image_topic, imu_topic)

    print('timeshift between imu and image recordings:\n'
          '{}'.format(timeshift))

    model = 'omni'
    dist_model = 'radtan'

    K, dist_coef, intrinsics = ut.import_calib(camera_file, camera_model)

    #N = 320    #M = 260
    N = K['width']
    M = K['height']
    print('width {}, height {}'.format(N, M))
    bp.write_yaml(model, intrinsics, dist_model, dist_coef, T_cam_imu,
                  timeshift, N, M, 'camchain.yaml')
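A hypothetical invocation of write_kalibr_yaml (file names, topics and the camera model are placeholders); note that the timeshift estimation above also reads a module-level image_topic, so that global has to be set beforehand.

# Placeholders for illustration only.
image_topic = '/camera/image_raw'   # consumed as a global by write_kalibr_yaml
write_kalibr_yaml(filename='calib_session.bag',
                  imu_topic='/imu/data',
                  camera_file='camera_calib.yaml',
                  camera_model='pinhole')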
def print_benchmark_table(include_rosbag_pandas=True, repeat=True):
    benchmarks.setup()

    bagnames = ["points_1k.bag", "points_1m.bag", "points_10m.bag"]

    df = pd.DataFrame(columns=bagnames)
    if include_rosbag_pandas:
        rosbag_pandas_times = []
        for bagname in tqdm(bagnames):
            rosbag_pandas_times.append(
                timeit_auto(lambda: rosbag_pandas.bag_to_dataframe(
                    ROOT_DIR + "/bags/" + bagname),
                            repeat=repeat))
        df = df.append(
            pd.Series(rosbag_pandas_times,
                      index=bagnames,
                      name="rosbag_pandas"))

    fast_rosbag_pandas_times = []
    for bagname in tqdm(bagnames):
        fast_rosbag_pandas_times.append(
            timeit_auto(lambda: fast_rosbag_pandas.rosbag_to_dataframe(
                ROOT_DIR + "/bags/" + bagname, "points"),
                        repeat=repeat))
    df = df.append(
        pd.Series(fast_rosbag_pandas_times,
                  index=bagnames,
                  name="fast_rosbag_pandas"))

    df = df.transpose()

    # Add comparison
    if include_rosbag_pandas:
        df["Speedup"] = df["rosbag_pandas"] / df["fast_rosbag_pandas"]

    print(
        tabulate(
            df,
            tablefmt="pipe",
            headers=[
                "rosbag_pandas (s)", "fast_rosbag_pandas (s)", "speedup factor"
            ],
            floatfmt=(".4f", ".4f", ".4f", ".1f"),
        ))
Example 13
    def read_data_bagfile(self,
                          bagname,
                          exclude=None,
                          include='/base/odometry_controller/odometry'):
        '''
        read data from a bagfile generated with ros
        :param bagname: path to bagfile
        :param exclude: exclude topics (regular expression possible)
        :param include: include topics (regular expression possible)
        :return: --
        '''
        global m_A
        global n_A

        df = rp.bag_to_dataframe(bagname,
                                 include=include,
                                 exclude=exclude,
                                 seconds=True)

        fieldnames = []
        for dat in self.data:
            inc = include[1:] + '__'
            fieldnames.append(
                (inc.replace('/', '_') + dat[6:]).replace('.', '_'))
        fieldnames[2] = 'index'

        # save fieldnames from dataframe to matrix A
        A = df.reset_index()[[
            fieldnames[i] for i in xrange(2, len(fieldnames))
        ]].as_matrix()
        # dummy data for '%time' and 'field.header.stamp', because both are not necessary
        B = np.ones([A.shape[0], 2])
        # put data matrix A and dummy matrix B together
        A = np.concatenate((B, A), axis=1)
        # set time to start at 0s
        A[:, AD.FHS] = A[:, AD.FHS] - A[0, AD.FHS]

        m_A, n_A = A.shape
        self.A = A
        print 'Time of Interval: {:.4f} [s]'.format(self.A[-1, AD.FHS] -
                                                    self.A[0, AD.FHS])
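The field names built above reconstruct the column names this snippet expects in the rosbag_pandas DataFrame; an illustrative sketch of that string transformation for a single field (the field path is a hypothetical entry from self.data):

include = '/base/odometry_controller/odometry'
dat = 'field.pose.pose.position.x'          # hypothetical entry from self.data

inc = include[1:] + '__'
column = (inc.replace('/', '_') + dat[6:]).replace('.', '_')
print(column)   # base_odometry_controller_odometry__pose_pose_position_x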
Example 14
### Create directory to save evaluation plots for this set of bags
parent_dir = "/home/nasib/datasets/evaluated"
path_plots = os.path.join(parent_dir, set_of_bags)
os.mkdir(path_plots)
### End

### Specify path to directory containing rosbags for evaluation
bag_path = "/home/nasib/datasets/rosbags/evaluation_sets/" + set_of_bags
files_list = os.listdir(bag_path)
print(files_list)

file_path = bag_path + '/' + file
plot_file = strip_name(file)

### Load all different topics into separate DataFrames
df_mpc_weights = rosbag_pandas.bag_to_dataframe(
    file_path, include=['/peregrine/mpc_costs'])

### Create time column for every DataFrame
create_timecolumn(df_mpc_weights, '/peregrine/mpc_costs')

### Plot all relevant information
plt.style.use('seaborn-whitegrid')
fig, ax = plt.subplots(nrows=3, ncols=1)
fig.suptitle(plot_file)
fig.set_size_inches(h=14, w=21)
fig.subplots_adjust(left=0.05,
                    bottom=0.05,
                    right=0.95,
                    top=0.9,
                    wspace=0.25,
                    hspace=0.25)
Example 15
    def read_data(self):
        df = rbpd.bag_to_dataframe(self._data_path)
        df.head()
Example 16
def export(output_dir, bag_file):
    data = rosbag_pandas.bag_to_dataframe(bag_file)
    bag_name = os.path.splitext(os.path.basename(bag_file))[0]
    output_dir = os.path.abspath(output_dir)

    #data = data.truncate(before="20150319 13:38:30", after="20150319 13:39:00")
    #data = data.truncate(before="20150319 14:53:30", after="20150319 14:54:30")

    # for c in data.columns:
    #     print c

    for chart in CHARTS:
        # Filter series
        # Drop rows where all values are missing
        # Resample:
        # - reduces amount of data
        # - reduces gaps in data (gives a nicer chart)
        chart_df = data.filter(items=chart["series"]).dropna(
            axis=0, how="all").resample('100L', how="first")
        if chart_df.columns.size > 0:
            path = os.path.join(output_dir, bag_name, chart["name"])
            if not os.path.exists(path):
                os.makedirs(path)

            print "Exporting chart %s to %s" % (chart["name"], path)

            os.chdir(path)
            vis = bearcart.Chart(chart_df)
            vis.create_chart(html_path=HTML_FILE,
                             data_path=DATA_FILE,
                             js_path=JS_FILE,
                             css_path=CSS_FILE)

    #
    # KML export
    #
    base = utm.fromLatLong(47.388650, 8.033269, 368)
    chart_df = data.filter(items=KML_SERIES).dropna(
        axis=0, how="all").resample('100L', how="first")
    if chart_df.columns.size > 0:
        mavros = KML_SERIES[0] in chart_df.columns
        path = os.path.join(output_dir, bag_name + '.kml')
        print "Exporting KML to %s" % (path)

        fid = open(path, 'w')
        fid.write(kmlhead(bag_name))
        for row_index, row in chart_df.iterrows():
            x = base.easting
            y = base.northing
            a = base.altitude

            # if (math.isnan(row[KML_SERIES[0]]) or math.isnan(row[KML_SERIES[1]]) or math.isnan(row[KML_SERIES[2]])):
            #     print "found nan"
            #     print row

            if mavros and not (math.isnan(row[KML_SERIES[0]]) or math.isnan(
                    row[KML_SERIES[1]]) or math.isnan(row[KML_SERIES[2]])):
                x = row[KML_SERIES[0]]
                y = row[KML_SERIES[1]]
                a = row[KML_SERIES[2]]

            elif not mavros and not (math.isnan(row[KML_SERIES[3]])
                                     or math.isnan(row[KML_SERIES[4]])
                                     or math.isnan(row[KML_SERIES[5]])):
                x = x + row[KML_SERIES[3]]
                y = y + row[KML_SERIES[4]]
                a = a - row[KML_SERIES[5]]

            else:
                continue

            p = utm.UTMPoint(x, y, zone=base.zone, band=base.band).toMsg()

            fid.write("%f,%f,%f " % (p.longitude, p.latitude, a))

        fid.write(kmltail("absolute"))
        fid.close()
Example 17
def evaluate_benchmark(file_in, do_plot):
    vis = rosbag_pandas.bag_to_dataframe(file_in, include="/pose_visual")
    man = rosbag_pandas.bag_to_dataframe(file_in, include="/pose_estimation")
    timediff = (vis.index[0] - man.index[0]).total_seconds()
    ref = pd.read_pickle('reference_poses.pkl')
    vis = vis.set_index((vis.index - vis.index[0]).total_seconds() + timediff)
    k = 0
    count = 0
    errDist = 0
    errAng = 0
    for row in vis.itertuples():
        while row.Index > ref.iloc[[k]].index:
            k += 1
        if not (ref['pose_estimation__x'].iloc[[k]].values == 0
                or math.isnan(ref['pose_estimation__x'].iloc[[k]])):
            errX = row.pose_visual__x - ref['pose_estimation__x'].iloc[[
                k
            ]].values
            errY = row.pose_visual__y - ref['pose_estimation__y'].iloc[[
                k
            ]].values
            errrotZ = row.pose_visual__rotZ - ref[
                'pose_estimation__rotZ'].iloc[[k]].values
            errDist += (errX**2 + errY**2)**(0.5)
            errAng += abs(errrotZ)
            count += 1
    avgerrD = (errDist / count)
    avgerrR = (errAng / count) * 180 / math.pi

    pts_map = np.zeros((3))
    k = 0
    batime = 0
    for topic, msg, t in rosbag.Bag(file_in).read_messages():
        if topic == "/benchmark":
            batime += msg.BA_times_pass1
            batime += msg.BA_times_pass2
            pts_map[k] = msg.pts_map
            k += 1

    if do_plot:
        refplot = ref.ix[145:325]
        visplot = vis.ix[145:325]
        f, (axx, axy, axr) = plt.subplots(3, sharex=True)
        refpose = axx.plot(refplot.index,
                           refplot['pose_estimation__x'],
                           label='True Posesk')
        vispose = axx.plot(visplot.index,
                           visplot['pose_visual__x'],
                           label='Visual Estimationsk')
        axy.plot(refplot.index, refplot['pose_estimation__y'])
        axy.plot(visplot.index, visplot['pose_visual__y'])
        axr.plot(refplot.index,
                 refplot['pose_estimation__rotZ'] * 180 / math.pi)
        axr.plot(visplot.index, visplot['pose_visual__rotZ'] * 180 / math.pi)

        axx.set_title('X')
        axx.set_ylabel('Position [m]')
        axy.set_title('Y')
        axy.set_ylabel('Position [m]')
        axr.set_title('rotZ')
        axr.set_ylabel('Orientation [$\degree$]')
        axr.set_xlabel("Time [s]")
        axr.legend(('True pose', 'Visual estimation'), loc='lower left')

        fig2 = plt.figure()
        axxyy = fig2.add_subplot(111)
        axxyy.scatter(ref['pose_estimation__y'], ref['pose_estimation__x'])
        axxyy.plot(vis['pose_visual__y'], vis['pose_visual__x'])
        axxyy.set_xlabel("Y position [m]")
        axxyy.set_ylabel('X position [m]')
        axxyy.legend(('Estimated trajectory', 'Reference positions'),
                     loc='lower left')
        axxyy.invert_xaxis()

    return (avgerrD, avgerrR, batime)
Example 18
def bag_to_csv(bag_name, include, exclude=None, output=None, header=False,
               imu_kalibr=False):
    """bag_to_csv saves the csv file based on mentioned topic

    Parameters:
    -----------
    bag_name: string, path of the bag

    include: the topic or topics to be included

    exclude: the topic to be excluded from the final csv file

    output: string, the name of the output file

    header: bool, (default=False), either include the header timestamp or not

    imu_kalibr: bool, (default=False), either the final file is for kalibr
    calibration or not, In case of kalibr calibration `imu0.csv` file, Only
    will be saved which includes the angular velocity (gyroscope data) and
    linear acceleroration (accelerometer data) with their timestamp.

    Returns:
    --------
    Saving an csv file in the current directory

    df: Pandas dataframe of the saved file

    """
    if imu_kalibr:
        df_origin = rosbag_pandas.bag_to_dataframe(
            bag_name, include='/imu_3dm_node/imu/data', exclude=exclude,
            parse_header=header)
        df_origin = rosbag_pandas.clean_for_export(df_origin)

        base_name = 'imu_3dm_node_imu_data__'
        gyro_name = 'angular_velocity_'
        acc_name = 'linear_acceleration_'
        imu0_df = df_origin [
            [base_name + 'header_stamp_secs', base_name + 'header_stamp_nsecs',
             base_name + gyro_name + 'x',
             base_name + gyro_name + 'y', base_name + gyro_name + 'z',
             base_name + acc_name + 'x' ,
             base_name + acc_name + 'y', base_name + acc_name + 'z']]

        imu0_mat = imu0_df.values
        timestamp = []
        for i in range(0, imu0_mat.shape[0]):
            string = str(int(imu0_mat[i, 0])) + str(int(imu0_mat[i, 1]))
            timestamp.append(int(string))

        timestamp = np.asarray(timestamp)
        imu0_mat[:, 0] = timestamp
        imu0_mat = np.delete(imu0_mat, 1, 1)

        df = pd.DataFrame(
            imu0_mat, columns = ['timestamp', 'omega_x', 'omega_y', 'omega_z',
                                 'alpha_x', 'alpha_y', 'alpha_z'])

        df.to_csv('imu0.csv', index=False)

    else:

        df = rosbag_pandas.bag_to_dataframe(
            bag_name, include=include, exclude=exclude,
            parse_header=header)
        df = rosbag_pandas.clean_for_export(df)
        if output is None:
            base, _ = os.path.splitext(bag_name)
            output = base + '.csv'

        df.to_csv(output)

    return df
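Hypothetical example calls of bag_to_csv (bag paths and topics are placeholders):

# Plain export of one topic (with output=None the CSV is written next to the bag).
df = bag_to_csv('session.bag', include='/odom', output='odom.csv')

# Kalibr-style export: writes 'imu0.csv' with timestamp, gyro and accel columns.
imu_df = bag_to_csv('imu_session.bag', include=None, imu_kalibr=True)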
Example 19
import matplotlib.pyplot as plt
from mpl_toolkits import mplot3d
import pandas as pd
import math
import rosbag_pandas
import os
import shutil

set_of_bags = 'fifth_trial'


def create_timecolumn(df, topic, delta=0.0):
    df['time'] = pd.to_datetime(
        df['{}/header/stamp/secs'.format(topic)], unit='s') + pd.to_timedelta(
            df['{}/header/stamp/nsecs'.format(topic)],
            unit='ns') + pd.to_timedelta(delta, unit='s')


### Specify the rosbag file to evaluate
bag_file = "/home/michbaum/datasets/rosbags/ball_position_test_2_2020-02-13-12-36-50.bag"
### End

df_tracker_states = rosbag_pandas.bag_to_dataframe(
    bag_file, include=['/mbzirc_estimator/imm/state'])
print(df_tracker_states)
# df_tracker_states['time'] = pd.to_datetime(df_tracker_states['/mbzirc_estimator/imm/state/header/stamp/secs'], unit='s') + pd.to_timedelta(df['/mbzirc_estimator/imm/state/header/stamp/nsecs'], unit='ns') + pd.to_timedelta(delta, unit='s')
#
# plt.plot(df_tracker_states['time'], df_tracker_states['/mbzirc_estimator/imm/state/pose/pose/position/x'], color='darkblue', marker='^', linestyle='dotted', linewidth=1, markersize=3, label='Tracker')
# plt.plot(df_tracker_states['time'], df_tracker_states['/mbzirc_estimator/imm/state/pose/pose/position/y'], color='darkblue', marker='^', linestyle='dotted', linewidth=1, markersize=3, label='Tracker')
# plt.plot(df_tracker_states['time'], df_tracker_states['/mbzirc_estimator/imm/state/pose/pose/position/z'], color='darkblue', marker='^', linestyle='dotted', linewidth=1, markersize=3, label='Tracker')
Example 20
	rosbag_filesize = os.path.getsize(currentFile)/float(1024*1024)

	print "Reading rosbag file of " + "%.2f" % rosbag_filesize + " MBytes"

	if (rosbag_filesize > 30.0):
		print "It is not recommended to read such big file (> 30 MBytes). It will slowdown the computer."
		raw_input("Press CTRL+C to exit or other key to continue ...")

	# extract only these topics from the bag
	selected_topics = ['/mavros/global_position/global', '/atlas/raw/Conductivity', '/atlas/raw/DissolvedOxygen', '/atlas/raw/RedoxPotential', '/atlas/raw/Temperature', '/atlas/raw/pH']

	# header/stamp/secs has the timestamp in order
	selected_columns = ['/mavros/global_position/global/header/stamp/secs', '/mavros/global_position/global/latitude', '/mavros/global_position/global/longitude', '/mavros/global_position/global/altitude', '/atlas/raw/Conductivity/ec', '/atlas/raw/DissolvedOxygen/do', '/atlas/raw/RedoxPotential/orp', '/atlas/raw/Temperature/celsius', '/atlas/raw/pH/pH', '/atlas/raw/Conductivity/header/stamp/secs', '/atlas/raw/DissolvedOxygen/header/stamp/secs', '/atlas/raw/RedoxPotential/header/stamp/secs', '/atlas/raw/Temperature/header/stamp/secs', '/atlas/raw/pH/header/stamp/secs']

	df = rosbag_pandas.bag_to_dataframe(currentFile)
	#print (df.shape)
	#print (df.columns)
	#df.to_csv(os.path.join(filepath, filename+"-df3.csv"))

	print "###############################"
	print "Original table size (lines x cols): " + str(df.shape[0]) + " x " + str(df.shape[1])
	print "###############################"
	print ""

	# delete the unwanted columns
	col_names = df.columns
	for col in col_names:
		if col not in selected_columns:
			df = df.drop([col], axis=1)
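The column-pruning loop above can also be written with pandas' filter; a tiny self-contained sketch of the equivalent call (note the resulting column order then follows the keep-list rather than the original frame):

import pandas as pd

# Stand-in frame and keep-list mirroring selected_columns above.
df = pd.DataFrame({'a': [1], 'b': [2], 'c': [3]})
selected_columns = ['a', 'c']

df = df.filter(items=selected_columns)   # keeps only the listed columns
print(df.columns.tolist())               # ['a', 'c']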
Example 21
def test_numbers_3_4():
    df = rosbag_pandas.bag_to_dataframe('data/rosout.bag')
    assert set(df) == {
        'rosout__file', 'rosout__function', 'rosout__level', 'rosout__line',
        'rosout__msg', 'rosout__name'
    }
Example 22
files_list = os.listdir(bag_path)
print(files_list)

copy_settings_files_to_plot_directory(files_list, bag_path, path_plots)

filter_list_of_files(files_list)
# filter_list_of_files(files_list)

print(files_list)

for file in files_list:
    file_path = bag_path + '/' + file
    plot_file = strip_name(file)

    ### Load all different topics into separate DataFrames
    df_tracker_states = rosbag_pandas.bag_to_dataframe(
        file_path, include=['/mbzirc_estimator/imm/state'])
    df_tracker_future = rosbag_pandas.bag_to_dataframe(
        file_path, include=['/mbzirc_estimator/imm/future'])
    df_tracker_evaluation = rosbag_pandas.bag_to_dataframe(
        file_path, include=['/mbzirc_estimator/imm/evaluation'])
    df_groundtruth_ball = rosbag_pandas.bag_to_dataframe(
        file_path, include=['/rod_with_ball/vrpn_client/estimated_odometry'])
    df_target_world = rosbag_pandas.bag_to_dataframe(
        file_path, include=['/detection/target_world'])

    # Load entire ROSbag into a DataFrame
    df = rosbag_pandas.bag_to_dataframe(file_path)

    ### Create time column for every DataFrame
    create_timecolumn(df_tracker_states, '/mbzirc_estimator/imm/state')
    create_timecolumn(df_tracker_future, '/mbzirc_estimator/imm/future')
Example 23
def load_bag(filename, include=CSV_TOPICS, column_names=CSV_COLUMN_NAMES):
    df = rosbag_pandas.bag_to_dataframe(filename, include=include)
    pad_bag(df)
    rename_columns(df, column_names)
    normalize_columns(df)
    return df
Example 24
def bag2pickle(paths=None, defaultPath="/home/rhagoletis/catkin/src/World/bags/", toHdf5=True, toPickle=False):
    '''
    Load bag files to make into respective dataframes
    '''
    if paths is None:
        paths = easygui.fileopenbox(title="Bags to Dataframes"
                                    , default=defaultPath,
                                    multiple=True, filetypes=["*traj.bag"])

    print paths, "\n"

    metadata = None
    tic()
    i = 1
    for path in paths:
        tic()
        print "starting analysis of file %s , %s / %s files" % (path.split('/')[-1], i, len(paths))

        #read the trajectory topic from bag
        try:
            df = rosbag_pandas.bag_to_dataframe(path, include=['/trajectory'])
        except Exception as e:

            #If error, dump none
            print "Bag has some nonetype error", path, e
            df = None
            parameters = None
            picklepath = None
            continue

        #try to recover parameters dict from the json loaded in later
        bag = rosbag.Bag(path)

        try:
            for topic, msg, t in bag.read_messages(topics='/metadata'):
                a = msg
                #         parameters=json.loads(a.data)
                #         metadata={"meta":parameters}

            metadata = json.loads(a.data)
            parameters = metadata['parameters']

        except:

            #if header recovery fails, try the pickle file stored in the same directory
            print "no such file!, trying the pickle"
            try:
                metadata = depickler(paths[0].split('.bag')[0])
                parameters = metadata['parameters']

            except IOError:
                #try the blank pickle if all fails
                metadata = depickler(paths[0].split('.bag')[0] + '.pickle')
                parameters = metadata['parameters']


            except Exception as e:
                print ("tried all forms of recovery, but no avail",e)
                parameters = None

        #store the parameters in a dict
        df.parameters = parameters
        obj = dict(df=df, metadata=metadata)

        # legacy naming
        # picklepath=path+"_df.pickle"
        # new naming
        if toPickle:
            picklepath = path + ".dfpickle"
            pickler(obj, picklepath)

        # bagStamp=('_'.join(df.parameters['bagFileName'].split('/')[-1].split('_')[:3]))
        # print "stamp is",bagStamp

        if toHdf5:
            picklepath = path + '.h5'
            df.to_hdf(picklepath, 'df')
            #     df.to_pickle(picklepath,)

        i += 1
        toc()
    print "\nanalysis of %s files complete" % len(paths)
    return df, parameters, picklepath
import pandas as pd
import rosbag_pandas
dataframe = rosbag_pandas.bag_to_dataframe(
    '/home/sjtu/BAG/gps_test_0424_2017-04-24-20-52-44.bag'
)  #awesome data processing
dataframe.to_csv('gps_test.csv')
print(dataframe)
print(111)
Example 26
import rosbag_pandas as rp

df = rp.bag_to_dataframe("i-profile-new-cost.bag")
Example 27
            axes[idx].plot(s.index, s.values)
            axes[idx].set_title(label)
            idx = idx + 1
        plt.show()


if __name__ == '__main__':
    ''' Main entry point for the function. Reads the command line arguments
    and performs the requested actions '''
    # Build the command line argument parser
    parser = buildParser()
    # Read the arguments that were passed in
    args = parser.parse_args()
    bag = args.bag
    fields = args.series
    sharey = args.sharey
    yaml_info = rosbag_pandas.get_bag_info(bag)
    topics = rosbag_pandas.get_topics(yaml_info)
    data_keys = parse_series_args(topics, fields)
    df_keys = {}
    topics = []
    for k, v in data_keys.items():
        column = rosbag_pandas.get_key_name(
            v[0]) + '__' + rosbag_pandas.get_key_name(v[1])
        column = column.replace('.', '_')
        df_keys[k] = column
        topics.append(v[0])

    df = rosbag_pandas.bag_to_dataframe(bag, include=topics, seconds=True)
    graph(df, df_keys, sharey)
Example 28
import argparse

import matplotlib.pyplot as plt
import matplotlib
import pandas as pd
import rosbag_pandas

# parse arguments
parser = argparse.ArgumentParser(description="""Plots logs of log.launch.""")
parser.add_argument("bagfile",
                    type=str,
                    help="""Recorded rosbag file containing monitor logs.""")
parser.add_argument('-e', '--export', type=str, help="Export figure to file.")
args = parser.parse_args()

# load data from bag file
topics = ["/p2os/cmd_vel", "/emergency_stop/dmin", "/dmin_monitor/debug"]
df = rosbag_pandas.bag_to_dataframe(args.bagfile, include=topics)

# shift the index to start at time 0 (relative to the first timestamp)
df.index = [(i - df.index[0]) for i in df.index]

print df.columns

# substitutions
substitutions = []
for c in df.columns:
    if '/dmin_monitor/debug/outputs' in c:
        split = c.split('/')
        substitutions.append(split[-2])
substitutions = sorted(set(substitutions))
print substitutions
Example 29
def export(output_dir, bag_file):
    data = rosbag_pandas.bag_to_dataframe(bag_file)
    bag_name = os.path.splitext(os.path.basename(bag_file))[0]
    output_dir = os.path.abspath(output_dir)

    #data = data.truncate(before="20150319 13:38:30", after="20150319 13:39:00")
    #data = data.truncate(before="20150319 14:53:30", after="20150319 14:54:30")

    # for c in data.columns:
    #     print c

    for chart in CHARTS:
        # Filter series
        # Drop rows where all values are missing
        # Resample:
        # - reduces amount of data
        # - reduces gaps in data (gives a nicer chart)
        chart_df = data.filter(items=chart["series"]).dropna(axis=0, how="all").resample('100L', how="first")
        if chart_df.columns.size > 0:
            path = os.path.join(output_dir, bag_name, chart["name"])
            if not os.path.exists(path):
                os.makedirs(path)

            print "Exporting chart %s to %s" % (chart["name"], path)

            os.chdir(path)
            vis = bearcart.Chart(chart_df)
            vis.create_chart(html_path=HTML_FILE, data_path=DATA_FILE,
                             js_path=JS_FILE, css_path=CSS_FILE)

    #
    # KML export
    #
    base = utm.fromLatLong(47.388650, 8.033269, 368)
    chart_df = data.filter(items=KML_SERIES).dropna(axis=0, how="all").resample('100L', how="first")
    if chart_df.columns.size > 0:
        mavros = KML_SERIES[0] in chart_df.columns
        path = os.path.join(output_dir, bag_name + '.kml')
        print "Exporting KML to %s" % (path)

        fid = open(path, 'w')
        fid.write(kmlhead(bag_name))
        for row_index, row in chart_df.iterrows():
            x = base.easting
            y = base.northing
            a = base.altitude

            # if (math.isnan(row[KML_SERIES[0]]) or math.isnan(row[KML_SERIES[1]]) or math.isnan(row[KML_SERIES[2]])):
            #     print "found nan"
            #     print row

            if mavros and not (math.isnan(row[KML_SERIES[0]]) or math.isnan(row[KML_SERIES[1]]) or math.isnan(row[KML_SERIES[2]])):
                x = row[KML_SERIES[0]]
                y = row[KML_SERIES[1]]
                a = row[KML_SERIES[2]]

            elif not mavros and not (math.isnan(row[KML_SERIES[3]]) or math.isnan(row[KML_SERIES[4]]) or math.isnan(row[KML_SERIES[5]])):
                x = x + row[KML_SERIES[3]]
                y = y + row[KML_SERIES[4]]
                a = a - row[KML_SERIES[5]]

            else:
                continue

            p = utm.UTMPoint(x, y, zone=base.zone, band=base.band).toMsg()

            fid.write("%f,%f,%f " % (p.longitude, p.latitude, a))

        fid.write(kmltail("absolute"))
        fid.close()