Example #1
    def associate(self, offset=0.0, max_difference=0.02):
        self.first_list = associate.read_file_list(self.groundtruthfile)
        self.second_list = associate.read_file_list(self.resultsfile)

        self.matches = associate.associate(self.first_list, self.second_list,
                                           float(offset),
                                           float(max_difference))
        if len(self.matches) < 2:
            sys.exit(
                "Couldn't find matching timestamp pairs between groundtruth and estimated trajectory! Did you choose the correct sequence?"
            )
def rmse(first_file, second_file):

    first_list = associate.read_file_list(first_file)
    second_list = associate.read_file_list(second_file)

    matches = associate.associate(first_list, second_list, 0.0, 0.02)
    if len(matches) < 2:
        sys.exit(
            "Couldn't find matching timestamp pairs between groundtruth and estimated trajectory! Did you choose the correct sequence?"
        )

    first_xyz = numpy.matrix([[float(value) for value in first_list[a][0:3]]
                              for a, b in matches]).transpose()
    second_xyz = numpy.matrix([[float(value) for value in second_list[b][0:3]]
                               for a, b in matches]).transpose()
    rot, trans, trans_error = align(second_xyz, first_xyz)

    return numpy.sqrt(numpy.dot(trans_error, trans_error) / len(trans_error))
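
For reference, the RMSE returned here is just the root of the mean squared per-point translational error; a quick self-contained check (synthetic error vector, not from the original example) showing the two equivalent formulations:

import numpy

trans_error = numpy.array([0.1, 0.2, 0.15])  # hypothetical per-point errors
rmse_dot = numpy.sqrt(numpy.dot(trans_error, trans_error) / len(trans_error))
rmse_mean = numpy.sqrt(numpy.mean(trans_error ** 2))
assert abs(rmse_dot - rmse_mean) < 1e-12  # both compute the same RMSE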
Example #3
def main():
    default_groundtruth = 'rgbd_dataset_freiburg3_long_office_household/groundtruth.txt'
    default_rgb = 'rgbd_dataset_freiburg3_long_office_household/rgb.txt'
    default_depth = 'rgbd_dataset_freiburg3_long_office_household/depth.txt'
    default_output = 'rgbd_dataset_freiburg3_long_office_household'
    # Parse command-line arguments
    p = argparse.ArgumentParser()
    p.add_argument('--groundtruth_file', default=default_groundtruth)
    p.add_argument('--rgb_file', default=default_rgb)
    p.add_argument('--depth_file', default=default_depth)
    p.add_argument('--output_path', default=default_output)
    args = p.parse_args()

    # Associate 3 files
    assc_data = associate(args.groundtruth_file, [args.rgb_file, args.depth_file])

    # Parse data
    n_samples = len(assc_data)
    base_path_rgb = os.path.dirname(args.rgb_file)
    base_path_depth = os.path.dirname(args.depth_file)
    timestamps = np.zeros(n_samples, dtype='float32')
    gtruth = np.zeros((n_samples, 7), dtype='float32')
    rgb = np.zeros((n_samples, 3, 480, 640), dtype='uint8')
    depth = np.zeros((n_samples, 1, 480, 640), dtype='uint16')
    for i, d in enumerate(assc_data):
        print('processing {}/{}'.format(i+1, n_samples)) 
        timestamps[i] = d[0][0]
        gtruth[i] = [float(v) for v in d[0][1:]]
        rgb_img = Image.open(open(os.path.join(base_path_rgb, d[1][1])))
        rgb[i] = img_to_array(rgb_img, 'uint8')
        depth_img = Image.open(open(os.path.join(base_path_depth, d[2][1])))
        depth[i] = img_to_array(depth_img, 'uint16')
    print(np.max(depth))
    # Save
    np.savez(args.output_path, 
             timestamps=timestamps, groundtruth=gtruth, rgb=rgb, depth=depth)
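
The script bundles everything into one .npz archive via np.savez; a minimal sketch (assuming the default output path above, to which numpy appends '.npz') of reading the arrays back:

import numpy as np

data = np.load('rgbd_dataset_freiburg3_long_office_household.npz')
print(data['timestamps'].shape)   # (n_samples,)
print(data['groundtruth'].shape)  # (n_samples, 7): tx ty tz qx qy qz qw
print(data['rgb'].shape, data['depth'].shape)  # RGB and depth image stacks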
Example #4
if not os.path.exists(filename_zip):
    print('Downloading dataset file ', filename_zip)
    testfile = URLopener()
    testfile.retrieve(dataset_url, filename_zip)

if not os.path.exists(filename):
    print('Extracting dataset ', filename)
    tar = tarfile.open(filename_zip, "r:gz")
    tar.extractall()
    tar.close()

if not os.path.exists(filename + '/depth_gt.txt'):
    first_list = associate.read_file_list(filename + '/depth.txt')
    second_list = associate.read_file_list(filename + '/groundtruth.txt')

    matches = associate.associate(first_list, second_list, 0.0, 0.02)

    f = open(filename + '/depth_gt.txt', 'w')
    for a, b in matches:
        f.write(
            "%f %s %f %s\n" %
            (a, " ".join(first_list[a]), b - 0.0, " ".join(second_list[b])))
    f.close()

print('Dataset is ready.')

os.system(
    'rosrun ewok_ring_buffer tum_rgbd_ring_buffer_example rgbd_dataset_freiburg2_pioneer_slam res.txt'
)

print('Results are ready.')
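
Each line written to depth_gt.txt pairs a depth entry with its matched ground-truth pose; a hedged sketch (sequence directory assumed as above) of parsing the file back:

# line format: depth_stamp depth_file gt_stamp tx ty tz qx qy qz qw
with open('rgbd_dataset_freiburg2_pioneer_slam/depth_gt.txt') as f:
    for line in f:
        fields = line.split()
        depth_stamp, depth_file = float(fields[0]), fields[1]
        gt_stamp = float(fields[2])
        pose = [float(v) for v in fields[3:10]]  # tx ty tz qx qy qz qw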
Example #5
    parser.add_argument('--offset', help='time offset added to the timestamps of the second file (default: 0.0)',default=0.0)
    parser.add_argument('--scale', help='scaling factor for the second trajectory (default: 1.0)',default=1.0)
    parser.add_argument('--max_difference', help='maximally allowed time difference for matching entries (default: 0.02)',default=0.02)
    parser.add_argument('--save', help='save aligned second trajectory to disk (format: stamp2 x2 y2 z2)')
    parser.add_argument('--save_associations', help='save associated first and aligned second trajectory to disk (format: stamp1 x1 y1 z1 stamp2 x2 y2 z2)')
    parser.add_argument('--plot', help='plot the first and the aligned second trajectory to an image (format: png)')
    parser.add_argument('--verbose', help='print all evaluation data (otherwise, only the RMSE absolute translational error in meters after alignment will be printed)', action='store_true')
    args = parser.parse_args()

    first_list = associate.read_file_list(args.first_file)
    second_list = associate.read_file_list(args.second_file)
    
    if args.interpolate:
       first_list = interpolate_and_resample(first_list,list(second_list.keys()),float(args.offset),float(args.max_difference)*2)

    matches = associate.associate(first_list, second_list,float(args.offset),float(args.max_difference))    
    if len(matches)<2:
        sys.exit("Couldn't find matching timestamp pairs between groundtruth and estimated trajectory! Did you choose the correct sequence?")

    first_xyz = numpy.matrix([[float(value) for value in first_list[a][0:3]] for a,b in matches]).transpose()
    second_xyz = numpy.matrix([[float(value)*float(args.scale) for value in second_list[b][0:3]] for a,b in matches]).transpose()
    rot,trans,trans_error = align(second_xyz,first_xyz)

   

    second_xyz_aligned = rot * second_xyz + trans
    
    first_stamps = sorted(first_list.keys())
    first_xyz_full = numpy.matrix([[float(value) for value in first_list[b][0:3]] for b in first_stamps]).transpose()
    
def evaluate(first_file, second_file):
    first_list = associate.read_file_list(first_file)
    second_list = associate.read_file_list(second_file)

    matches = associate.associate(first_list, second_list, 0.0, 0.02)

    if len(matches) < 2:
        sys.exit(
            "Couldn't find matching timestamp pairs between groundtruth and estimated trajectory! Did you choose the correct sequence?"
        )

    first_xyz = numpy.matrix([[float(value) for value in first_list[a][0:3]]
                              for a, b in matches]).transpose()
    second_xyz = numpy.matrix([[float(value) for value in second_list[b][0:3]]
                               for a, b in matches]).transpose()

    first_quat = numpy.matrix([[float(value) for value in first_list[a][3:7]]
                               for a, b in matches])
    second_quat = numpy.matrix([[float(value) for value in second_list[b][3:7]]
                                for a, b in matches])

    first_quats = []

    rot, trans, trans_error = align(second_xyz, first_xyz)

    for quat_1 in first_quat:
        quat_1_array = numpy.squeeze(numpy.asarray(quat_1))

        quat_constr = quat(quat_1_array[3], quat_1_array[0], quat_1_array[1],
                           quat_1_array[2])
        first_quats.append(quat_constr)

    counter = 0

    rot_errors = []

    for quat_1 in second_quat:
        quat_1_array = numpy.squeeze(numpy.asarray(quat_1))
        quat_constr = quat(quat_1_array[3], quat_1_array[0], quat_1_array[1],
                           quat_1_array[2])
        orientation_aligned = first_quats[
            counter].inverse.rotation_matrix * rot * quat_constr.rotation_matrix
        rot_errors.append(angle(orientation_aligned))
        counter += 1

    first_stamps = sorted(first_list)
    second_stamps = sorted(second_list)
    second_xyz_full = numpy.matrix(
        [[float(value) for value in second_list[b][0:3]]
         for b in second_stamps]).transpose()

    print("compared_pose_pairs " + str(len(trans_error)) + " pairs")

    print("alignment transformation R + t is")
    print(rot)
    print(trans)

    print("absolute_translational_error.rmse " + str(
        numpy.sqrt(numpy.dot(trans_error, trans_error) / len(trans_error))) +
          " m")
    print("absolute_translational_error.mean " + str(numpy.mean(trans_error)) +
          " m")
    print("absolute_translational_error.median " +
          str(numpy.median(trans_error)) + " m")
    print("absolute_translational_error.std " + str(numpy.std(trans_error)) +
          " m")
    print("absolute_translational_error.min " + str(numpy.min(trans_error)) +
          " m")
    print("absolute_translational_error.max " + str(numpy.max(trans_error)) +
          " m")

    print()

    print("absolute_rotational_error.rmse " +
          str(numpy.sqrt(numpy.dot(rot_errors, rot_errors) /
                         len(rot_errors))) + " rad")
    print("absolute_rotational_error.mean " + str(numpy.mean(rot_errors)) +
          " rad")
    print("absolute_rotational_error.median " + str(numpy.median(rot_errors)) +
          " rad")
    print("absolute_rotational_error.std " + str(numpy.std(rot_errors)) +
          " rad")
    print("absolute_rotational_error.min " + str(numpy.min(rot_errors)) +
          "  rad")
    print("absolute_rotational_error.max " + str(numpy.max(rot_errors)) +
          " rad")

    return numpy.sqrt(numpy.dot(trans_error, trans_error) /
                      len(trans_error)), numpy.sqrt(
                          numpy.dot(rot_errors, rot_errors) / len(rot_errors))
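
Since evaluate() returns the translational and rotational RMSE as a pair, a hypothetical call (file names are placeholders) looks like:

ate_rmse, are_rmse = evaluate('groundtruth.txt', 'estimated.txt')
print('ATE %.4f m, ARE %.4f rad' % (ate_rmse, are_rmse))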
Example #7
def evaluate_ate(first_list, second_list, _args=""):
    # parse command line
    parser = argparse.ArgumentParser(
        description=
        'This script computes the absolute trajectory error from the ground truth trajectory and the estimated trajectory.'
    )
    # parser.add_argument('first_file', help='ground truth trajectory (format: timestamp tx ty tz qx qy qz qw)')
    # parser.add_argument('second_file', help='estimated trajectory (format: timestamp tx ty tz qx qy qz qw)')
    parser.add_argument(
        '--offset',
        help=
        'time offset added to the timestamps of the second file (default: 0.0)',
        default=0.0)
    parser.add_argument(
        '--scale',
        help='scaling factor for the second trajectory (default: 1.0)',
        default=1.0)
    parser.add_argument(
        '--max_difference',
        help=
        'maximally allowed time difference for matching entries (default: 0.02)',
        default=0.02)
    parser.add_argument(
        '--save',
        help='save aligned second trajectory to disk (format: stamp2 x2 y2 z2)'
    )
    parser.add_argument(
        '--save_associations',
        help=
        'save associated first and aligned second trajectory to disk (format: stamp1 x1 y1 z1 stamp2 x2 y2 z2)'
    )
    parser.add_argument(
        '--plot',
        help=
        'plot the first and the aligned second trajectory to an image (format: png)'
    )
    parser.add_argument(
        '--verbose',
        help=
        'print all evaluation data (otherwise, only the RMSE absolute translational error in meters after alignment will be printed)',
        action='store_true')
    args = parser.parse_args(_args)

    # first_list = associate.read_file_list(args.first_file)
    # second_list = associate.read_file_list(args.second_file)

    matches = associate.associate(first_list, second_list, float(args.offset),
                                  float(args.max_difference))
    if len(matches) < 2:
        raise ValueError(
            "Couldn't find matching timestamp pairs between groundtruth and estimated trajectory! Did you choose the correct sequence?"
        )

    first_xyz = numpy.matrix([[float(value) for value in first_list[a][0:3]]
                              for a, b in matches]).transpose()
    second_xyz = numpy.matrix(
        [[float(value) * float(args.scale) for value in second_list[b][0:3]]
         for a, b in matches]).transpose()
    rot, trans, trans_error = align(second_xyz, first_xyz)

    second_xyz_aligned = rot * second_xyz + trans

    first_stamps = list(first_list.keys())
    first_stamps.sort()
    first_xyz_full = numpy.matrix(
        [[float(value) for value in first_list[b][0:3]]
         for b in first_stamps]).transpose()

    second_stamps = list(second_list.keys())
    second_stamps.sort()
    second_xyz_full = numpy.matrix(
        [[float(value) * float(args.scale) for value in second_list[b][0:3]]
         for b in second_stamps]).transpose()
    second_xyz_full_aligned = rot * second_xyz_full + trans

    if args.verbose:
        print("compared_pose_pairs %d pairs" % (len(trans_error)))

        print(
            "absolute_translational_error.rmse %f m" %
            numpy.sqrt(numpy.dot(trans_error, trans_error) / len(trans_error)))
        print("absolute_translational_error.mean %f m" %
              numpy.mean(trans_error))
        print("absolute_translational_error.median %f m" %
              numpy.median(trans_error))
        print("absolute_translational_error.std %f m" % numpy.std(trans_error))
        print("absolute_translational_error.min %f m" % numpy.min(trans_error))
        print("absolute_translational_error.max %f m" % numpy.max(trans_error))

    if args.save_associations:
        file = open(args.save_associations, "w")
        file.write("\n".join([
            "%f %f %f %f %f %f %f %f" % (a, x1, y1, z1, b, x2, y2, z2)
            for (a, b), (x1, y1,
                         z1), (x2, y2,
                               z2) in zip(matches,
                                          first_xyz.transpose().A,
                                          second_xyz_aligned.transpose().A)
        ]))
        file.close()

    if args.save:
        file = open(args.save, "w")
        file.write("\n".join([
            "%f " % stamp + " ".join(["%f" % d for d in line])
            for stamp, line in zip(second_stamps,
                                   second_xyz_full_aligned.transpose().A)
        ]))
        file.close()

    if args.plot:
        import matplotlib
        matplotlib.use('Agg')
        import matplotlib.pyplot as plt
        import matplotlib.pylab as pylab
        from matplotlib.patches import Ellipse
        fig = plt.figure()
        ax = fig.add_subplot(111)
        plot_traj(ax, first_stamps,
                  first_xyz_full.transpose().A, '-', "black", "ground truth")
        plot_traj(ax, second_stamps,
                  second_xyz_full_aligned.transpose().A, '-', "blue",
                  "estimated")

        label = "difference"
        for (a, b), (x1, y1,
                     z1), (x2, y2,
                           z2) in zip(matches,
                                      first_xyz.transpose().A,
                                      second_xyz_aligned.transpose().A):
            ax.plot([x1, x2], [y1, y2], '-', color="red", label=label)
            label = ""

        ax.legend()

        ax.set_xlabel('x [m]')
        ax.set_ylabel('y [m]')
        plt.savefig(args.plot, dpi=90)

    return {
        "compared_pose_pairs": (len(trans_error)),
        "absolute_translational_error.rmse":
        numpy.sqrt(numpy.dot(trans_error, trans_error) / len(trans_error)),
        "absolute_translational_error.mean":
        numpy.mean(trans_error),
        "absolute_translational_error.median":
        numpy.median(trans_error),
        "absolute_translational_error.std":
        numpy.std(trans_error),
        "absolute_translational_error.min":
        numpy.min(trans_error),
        "absolute_translational_error.max":
        numpy.max(trans_error),
    }
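
Unlike the original TUM script, this variant takes already-parsed trajectories plus an argv-style list, so it can be driven programmatically; a hedged usage sketch (file names are placeholders):

first_list = associate.read_file_list('groundtruth.txt')
second_list = associate.read_file_list('estimated.txt')
stats = evaluate_ate(first_list, second_list,
                     _args=['--plot', 'ate.png', '--verbose'])
print(stats['absolute_translational_error.rmse'])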
def align_sim3(model, data):
    """Implementation of the paper: S. Umeyama, Least-Squares Estimation
       of Transformation Parameters Between Two Point Patterns,
       IEEE Trans. Pattern Anal. Mach. Intell., vol. 13, no. 4, 1991.
       Input:
           model -- first trajectory (3xn)
           data -- second trajectory (3xn)
       Output:
           s -- scale factor (scalar)
           R -- rotation matrix (3x3)
           t -- translation vector (3x1)
           t_error -- translational error per point (1xn)
    """
    # subtract mean (trajectories are 3xn, so average over the columns)
    mu_M = model.mean(1).reshape(model.shape[0], 1)
    mu_D = data.mean(1).reshape(data.shape[0], 1)
    model_zerocentered = model - mu_M
    data_zerocentered = data - mu_D
    n = np.shape(model)[1]

    # correlation matrix and variance of the data trajectory
    C = 1.0/n * np.dot(model_zerocentered, data_zerocentered.transpose())
    sigma2 = 1.0/n * np.multiply(data_zerocentered, data_zerocentered).sum()
    U_svd, D_svd, V_svd = np.linalg.svd(C)
    D_svd = np.diag(D_svd)
    V_svd = np.transpose(V_svd)
    S = np.eye(3)

    # correct for a reflection if the rotation has negative determinant
    if np.linalg.det(U_svd) * np.linalg.det(V_svd) < 0:
        S[2, 2] = -1

    R = np.dot(U_svd, np.dot(S, np.transpose(V_svd)))
    s = 1.0/sigma2 * np.trace(np.dot(D_svd, S))
    t = mu_M - s * np.dot(R, mu_D)

    # apply the estimated similarity transform to the data trajectory
    data_aligned = s * np.dot(R, data) + t
    alignment_error = data_aligned - model
    t_error = np.sqrt(np.sum(np.multiply(alignment_error, alignment_error), 0)).A[0]

    return s, R, t, t_error
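
A quick self-check for align_sim3 (synthetic data, not from the original source): distort a 3xn trajectory with a known similarity transform and verify that the recovered transform drives the per-point error to zero.

import numpy as np

np.random.seed(0)
model = np.matrix(np.random.rand(3, 50))  # reference trajectory, 3xn
theta = 0.3
R_true = np.matrix([[np.cos(theta), -np.sin(theta), 0.0],
                    [np.sin(theta),  np.cos(theta), 0.0],
                    [0.0,            0.0,           1.0]])
t_true = np.matrix([[0.5], [1.0], [-0.2]])
data = 0.5 * R_true * model + t_true      # scaled, rotated, shifted copy

s, R, t, t_error = align_sim3(model, data)
print(s, np.max(t_error))                 # expect s ~ 2.0 and error ~ 0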

# Euclidean-transform error
"""
The coordinate frame in which the ground-truth trajectory was recorded differs
from the frame the algorithm starts in, so a Euclidean transform lies between
the estimated camera trajectory and the ground truth. After registering the
estimates against the ground truth, we therefore solve for the Euclidean
transform between the ground truth and the matched estimates.

The estimates are transformed first and only then differenced against the
ground truth.

"""
def align(model,data):
    """Align two trajectories using the method of Horn (closed-form).
    Computes the registration error between the two trajectories.
    Input:
    model -- first trajectory (3xn), the estimated poses
    data -- second trajectory (3xn), the ground truth
    
    Output:
    rot -- rotation matrix (3x3) of the transform between the two trajectories
    trans -- translation vector (3x1)
    trans_error -- translational error per point (1xn)
    
    """
    numpy.set_printoptions(precision=3,suppress=True)
    model_zerocentered = model - model.mean(1) # remove the mean
    data_zerocentered = data - data.mean(1)
    
    W = numpy.zeros( (3,3) )
    for column in range(model.shape[1]):
        W += numpy.outer(model_zerocentered[:,column],data_zerocentered[:,column])
        # outer(a, b): every element of a scales the vector b, giving a 3x3 matrix
        # https://blog.csdn.net/hqh131360239/article/details/79064592
    U,d,Vh = numpy.linalg.svd(W.transpose()) # singular value decomposition
    S = numpy.matrix(numpy.identity( 3 )) # identity matrix
    if(numpy.linalg.det(U) * numpy.linalg.det(Vh)<0):
        S[2,2] = -1
    rot = U*S*Vh
    trans = data.mean(1) - rot * model.mean(1)
    
    model_aligned = rot * model + trans
    alignment_error = model_aligned - data
    # err = sqrt((x-x')^2 + (y-y')^2 + (z-z')^2) 
    trans_error = numpy.sqrt(numpy.sum(numpy.multiply(alignment_error,alignment_error),0)).A[0]
        
    return rot,trans,trans_error
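
A quick self-check for align() (synthetic data, not part of the original script): rotate and shift a random 3xn trajectory, then confirm the transform is recovered with near-zero per-point error.

import numpy

numpy.random.seed(1)
model = numpy.matrix(numpy.random.rand(3, 20))  # synthetic 3xn trajectory
R_true = numpy.matrix([[0.0, -1.0, 0.0],
                       [1.0,  0.0, 0.0],
                       [0.0,  0.0, 1.0]])       # 90 degrees about z
data = R_true * model + numpy.matrix([[1.0], [2.0], [3.0]])
rot, trans, trans_error = align(model, data)
print(numpy.max(trans_error))                   # ~0: transform fully recovered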

def plot_traj(ax,stamps,traj,style,color,label):
    """
    Plot a trajectory using matplotlib (2D plot).
    Input:
    ax -- the plot
    stamps -- time stamps (1xn)
    traj -- trajectory (3xn)
    style -- line style
    color -- line color
    label -- plot legend
    
    """
    stamps.sort()
    interval = numpy.median([s-t for s,t in zip(stamps[1:],stamps[:-1])])
    x = []
    y = []
    last = stamps[0]
    for i in range(len(stamps)):
        if stamps[i]-last < 2*interval:
            x.append(traj[i][0])
            y.append(traj[i][1])
        elif len(x)>0:
            ax.plot(x,y,style,color=color,label=label)
            label=""
            x=[]
            y=[]
        last= stamps[i]
    if len(x)>0:
        ax.plot(x,y,style,color=color,label=label)
 
def plot_traj3D(ax,stamps,traj,style,color,label):
    """
    Plot a trajectory using matplotlib (3D plot).
    Input:
    ax -- the plot
    stamps -- time stamps (1xn)
    traj -- trajectory (3xn)
    style -- line style
    color -- line color
    label -- plot legend
    
    """
    stamps.sort()
    interval = numpy.median([s-t for s,t in zip(stamps[1:],stamps[:-1])])
    x = []
    y = []
    z = []
    last = stamps[0]
    for i in range(len(stamps)):
        if stamps[i]-last < 2*interval:
            x.append(traj[i][0])
            y.append(traj[i][1])
            z.append(traj[i][2])
        elif len(x)>0:
            ax.plot(x,y,z,style,color=color,label=label)
            
            label=""
            x=[]
            y=[]
            z=[]
        last= stamps[i]
    if len(x)>0:
        ax.plot(x,y,z,style,color=color,label=label)          

if __name__=="__main__":
    # parse the command-line arguments
    parser = argparse.ArgumentParser(description='''
    This script computes the absolute trajectory error from the ground truth trajectory and the estimated trajectory. 
    ''')
    parser.add_argument('first_file', help='ground truth trajectory (format: timestamp tx ty tz qx qy qz qw)')
    parser.add_argument('second_file', help='estimated trajectory (format: timestamp tx ty tz qx qy qz qw)')
    parser.add_argument('--offset', help='time offset added to the timestamps of the second file (default: 0.0)',default=0.0)
    parser.add_argument('--scale', help='scaling factor for the second trajectory (default: 1.0)',default=1.0)
    parser.add_argument('--max_difference', help='maximally allowed time difference for matching entries (default: 0.02)',default=0.02)
    parser.add_argument('--save', help='save aligned second trajectory to disk (format: stamp2 x2 y2 z2)')
    parser.add_argument('--save_associations', help='save associated first and aligned second trajectory to disk (format: stamp1 x1 y1 z1 stamp2 x2 y2 z2)')
    parser.add_argument('--plot', help='plot the first and the aligned second trajectory to an image (format: png)')
    parser.add_argument('--plot3D', help='plot the first and the aligned second trajectory to as interactive 3D plot (format: png)', action = 'store_true')
    parser.add_argument('--verbose', help='print all evaluation data (otherwise, only the RMSE absolute translational error in meters after alignment will be printed)', action='store_true')
    args = parser.parse_args()
    
    # read the data
    first_list = associate.read_file_list(args.first_file)
    second_list = associate.read_file_list(args.second_file)
    
    # match by timestamp; the difference must not exceed max_difference (0.02 s)
    matches = associate.associate(first_list, second_list,float(args.offset),float(args.max_difference))    
    if len(matches)<2:
        sys.exit("Couldn't find matching timestamp pairs between groundtruth and estimated trajectory! Did you choose the correct sequence?")
    
    # for each matched pair, keep only the x, y, z positions
    first_xyz = numpy.matrix([[float(value) for value in first_list[a][0:3]] for a,b in matches]).transpose()
    second_xyz = numpy.matrix([[float(value)*float(args.scale) for value in second_list[b][0:3]] for a,b in matches]).transpose()
    
    # solve for the transform between the matched positions and the resulting error
    rot,trans,trans_error = align(second_xyz,first_xyz)
    
    # aligned second trajectory, used for the error lines between matches
    second_xyz_aligned = rot * second_xyz + trans
    first_stamps = sorted(first_list.keys())
    first_xyz_full = numpy.matrix([[float(value) for value in first_list[b][0:3]] for b in first_stamps]).transpose()
    second_stamps = sorted(second_list.keys())
    second_xyz_full = numpy.matrix([[float(value)*float(args.scale) for value in second_list[b][0:3]] for b in second_stamps]).transpose()
    second_xyz_full_aligned = rot * second_xyz_full + trans
    
    if args.verbose:
        print("compared_pose_pairs %d pairs" % (len(trans_error)))
        # root mean square error: average the squared errors, then take the root
        print("absolute_translational_error.rmse %f m" % numpy.sqrt(numpy.dot(trans_error, trans_error) / len(trans_error)))
        # mean error
        print("absolute_translational_error.mean %f m" % numpy.mean(trans_error))
        # median error
        print("absolute_translational_error.median %f m" % numpy.median(trans_error))
        # standard deviation of the error
        print("absolute_translational_error.std %f m" % numpy.std(trans_error))
        # minimum error
        print("absolute_translational_error.min %f m" % numpy.min(trans_error))
        # maximum error
        print("absolute_translational_error.max %f m" % numpy.max(trans_error))
    else:
        print("%f" % numpy.sqrt(numpy.dot(trans_error, trans_error) / len(trans_error)))
        
    if args.save_associations:
        file = open(args.save_associations,"w")
        file.write("\n".join(["%f %f %f %f %f %f %f %f"%(a,x1,y1,z1,b,x2,y2,z2) for (a,b),(x1,y1,z1),(x2,y2,z2) in zip(matches,first_xyz.transpose().A,second_xyz_aligned.transpose().A)]))
        file.close()
        
    if args.save:
        file = open(args.save,"w")
        file.write("\n".join(["%f "%stamp+" ".join(["%f"%d for d in line]) for stamp,line in zip(second_stamps,second_xyz_full_aligned.transpose().A)]))
        file.close()

    if args.plot:
        # draw the 2D plot
        import matplotlib
        matplotlib.use('Agg')
        import matplotlib.pyplot as plt
        import matplotlib.pylab as pylab
        from matplotlib.patches import Ellipse
        fig = plt.figure()
        ax = fig.add_subplot(111)
        plot_traj(ax,first_stamps,first_xyz_full.transpose().A,'-',"black","ground truth")
        plot_traj(ax,second_stamps,second_xyz_full_aligned.transpose().A,'-',"blue","estimated")
        
        # error lines (commented out)
        #label="difference"
        #for (a,b),(x1,y1,z1),(x2,y2,z2) in zip(matches,first_xyz.transpose().A,second_xyz_aligned.transpose().A):
        #    ax.plot([x1,x2],[y1,y2],'-',color="red",label=label)
        #    label=""
            
        ax.legend()
            
        ax.set_xlabel('x [m]')
        ax.set_ylabel('y [m]')
        plt.savefig(args.plot,dpi=90)
        
    if args.plot3D:
        # draw the 3D plot
        import matplotlib as mpl
        mpl.use('Qt4Agg')
        from mpl_toolkits.mplot3d import Axes3D
        import numpy as np
        import matplotlib.pyplot as plt
        fig = plt.figure()
        ax = fig.gca(projection='3d')
        #        ax = fig.add_subplot(111)
        plot_traj3D(ax,first_stamps,first_xyz_full.transpose().A,'-',"black","groundTruth")
        plot_traj3D(ax,second_stamps,second_xyz_full_aligned.transpose().A,'-',"blue","orb-slam2-flow")
        
        # error lines (commented out)
        #label="difference"
        #for (a,b),(x1,y1,z1),(x2,y2,z2) in zip(matches,first_xyz.transpose().A,second_xyz_aligned.transpose().A):
        #    ax.plot([x1,x2],[y1,y2],[z1,z2],'-',color="red",label=label)
        #    label=""            
        
        ax.legend()
        ax.set_xlabel('x [m]')
        ax.set_ylabel('y [m]')
        print "Showing"
        plt.show(block=True)
        plt.savefig("./test.png",dpi=90)
def compare_traj(namelist, dirlist, gt_dir, save_dir=None, plot=False):
    if len(dirlist) < 2 or len(dirlist) != len(namelist):
        return

    if save_dir is not None:
        if not os.path.exists(save_dir):
            os.makedirs(save_dir)

    valid_path = []
    for path in dirlist:
        files = [
            x for x in os.listdir(path) if x[:4] == 'est_' and x[-4:] == '.txt'
        ]
        if not os.path.isdir(path) or len(files) == 0:
            print('Invalid path:%s' % path)
            continue
        valid_path.append(path)

    colors = [
        'red', 'blue', 'green', 'purple', 'pink', 'sienna', 'gray', 'yellow',
        'black', 'gold', 'darkcyan'
    ]
    datanames = sorted([
        x[4:-4] for x in os.listdir(dirlist[0])
        if x[:4] == 'est_' and x[-4:] == '.txt'
    ])
    for dataname in datanames:
        plt.figure()
        plt.title(dataname)
        gt_file = os.path.join(gt_dir, '%s.txt' % dataname)
        gt_list = associate.read_file_list(gt_file)
        gt_stamps = sorted(gt_list.keys())
        gt_traj = np.matrix([[float(value) for value in gt_list[b][0:3]]
                             for b in gt_stamps]).transpose()
        plt.plot(gt_traj[0, :].T,
                 gt_traj[1, :].T,
                 colors[0],
                 label='ground truth')
        plt.plot(gt_traj[0, 0], gt_traj[1, 0], 'yp', markersize=8)

        cidx = 1
        for name, path in zip(namelist, dirlist):
            traj_file = os.path.join(path, 'est_%s.txt' % dataname)
            if not os.path.exists(traj_file):
                continue
            traj_list = associate.read_file_list(traj_file)
            matches = associate.associate(gt_list, traj_list, 0, 0.02)
            if len(matches) < 2:
                continue
            gt_xyz = np.matrix([[float(value) for value in gt_list[a][0:3]]
                                for a, b in matches]).transpose()
            traj_xyz = np.matrix(
                [[float(value) for value in traj_list[b][0:3]]
                 for a, b in matches]).transpose()
            rot, trans, trans_error = align(traj_xyz, gt_xyz)
            aligned_traj = rot * traj_xyz + trans
            if np.max(trans_error) < 100:
                plt.plot(aligned_traj[0, :].T,
                         aligned_traj[1, :].T,
                         colors[cidx % len(colors)],
                         label=name)
            cidx += 1
        plt.legend()
        if save_dir is not None:
            plt.savefig(os.path.join(save_dir, '%s.png' % dataname))
    if plot:
        plt.show()
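
compare_traj expects each result directory to contain est_<sequence>.txt files; a hypothetical invocation (all paths are placeholders):

compare_traj(namelist=['orb-slam2', 'orb-slam2-flow'],
             dirlist=['results/orb_slam2', 'results/orb_slam2_flow'],
             gt_dir='groundtruth',
             save_dir='comparison_plots',
             plot=False)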
Example #10
# config -------------------------------------------------------------------------

trace_name='ptam_i7_rig3'
n_align_frames = 300

img_prefix = '../trace/'+trace_name+'/'+trace_name+'_'
traj_groundtruth = '../trace/'+trace_name+'/traj_groundtruth.txt'
traj_estimate = '../trace/'+trace_name+'/traj_estimate.txt'

#---------------------------------------------------------------------------------

first_list = associate.read_file_list(traj_groundtruth)
second_list = associate.read_file_list(traj_estimate)

matches = associate.associate(first_list, second_list,float(0.0),float(0.02))
if len(matches)<2:
    sys.exit("Couldn't find matching timestamp pairs between groundtruth and estimated trajectory! Did you choose the correct sequence?")

# read data into a numpy matrix
first_xyz = numpy.matrix([[float(value) for value in first_list[a][0:3]] for a,b in matches]).transpose()
second_xyz = numpy.matrix([[float(value) for value in second_list[b][0:3]] for a,b in matches]).transpose()

# align Sim3
scale,rot,trans,trans_error = compare_two_trajectories.alignSim3(first_xyz,second_xyz, n_align_frames)

second_xyz_aligned = scale*rot*second_xyz+trans
alignment_error = second_xyz_aligned - first_xyz
trans_error = numpy.sqrt(numpy.sum(numpy.multiply(alignment_error,alignment_error),0)).A[0]

print('s=' + str(scale))
Example #11
def evaluate_ate(first_timetraj,
                 second_timetraj,
                 offset=0.0,
                 max_difference=0.02,
                 save_associations=None,
                 plot=None,
                 major_axes=None,
                 plot_3d=None,
                 verbose=False):
    """
    Copmute ATE
    Input:
    first_list -- {time stamp: pose}
    second_list -- {time stamp: pose}
    """

    matches = associate.associate(first_timetraj, second_timetraj, offset,
                                  max_difference)
    matches = np.array(matches)
    if len(matches) < 2:
        sys.exit(
            "Couldn't find matching timestamp pairs between groundtruth and "
            "estimated trajectory! Did you choose the correct sequence?")

    tr_ratio = track_ratio(first_timetraj, matches)
    if tr_ratio < 0.5:
        print("matched trajectory is too short", tr_ratio)
        raise ValueError()

    # txyz: 4xn matrix = rows of timestamp, x, y, z
    first_txyz, first_xyz_sync = extract_trajmatrix(first_timetraj,
                                                    matches[:, 0], major_axes)
    second_txyz, second_xyz_sync = extract_trajmatrix(second_timetraj,
                                                      matches[:, 1], major_axes)

    rot, trans, trans_error, first_xyz_sync, second_xyz_sync \
        = align_99_percent(first_xyz_sync, second_xyz_sync, 0.01)
    if np.mean(trans_error) > 100.0:
        print("error is too large:", np.mean(trans_error))
        raise ValueError()

    first_xyz_sync, second_xyz_sync = reduce_matches(first_xyz_sync,
                                                     second_xyz_sync)
    second_txyz[1:, :] = rot * second_txyz[1:, :] + trans
    second_xyz_sync = rot * second_xyz_sync + trans

    association = np.array(
        [[a, x1, y1, z1, b, x2, y2, z2]
         for (a, b), (x1, y1, z1), (x2, y2,
                                    z2) in zip(matches,
                                               first_xyz_sync.transpose().A,
                                               second_xyz_sync.transpose().A)])
    print("raw and association shapes", first_txyz.shape, first_xyz_sync.shape)

    print_stats(trans_error, verbose)

    if save_associations:
        print("save associations:", save_associations)
        np.savetxt(save_associations, association, fmt="%1.5f")

    if plot:
        plot2d(first_txyz, first_xyz_sync, second_txyz, second_xyz_sync, plot)

    if plot_3d:
        plot3d(first_txyz, first_xyz_sync, second_txyz, second_xyz_sync, plot)

    association = np.array(association, dtype=np.float64)
    return rot, trans, trans_error, association
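
A hedged call sketch (the helpers track_ratio, extract_trajmatrix, align_99_percent and reduce_matches are assumed to be defined elsewhere in the source module; file names are placeholders):

first_timetraj = associate.read_file_list('groundtruth.txt')
second_timetraj = associate.read_file_list('estimated.txt')
rot, trans, trans_error, association = evaluate_ate(
    first_timetraj, second_timetraj, plot='ate_2d.png', verbose=True)
print(np.sqrt(np.mean(np.asarray(trans_error) ** 2)))  # ATE RMSE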
Example #12
def main(args) :    
    first_list = associate.read_file_list(args.first_file)
    second_list = associate.read_file_list(args.second_file)

    matches = associate.associate(first_list, second_list,float(args.offset),float(args.max_difference))    
    if len(matches)<2:
        sys.exit("Couldn't find matching timestamp pairs between groundtruth and estimated trajectory! Did you choose the correct sequence?")


    first_xyz = numpy.matrix([[float(value) for value in first_list[a][0:3]] for a,b in matches]).transpose()
    second_xyz = numpy.matrix([[float(value)*float(args.scale) for value in second_list[b][0:3]] for a,b in matches]).transpose()
    rot,trans,trans_error = align(second_xyz,first_xyz)
    
    second_xyz_aligned = rot * second_xyz + trans
    
    first_stamps = sorted(first_list.keys())
    first_xyz_full = numpy.matrix([[float(value) for value in first_list[b][0:3]] for b in first_stamps]).transpose()
    
    second_stamps = sorted(second_list.keys())
    second_xyz_full = numpy.matrix([[float(value)*float(args.scale) for value in second_list[b][0:3]] for b in second_stamps]).transpose()
    second_xyz_full_aligned = rot * second_xyz_full + trans
    
    if args.verbose:
        print("compared_pose_pairs %d pairs" % (len(trans_error)))

        print("absolute_translational_error.rmse %f m" % numpy.sqrt(numpy.dot(trans_error, trans_error) / len(trans_error)))
        print("absolute_translational_error.mean %f m" % numpy.mean(trans_error))
        print("absolute_translational_error.median %f m" % numpy.median(trans_error))
        print("absolute_translational_error.std %f m" % numpy.std(trans_error))
        print("absolute_translational_error.min %f m" % numpy.min(trans_error))
        print("absolute_translational_error.max %f m" % numpy.max(trans_error))
    else:
        print("%f" % numpy.sqrt(numpy.dot(trans_error, trans_error) / len(trans_error)))
        
    if args.save_associations:
        file = open(args.save_associations,"w")
        file.write("\n".join(["%f %f %f %f %f %f %f %f"%(a,x1,y1,z1,b,x2,y2,z2) for (a,b),(x1,y1,z1),(x2,y2,z2) in zip(matches,first_xyz.transpose().A,second_xyz_aligned.transpose().A)]))
        file.close()
        
    if args.save:
        file = open(args.save,"w")
        file.write("\n".join(["%f "%stamp+" ".join(["%f"%d for d in line]) for stamp,line in zip(second_stamps,second_xyz_full_aligned.transpose().A)]))
        file.close()

    if args.plot:
        import matplotlib
        matplotlib.use('Agg')
        import matplotlib.pyplot as plt
        import matplotlib.pylab as pylab
        from matplotlib.patches import Ellipse
        matplotlib.rcParams.update({'font.size': 22})
        fig = plt.figure()
        ax = fig.add_subplot(111)
        plot_traj(ax,first_stamps,first_xyz_full.transpose().A,'-',"black","ground truth")
        plot_traj(ax,second_stamps,second_xyz_full_aligned.transpose().A,'-',"blue","estimated")

        label="difference"
        for (a,b),(x1,y1,z1),(x2,y2,z2) in zip(matches,first_xyz.transpose().A,second_xyz_aligned.transpose().A):
            ax.plot([x1,x2],[y1,y2],'-',color="red",label=label)
            label=""
            
       # ax.legend()
            
        #ax.set_xlabel('x [m]',fontsize=40)
        #ax.set_ylabel('y [m]',fontsize=40)
        title = args.plot.split('/')[0][13:].replace('_', '/').replace('freiburg', 'fr')
        ax.set_title(title,fontsize=40)
        plt.savefig(args.plot,dpi=250)
    
    return trans_error
Example #13
def ate(first_file,
        second_file,
        offset=0,
        max_diff=0.02,
        scale=1.0,
        plot=1,
        save_png=None):
    first_list = associate.read_file_list(first_file)
    second_list = associate.read_file_list(second_file)

    matches = associate.associate(first_list, second_list, float(offset),
                                  float(max_diff))
    if len(matches) < 2:
        sys.exit(
            "Couldn't find matching timestamp pairs between groundtruth and estimated trajectory! Did you choose the correct sequence?"
        )

    first_xyz = np.matrix([[float(value) for value in first_list[a][0:3]]
                           for a, b in matches]).transpose()
    second_xyz = np.matrix(
        [[float(value) * float(scale) for value in second_list[b][0:3]]
         for a, b in matches]).transpose()
    rot, trans, trans_error = align(second_xyz, first_xyz)

    first_stamps = sorted(first_list.keys())
    first_xyz_full = np.matrix([[float(value) for value in first_list[b][0:3]]
                                for b in first_stamps]).transpose()

    second_stamps = sorted(second_list.keys())
    second_xyz_full = np.matrix(
        [[float(value) * float(scale) for value in second_list[b][0:3]]
         for b in second_stamps]).transpose()
    second_xyz_full_aligned = rot * second_xyz_full + trans

    rmse = np.sqrt(np.dot(trans_error, trans_error) / len(trans_error))
    emean = np.mean(trans_error)
    emedian = np.median(trans_error)
    estd = np.std(trans_error)
    emin = np.min(trans_error)
    emax = np.max(trans_error)

    name = os.path.basename(first_file)[:-4]

    if save_png is not None or plot:
        fig = plt.figure()
        ax = fig.add_subplot(111)
        plot_traj(ax, first_stamps,
                  first_xyz_full.transpose().A, '-', "red", "ground truth")
        plot_traj(ax, second_stamps,
                  second_xyz_full_aligned.transpose().A, '-', "blue",
                  "estimated")
        ax.legend()
        ax.set_xlabel('x [m]')
        ax.set_ylabel('y [m]')
        plt.title(name)
    if save_png is not None:
        plt.savefig(save_png)
    if plot:
        plt.show()
    return len(trans_error), rmse, emean, emedian, estd, emin, emax
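
A hypothetical call (file names are placeholders) using the returned summary statistics:

n_pairs, rmse_m, emean, emedian, estd, emin, emax = ate(
    'groundtruth.txt', 'estimated.txt', plot=0, save_png='ate.png')
print('%d pairs, RMSE %.4f m' % (n_pairs, rmse_m))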

groundtruth_file = sys.argv[1]
imu_file = sys.argv[2]
cmv_file = sys.argv[3]

groundtruth = associate.read_file_list(groundtruth_file, timestamp_scale=1e-9)
imu_prior = associate.read_file_list(imu_file)
cvm_prior = associate.read_file_list(cmv_file)

# convert from list to pose dictionary
groundtruth_poses = parseGroundtruth(groundtruth)

# get only the ones contained in both imu and cvm
imu_cvm_matches = associate.associate(imu_prior,
                                      cvm_prior,
                                      offset=0.0,
                                      max_difference=0.007)
imu_prior = {i[0]: imu_prior[i[0]] for i in imu_cvm_matches}
cvm_prior = {i[0]: cvm_prior[i[1]] for i in imu_cvm_matches}
# get only the groundtruth contained in both imu and cvm (and the one previous to it!)
# imu_groundtruth_matches = associate.associate(imu_prior, groundtruth, offset=0.0, max_difference=0.02)
# groundtruth = {i[0]: groundtruth[ i[0] ] for i in imu_groundtruth_matches}

# to relative
# groundtruth_relative = absoluteToRelative(groundtruth)

# --------------------------------------- relative poses of imu --------------------------------------- #
imu_relative, imu_reference_timestamps = navStateToPose(imu_prior)
required_timestamps = list(imu_reference_timestamps.keys()) + \
                      list(imu_reference_timestamps.values())
# the associate function requires a dict
    return rot, trans, trans_error


# read the data ==========================================================
ground_truth_list = associate.read_file_list("./groundtruth.txt")  # ground-truth trajectory
src_list = associate.read_file_list("./src.txt")  # original orb-slam2 estimate
flow_list = associate.read_file_list("./flow3.txt")  # flow: with optical-flow support
geom_list = associate.read_file_list("./geom.txt")  # geom: with multi-view geometry support
offset = 0.0  # time offset
max_difference = 0.02  # maximum timestamp difference
scale = 1.0  # scale factor for the data

# compare the ground truth against the original algorithm's estimate =====
# match by timestamp; the difference must not exceed max_difference (0.02)
matches_src = associate.associate(ground_truth_list, src_list, float(offset),
                                  float(max_difference))
if len(matches_src) < 2:
    sys.exit(
        "Couldn't find matching timestamp pairs between groundtruth and estimated trajectory! Did you choose the correct sequence?"
    )

# for each matched pair, keep only the x, y, z positions
gt_src_ass_xyz = numpy.matrix(
    [[float(value) for value in ground_truth_list[a][0:3]]
     for a, b in matches_src]).transpose()
src_ass_xyz = numpy.matrix(
    [[float(value) * float(scale) for value in src_list[b][0:3]]
     for a, b in matches_src]).transpose()
# solve for the transform between the matched positions and the resulting error
rot, trans, trans_error = align(src_ass_xyz, gt_src_ass_xyz)
# all values of the original data
    tar = tarfile.open('rgbd_dataset_freiburg1_xyz.tgz', 'r:gz')
    for item in tar:
        tar.extract(item)
    print('Done.')
except:
    print("Error opening downloaded file")

#Run assoc
print("Associating timestamps")
first_list = associate.read_file_list(
    "rgbd_dataset_freiburg1_xyz/groundtruth.txt")
second_list = associate.read_file_list("rgbd_dataset_freiburg1_xyz/depth.txt")
third_list = associate.read_file_list("rgbd_dataset_freiburg1_xyz/rgb.txt")

#Match Groundtruth with depth
matchesDepth = associate.associate(first_list, second_list, 0.0, 0.02)

#Convert matched groundtruth-depth timestamps to full data tuples (gtTime, tx, ty, tz, qx, qy, qz, qw, depthTime, depthFile)
matchedDepthVals = []
for a, b in matchesDepth:
    matchedDepthVals.append([
        val for sublist in [[a], first_list[a], [b], second_list[b]]
        for val in sublist
    ])

#Convert tuple to dict of form {gtTime: [tx, ty, tz, qx, qy, qz, qw, depthTime, depthFile]}
matchedDepthDict = {elem[0]: elem[1:] for elem in matchedDepthVals}
#Match created tuple with color times
matchesColor = associate.associate(matchedDepthDict, third_list, 0.0, 0.02)

#Convert matched groundtruth-depth-color timestamps to full data tuples (gtTime, tx, ty, tz, qx, qy, qz, qw, depthTime, depthFile, colorTime, colorFile)
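
The snippet is cut off here; by analogy with the depth step above, a plausible (hypothetical) completion that flattens the color matches the same way:

matchedColorVals = []
for a, b in matchesColor:
    matchedColorVals.append([
        val for sublist in [[a], matchedDepthDict[a], [b], third_list[b]]
        for val in sublist
    ])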
Example #17
    parser.add_argument('--verbose',
                        help='print all evaluation data (otherwise, only the RMSE absolute translational error in meters after alignment will be printed)',
                        action='store_true')
    args = parser.parse_args()

    # Get and associate data based on time
    first_list = associate.read_file_list(args.first_file)
    second_list = associate.read_file_list(args.second_file)
    first_stamps = {}
    for k in sorted(first_list):
        first_stamps[k] = first_list[k]
    second_stamps = {}
    for k in sorted(second_list):
        second_stamps[k] = second_list[k]

    matches = associate.associate(first_stamps, second_stamps, float(args.offset), float(args.max_difference))

    first_xyz = numpy.matrix([[float(value) for value in first_stamps[a][0:3]] for a, b in matches]).transpose()
    second_xyz = numpy.matrix(
        [[float(value) * float(args.scale) for value in second_stamps[b][0:3]] for a, b in matches]).transpose()

    # Compute Error
    alignment_error = second_xyz[0:2, :] - first_xyz[0:2, :]
    trans_error = numpy.sqrt(numpy.sum(numpy.multiply(alignment_error, alignment_error), 0)).A[0]

    # save trans_error
    root_dir = os.path.dirname(args.err_comp)
    save_dir = join(root_dir, '2d_ate_cdf')
    if not os.path.exists(save_dir):
        os.makedirs(save_dir)
    save_path = join(save_dir, args.model + '_errcdf.csv')
    def run(self):

        scene = self.sim.semantic_scene
        objects = scene.objects
        id = 0
        for obj in objects:
            if (obj is None or obj.category is None
                    or obj.category.name() not in self.include_classes):
                continue

            if self.verbose:
                print(f"Object name is: {obj.category.name()}")
            # Calculate distance to object center
            obj_center = obj.obb.to_aabb().center
            obj_center = np.expand_dims(obj_center, axis=0)
            distances = np.sqrt(np.sum((self.nav_pts - obj_center)**2, axis=1))

            # Get points with r_min < dist < r_max
            valid_pts = self.nav_pts[np.where(
                (distances > self.radius_min) * (distances < self.radius_max))]

            # Bin points based on angles [vertical_angle (10 deg/bin), horizontal_angle (10 deg/bin)]
            valid_pts_shift = valid_pts - obj_center

            dz = valid_pts_shift[:, 2]
            dx = valid_pts_shift[:, 0]
            dy = valid_pts_shift[:, 1]

            # Get yaw for binning
            valid_yaw = np.degrees(np.arctan2(dz, dx))
            nbins = 200
            bins = np.linspace(-180, 180, nbins + 1)
            bin_yaw = np.digitize(valid_yaw, bins)

            num_valid_bins = np.unique(bin_yaw).size

            spawns_per_bin = 1  #int(self.num_views / num_valid_bins) + 2
            print(f'spawns_per_bin: {spawns_per_bin}')

            if self.visualize:
                import matplotlib.cm as cm
                colors = iter(cm.rainbow(np.linspace(0, 1, nbins)))
                plt.figure(2)
                plt.clf()
                print(np.unique(bin_yaw))
                for bi in range(nbins):
                    cur_bi = np.where(bin_yaw == (bi + 1))
                    points = valid_pts[cur_bi]
                    x_sample = points[:, 0]
                    z_sample = points[:, 2]
                    plt.plot(z_sample, x_sample, 'o', color=next(colors))
                plt.plot(obj_center[:, 2],
                         obj_center[:, 0],
                         'x',
                         color='black')
                plt.show()

            action = "do_nothing"
            episodes = []
            valid_pts_selected = []
            cnt = 0
            texts_images = []
            texts_depths = []
            rots_cam0 = []
            camXs_T_camX0_4x4 = []
            camX0_T_camXs_4x4 = []
            origin_T_camXs = []
            origin_T_camXs_t = []
            rots_cam0.append([0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0])
            rots_origin = []
            for b in range(nbins):

                # get all angle indices in the current bin range
                inds_bin_cur = np.where(
                    bin_yaw == (b + 1))  # bins start 1 so need +1

                if inds_bin_cur[0].size == 0:
                    print("Continuing")
                    break

                for s in range(spawns_per_bin):
                    if b == 0:
                        np.random.seed(1)
                        s_ind = np.random.choice(inds_bin_cur[0])
                        pos_s = valid_pts[s_ind]
                        pos_s_prev = pos_s
                    else:
                        # get closest valid position for smooth trajectory
                        pos_s_cur_all = valid_pts[inds_bin_cur[0]]
                        distances_to_prev = np.sqrt(
                            np.sum((pos_s_cur_all - pos_s_prev)**2, axis=1))
                        argmin_s_ind_cur = np.argmin(distances_to_prev)
                        s_ind = inds_bin_cur[0][argmin_s_ind_cur]
                        pos_s = valid_pts[s_ind]
                        pos_s_prev = pos_s

                    valid_pts_selected.append(pos_s)
                    agent_state = habitat_sim.AgentState()
                    agent_state.position = pos_s + np.array([0, 1.5, 0])

                    # YAW calculation - rotate to object
                    agent_to_obj = np.squeeze(
                        obj_center) - agent_state.position
                    agent_local_forward = np.array([0, 0, -1.0])  # y, z, x
                    flat_to_obj = np.array(
                        [agent_to_obj[0], 0.0, agent_to_obj[2]])
                    flat_dist_to_obj = np.linalg.norm(flat_to_obj)
                    flat_to_obj /= flat_dist_to_obj

                    det = (flat_to_obj[0] * agent_local_forward[2] -
                           agent_local_forward[0] * flat_to_obj[2])
                    turn_angle = math.atan2(
                        det, np.dot(agent_local_forward, flat_to_obj))
                    quat_yaw = quat_from_angle_axis(turn_angle,
                                                    np.array([0, 1.0, 0]))

                    # Set agent yaw rotation to look at object
                    agent_state.rotation = quat_yaw

                    # change sensor state to default
                    # need to move the sensors too
                    if self.verbose:
                        print(self.agent.state.sensor_states)
                    for sensor in self.agent.state.sensor_states:
                        # st()
                        self.agent.state.sensor_states[
                            sensor].rotation = agent_state.rotation
                        self.agent.state.sensor_states[
                            sensor].position = agent_state.position  # + np.array([0, 1.5, 0]) # ADDED IN UP TOP

                    # Calculate Pitch from head to object
                    turn_pitch = np.degrees(
                        math.atan2(agent_to_obj[1], flat_dist_to_obj))
                    num_turns = np.abs(
                        np.floor(turn_pitch / self.rot_interval)
                    ).astype(
                        int
                    )  # compute number of times to move head up or down by rot_interval
                    if self.verbose:
                        print("MOVING HEAD ", num_turns, " TIMES")
                    movement = "look_up" if turn_pitch > 0 else "look_down"

                    # initiate agent
                    self.agent.set_state(agent_state)
                    self.sim.step(action)

                    # Rotate "head" of agent up or down based on calculated pitch angle to object to center it in view
                    if num_turns == 0:
                        pass
                    else:
                        for turn in range(num_turns):
                            # st()
                            self.sim.step(movement)
                            if self.verbose:
                                for sensor in self.agent.state.sensor_states:
                                    print(self.agent.state.
                                          sensor_states[sensor].rotation)

                    # get observations after centering
                    observations = self.sim.step(action)

                    # Assuming all sensors have same rotation and position
                    observations["rotations"] = self.agent.state.sensor_states[
                        'color_sensor'].rotation  #agent_state.rotation
                    observations["positions"] = self.agent.state.sensor_states[
                        'color_sensor'].position

                    if self.is_valid_datapoint(observations, obj):
                        if self.verbose:
                            print("episode is valid......")
                        episodes.append(observations)
                        if self.visualize:
                            rgb = observations["color_sensor"]
                            semantic = observations["semantic_sensor"]
                            depth = observations["depth_sensor"]
                            self.display_sample(rgb,
                                                semantic,
                                                depth,
                                                mainobj=obj,
                                                visualize=False)

                    if cnt > 0:

                        origin_T_camX = quaternion.as_rotation_matrix(
                            episodes[cnt]["rotations"])
                        camX0_T_camX = np.matmul(camX0_T_origin, origin_T_camX)
                        # camX0_T_camX = np.matmul(camX0_T_origin, origin_T_camX)

                        origin_T_camXs.append(origin_T_camX)
                        origin_T_camXs_t.append(episodes[cnt]["positions"])

                        origin_T_camX_4x4 = np.eye(4)
                        origin_T_camX_4x4[0:3, 0:3] = origin_T_camX
                        origin_T_camX_4x4[:3, 3] = episodes[cnt]["positions"]
                        camX0_T_camX_4x4 = np.matmul(camX0_T_origin_4x4,
                                                     origin_T_camX_4x4)
                        camX_T_camX0_4x4 = self.safe_inverse_single(
                            camX0_T_camX_4x4)

                        camXs_T_camX0_4x4.append(camX_T_camX0_4x4)
                        camX0_T_camXs_4x4.append(camX0_T_camX_4x4)

                        # # trans_camX0_T_camX = episodes[cnt]["positions"] - episodes[0]["positions"]

                        # r_camX_T_camX0, t_camX_T_camX0, = self.split_rt_single(camX_T_camX0_4x4)
                        # r_camX_T_camX0_quat = quaternion.as_float_array(quaternion.from_rotation_matrix(r_camX_T_camX0))

                        # # print(t_camX_T_camX0)
                        # # print(trans_camX0_T_camX)

                        # # full_trans1 = np.hstack((cnt, trans, quat_diff1))
                        # full_trans_camX_T_camX0 = np.hstack((cnt, t_camX_T_camX0, r_camX_T_camX0_quat))

                        # # rots1.append(list(full_trans1))
                        # rots_cam0.append(list(full_trans_camX_T_camX0))
                        # # rots_origin.append(list(full_trans_origin))

                        camX0_T_camX_4x4 = self.safe_inverse_single(
                            camX_T_camX0_4x4)
                        origin_T_camX_4x4 = np.matmul(origin_T_camX0_4x4,
                                                      camX0_T_camX_4x4)
                        r_origin_T_camX, t_origin_T_camX, = self.split_rt_single(
                            origin_T_camX_4x4)

                        if self.verbose:
                            print(r_origin_T_camX)
                            print(origin_T_camX)

                    else:
                        origin_T_camX0_quat = episodes[0]["rotations"]
                        origin_T_camX0 = quaternion.as_rotation_matrix(
                            origin_T_camX0_quat)
                        camX0_T_origin = np.linalg.inv(origin_T_camX0)
                        # camX0_T_origin = self.safe_inverse_single(origin_T_camX0)

                        origin_T_camXs.append(origin_T_camX0)
                        origin_T_camXs_t.append(episodes[0]["positions"])

                        origin_T_camX0_4x4 = np.eye(4)
                        origin_T_camX0_4x4[0:3, 0:3] = origin_T_camX0
                        origin_T_camX0_4x4[:3, 3] = episodes[0]["positions"]
                        camX0_T_origin_4x4 = self.safe_inverse_single(
                            origin_T_camX0_4x4)

                        camXs_T_camX0_4x4.append(np.eye(4))

                        # r0 = quaternion.as_rotation_vector(episodes[0]["rotations"])
                        # quat_diff_origin = quaternion.as_float_array(episodes[0]["rotations"]) #quaternion.as_float_array(self.quaternion_from_two_vectors(self.origin_rot_vector,r0))
                        # full_trans_origin = np.hstack((cnt, episodes[0]["positions"], quat_diff_origin))
                        # rots_origin.append(list(full_trans_origin))
                    cnt += 1
                    id += 1

            print("Generated ", len(episodes), " views.")
            if len(episodes) < self.min_orbslam_views:
                print("NOT ENOUGH VIEWS FOR ORBSLAM")
                continue

            if self.save_imgs:
                with open(self.orbslam_path + "/" + 'rgb.txt', 'w') as f:
                    for item in texts_images:
                        f.write("%s\n" % item)

                with open(self.orbslam_path + "/" + 'depth.txt', 'w') as f:
                    for item in texts_depths:
                        f.write("%s\n" % item)

                # with open(self.orbslam_path + "/" + 'CamTraj1.txt', 'w') as f:
                #     for item in rots1:
                #         f.write("%s\n" % item)

                # with open(self.orbslam_path + "/" + 'CamTraj.txt', 'w') as f:
                #     for item in rots_cam0:
                #         f.write("%s\n" % item)

            ############ ORBSLAM ################
            if self.do_orbslam:
                camXs_T_camX0_4x4 = np.array(camXs_T_camX0_4x4)
                origin_T_camXs = np.array(origin_T_camXs)
                origin_T_camXs_t = np.array(origin_T_camXs_t)
                camX0_T_camXs_4x4 = np.array(camX0_T_camXs_4x4)

                orbslam_dir = "/home/nel/ORB_SLAM2"
                executable = os.path.join(orbslam_dir,
                                          "Examples/RGB-D/rgbd_tum")
                vocabulary_file = os.path.join(orbslam_dir,
                                               "Vocabulary/ORBvoc.txt")
                settings_file = os.path.join(orbslam_dir,
                                             "Examples/RGB-D/REPLICA.yaml")
                data_dir = os.path.join(orbslam_dir, "Examples/RGB-D/replica")
                associations_file = os.path.join(
                    orbslam_dir, "Examples/RGB-D/associations/replica.txt")
                associations_executable = os.path.join(
                    orbslam_dir, "Examples/RGB-D/associations/associate.py")
                rgb_file = os.path.join(orbslam_dir,
                                        "Examples/RGB-D/replica/rgb.txt")
                depth_file = os.path.join(orbslam_dir,
                                          "Examples/RGB-D/replica/depth.txt")

                # associate rgb and depth files
                print("ASSOCIATING FILES")
                associate(rgb_file, depth_file, associations_file)

                # run ORBSLAM2
                print("RUNNING ORBSLAM2...")
                try:
                    output = subprocess.run([
                        executable, vocabulary_file, settings_file, data_dir,
                        associations_file
                    ],
                                            capture_output=True,
                                            text=True,
                                            check=True,
                                            timeout=800)
                except (subprocess.TimeoutExpired,
                        subprocess.CalledProcessError) as e:
                    print("ORBSLAM2 PROCESS FAILED OR TIMED OUT:", e)
                    continue

                # load camera rotation and translation estimated by orbslam - these are wrt first frame
                camXs_T_camX0_orb_output = np.loadtxt(self.ORBSLAM_Cam_Traj)
                print(output)

                assert len(episodes) == camXs_T_camX0_orb_output.shape[0], \
                    f"{camXs_T_camX0_orb_output.shape[0]}, {len(episodes)}"
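
                # each row of the estimated trajectory is in TUM format:
                #   timestamp tx ty tz qx qy qz qw
                # hence cur[1:4] below is the translation and cur[4:8] the unit quaternion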

                camXs_T_camX0_quant = []
                camXs_T_camX0 = []
                for i in range(camXs_T_camX0_orb_output.shape[0]):
                    cur = camXs_T_camX0_orb_output[i]
                    camX_T_camX0_quant = np.quaternion(cur[7], cur[4], cur[5],
                                                       cur[6])
                    camX_T_camX0 = quaternion.as_rotation_matrix(
                        camX_T_camX0_quant
                    )  # need to negate the rotation because the axes are flipped
                    camXs_T_camX0_quant.append(camX_T_camX0_quant)
                    camXs_T_camX0.append(camX_T_camX0)
                camXs_T_camX0 = np.array(camXs_T_camX0)
                camXs_T_camX0_quant = np.array(camXs_T_camX0_quant)

                t = []
                for i in range(camXs_T_camX0_orb_output.shape[0]):
                    cur = camXs_T_camX0_orb_output[i]
                    t_cur = np.array([-cur[1], -cur[2], -cur[3]])  # note: x may not need to be inverted
                    t.append(t_cur)
                t = np.array(t)

                camXs_T_camX0_4x4_orb = []
                for i in range(camXs_T_camX0_orb_output.shape[0]):
                    # assert len(episodes) == camXs_T_camX0_orb_output.shape[0], f"{camXs_T_camX0_orb_output.shape[0]}, {len(episodes)}"
                    # get estimated 4x4
                    camX_T_camX0_4x4_orb = np.eye(4)
                    camX_T_camX0_4x4_orb[0:3, 0:3] = camXs_T_camX0[i]
                    camX_T_camX0_4x4_orb[:3, 3] = t[i]

                    # invert
                    camX0_T_camX_4x4_orb = self.safe_inverse_single(
                        camX_T_camX0_4x4_orb)

                    # convert to origin coordinates
                    origin_T_camX_4x4 = np.matmul(origin_T_camX0_4x4,
                                                  camX0_T_camX_4x4_orb)
                    r_origin_T_camX_orb, t_origin_T_camX_orb = self.split_rt_single(
                        origin_T_camX_4x4)
                    r_origin_T_camX_orb_quat = quaternion.from_rotation_matrix(
                        r_origin_T_camX_orb)

                    #save
                    episodes[i]["positions_orb"] = t_origin_T_camX_orb
                    episodes[i]["rotations_orb"] = r_origin_T_camX_orb_quat
                    camXs_T_camX0_4x4_orb.append(camX_T_camX0_4x4_orb)
                camXs_T_camX0_4x4_orb = np.array(camXs_T_camX0_4x4_orb)

                ## PLOTTING #########

                # x_orbslam = []
                # y_orbslam = []
                # z_orbslam = []
                # for i in range(camXs_T_camX0_orb_output.shape[0]):
                #     camX_T_camX0_4x4_orb_r, camX_T_camX0_4x4_orb_t = self.split_rt_single(camXs_T_camX0_4x4_orb[i])
                #     x_orbslam.append(camX_T_camX0_4x4_orb_t[0])
                #     y_orbslam.append(camX_T_camX0_4x4_orb_t[1])
                #     z_orbslam.append(camX_T_camX0_4x4_orb_t[2])
                # x_orbslam = np.array(x_orbslam)
                # y_orbslam = np.array(y_orbslam)
                # z_orbslam = np.array(z_orbslam)

                # x_gt = []
                # y_gt = []
                # z_gt = []
                # for i in range(camXs_T_camX0_orb_output.shape[0]):
                #     camX_T_camX0_4x4_r, camX_T_camX0_4x4_t = self.split_rt_single(camXs_T_camX0_4x4[i])
                #     x_gt.append(camX_T_camX0_4x4_t[0])
                #     y_gt.append(camX_T_camX0_4x4_t[1])
                #     z_gt.append(camX_T_camX0_4x4_t[2])
                # x_gt = np.array(x_gt)
                # y_gt = np.array(y_gt)
                # z_gt = np.array(z_gt)

                print("PLOTTING")

                x_orbslam = []
                y_orbslam = []
                z_orbslam = []
                for i in range(camXs_T_camX0_orb_output.shape[0]):
                    x_orbslam.append(episodes[i]["positions_orb"][0])
                    y_orbslam.append(episodes[i]["positions_orb"][1])
                    z_orbslam.append(episodes[i]["positions_orb"][2])
                x_orbslam = np.array(x_orbslam)
                y_orbslam = np.array(y_orbslam)
                z_orbslam = np.array(z_orbslam)

                x_gt = []
                y_gt = []
                z_gt = []
                for i in range(camXs_T_camX0_orb_output.shape[0]):
                    x_gt.append(episodes[i]["positions"][0])
                    y_gt.append(episodes[i]["positions"][1])
                    z_gt.append(episodes[i]["positions"][2])
                x_gt = np.array(x_gt)
                y_gt = np.array(y_gt)
                z_gt = np.array(z_gt)

                from mpl_toolkits.mplot3d import Axes3D  # noqa: F401 - registers the '3d' projection
                plt.figure()
                ax = plt.axes(projection='3d')
                ax.plot3D(x_orbslam, y_orbslam, z_orbslam, 'green', label='ORB-SLAM')
                ax.plot3D(x_gt, y_gt, z_gt, 'blue', label='GT')
                ax.set_xlabel('x')
                ax.set_ylabel('y')
                ax.set_zlabel('z')
                ax.legend()
                # plt.plot(-np.array(x_orbslam), np.array(y_orbslam), label='ORB-SLAM', color='green', linestyle='dashed')
                # plt.plot(x_gt, y_gt, label='GT', color='blue', linestyle='solid')
                # plt.legend()
                plt_name = 'maps/' + str(
                    self.ep_idx) + '_' + str(id) + '_' + 'map.png'
                plt.savefig(plt_name)

            if not self.do_orbslam:
                if len(episodes) >= self.num_views:
                    print(f'num episodes: {len(episodes)}')
                    data_folder = obj.category.name() + '_' + obj.id
                    data_path = os.path.join(self.basepath, data_folder)
                    print("Saving to ", data_path)
                    os.mkdir(data_path)
                    np.random.seed(1)
                    flat_obs = np.random.choice(episodes,
                                                self.num_views,
                                                replace=False)
                    viewnum = 0
                    for obs in flat_obs:
                        self.save_datapoint(self.agent, obs, data_path,
                                            viewnum, obj.id, True)
                        viewnum += 1

                else:
                    print(f"Not enough episodes: f{len(episodes)}")

            # len(episodes) is sometimes greater than camXs_T_camX0_orb_output.shape[0]
            # (ORB-SLAM can drop frames), so sample only from episodes that have a pose
            else:
                num_valid_episodes = camXs_T_camX0_orb_output.shape[0]
                if num_valid_episodes >= self.num_views:
                    print(f'num episodes: {num_valid_episodes}')
                    data_folder = obj.category.name() + '_' + obj.id
                    data_path = os.path.join(self.basepath, data_folder)
                    print("Saving to ", data_path)
                    os.mkdir(data_path)
                    np.random.seed(1)
                    flat_obs = np.random.choice(episodes[:num_valid_episodes],
                                                self.num_views,
                                                replace=False)
                    viewnum = 0
                    for obs in flat_obs:
                        self.save_datapoint(self.agent, obs, data_path,
                                            viewnum, obj.id, True)
                        viewnum += 1

                else:
                    print(f"Not enough episodes: f{len(episodes)}")

            if self.visualize:
                valid_pts_selected = np.vstack(valid_pts_selected)
                self.plot_navigable_points(valid_pts_selected)
        print(i, orb_num)
        if orb_num < 10:
            delete_arr.append(i)
    print("delete:", delete_arr)
    for i in delete_arr:
        del cloud_dict[i]

    orb_dict = {"kp": kp, "des": des, "assign": assign}

    return cloud_dict, orb_dict


if __name__ == "__main__":
    data_dir = "/data/datasets/yurouy/rgbd_dataset_freiburg1_xyz/"
    depth_path = data_dir + "depth.txt"
    rgb_path = data_dir + "rgb.txt"
    depth_list = read_file_list(depth_path)
    rgb_list = read_file_list(rgb_path)
    asso_list = associate(rgb_list,
                          depth_list,
                          offset=0.0,
                          max_difference=0.02)

    for i in range(1):
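        # note: only the first associated rgb/depth pair is processed here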
        rgb = data_dir + rgb_list[asso_list[i][0]][0]
        print(rgb)
        depth = data_dir + depth_list[asso_list[i][1]][0]
        rgb_im = Image.open(rgb)
        depth_im = Image.open(depth)
        get_clustered_point_cloud(np.array(rgb_im), np.array(depth_im))
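
A minimal back-projection sketch (not part of the example above): get_clustered_point_cloud presumably lifts the associated rgb/depth pair into a camera-frame point cloud, roughly as below. The intrinsics are the published TUM freiburg1 values and the helper name is illustrative, so treat this as a sketch under those assumptions rather than the example's actual implementation.

import numpy as np

def depth_to_point_cloud(depth, fx=517.3, fy=516.5, cx=318.6, cy=255.3, scale=5000.0):
    # back-project a TUM depth image (uint16, 5000 ticks per meter) into an
    # N x 3 array of camera-frame points
    v, u = np.nonzero(depth)                    # pixel rows/cols with non-zero depth
    z = depth[v, u].astype(np.float64) / scale  # metric depth
    x = (u - cx) * z / fx
    y = (v - cy) * z / fy
    return np.stack([x, y, z], axis=1)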
Example #20
0
#!/usr/bin/python

import associate

import numpy as np
import cv2
from matplotlib import pyplot as plt

rgbPicsLoc = "rgb/"
depthPicsLoc = "depth/"

accelData = associate.read_file_list('accelerometer.txt')
depthData = associate.read_file_list('depth.txt')
rgbData = associate.read_file_list('rgb.txt')

pairedData = associate.associate(rgbData, depthData, 0.0, 0.02)
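# pairedData is a list of (rgb_timestamp, depth_timestamp) pairs whose
# timestamps differ by at most 0.02 s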
truthData = associate.read_file_list('groundtruth.txt')

fast = cv2.FastFeatureDetector_create()
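# FAST corner detector; fast.detect(img, None) below returns a list of cv2.KeyPoint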

img = cv2.imread(sorted(rgbData.items())[0][1][0], 0)  # earliest rgb frame, grayscale
kp = fast.detect(img, None)
img2 = cv2.drawKeypoints(img, kp, None, color=(255, 0, 0))
#cv2.imshow('image', img2)
#cv2.waitKey(0)
#cv2.destroyAllWindows()
#cv2.imwrite('drawn_img.png', img2)

def proj(x, F, C, B):
Example #21
0
    ''')
    parser.add_argument('first_file', help='ground truth trajectory (format: timestamp tx ty tz qx qy qz qw)')
    parser.add_argument('second_file', help='estimated trajectory (format: timestamp tx ty tz qx qy qz qw)')
    parser.add_argument('--offset', help='time offset added to the timestamps of the second file (default: 0.0)',default=0.0)
    parser.add_argument('--scale', help='scaling factor for the second trajectory (default: 1.0)',default=1.0)
    parser.add_argument('--max_difference', help='maximally allowed time difference for matching entries (default: 0.02)',default=0.02)
    parser.add_argument('--save', help='save aligned second trajectory to disk (format: stamp2 x2 y2 z2)')
    parser.add_argument('--save_associations', help='save associated first and aligned second trajectory to disk (format: stamp1 x1 y1 z1 stamp2 x2 y2 z2)')
    parser.add_argument('--plot', help='plot the first and the aligned second trajectory to an image (format: png)')
    parser.add_argument('--verbose', help='print all evaluation data (otherwise, only the RMSE absolute translational error in meters after alignment will be printed)', action='store_true')
    args = parser.parse_args()

    first_list = associate_euroc.read_file_list(args.first_file)
    second_list = associate.read_file_list(args.second_file)

    matches = associate.associate(first_list, second_list, float(args.offset), float(args.max_difference))
    if len(matches) < 2:
        sys.exit("Couldn't find matching timestamp pairs between groundtruth and estimated trajectory! Did you choose the correct sequence?")


    first_xyz = numpy.matrix([[float(value) for value in first_list[a][0:3]] for a, b in matches]).transpose()
    second_xyz = numpy.matrix([[float(value) * float(args.scale) for value in second_list[b][0:3]] for a, b in matches]).transpose()
    rot, trans, trans_error = align(second_xyz, first_xyz)
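    # align() estimates the rigid transform (rot, trans) that minimizes the RMSE
    # between the matched point clouds (Horn's closed-form method in the TUM
    # evaluation scripts) and also returns the per-pair translational error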
    
    second_xyz_aligned = rot * second_xyz + trans
    
    first_stamps = sorted(first_list.keys())
    first_xyz_full = numpy.matrix([[float(value) for value in first_list[b][0:3]] for b in first_stamps]).transpose()
    
    second_stamps = sorted(second_list.keys())
Example #22
0
def selNSGA3(problem, individuals, k):
    """Apply the NSGA-III selection operator on the *individuals*. Usually, the
    size of *individuals* will be larger than *k* because any individual
    present in *individuals* will appear in the returned list at most once.
    Having the size of *individuals* equal to *k* will have no effect other
    than sorting the population according to their front rank. The
    list returned contains references to the input *individuals*. For more
    details on the NSGA-III operator see [DebJain2014]_.

    :param individuals: A list of individuals to select from.
    :param k: The number of individuals to select.
    :returns: A list of selected individuals.

    .. [DebJain2014] Deb, Kalyanmoy, and Himanshu Jain. "An evolutionary many-objective
       optimization algorithm using reference-point-based nondominated sorting approach,
       part I: Solving problems with box constraints."
       IEEE Transactions on Evolutionary Computation 18.4 (2014): 577-601.
    """

    #print "Length of individuals: ", len(individuals)


    pareto_fronts = sortNondominated(individuals, k)
    f_l_no = len(pareto_fronts) - 1
    P_t_1_no = len(list(chain(*pareto_fronts[:-1])))
    total_points_returned = len(list(chain(*pareto_fronts)))
    population = []

    for front_no, front in enumerate(pareto_fronts):
        for i, dIndividual in enumerate(front):
            cells = []
            for j in range(len(dIndividual)):
                cells.append(dIndividual[j])
            population.append(jmoo_individual(problem, cells, dIndividual.fitness.values))
            population[-1].front_no = front_no

    print ">" * 10
    Z_s = cover(len(problem.objectives))
    Z_a = None
    Z_r = None

    if total_points_returned == k:
        return normalize(problem, population, Z_r, Z_s, Z_a)

    # S_t = P_t_1 + pareto_fronts[-1]


    K = k - P_t_1_no

    # Get the reference points


    population = normalize(problem, population, Z_r, Z_s, Z_a)
    population = associate(population, Z_s)

    f_l = []
    P_t_1 = []
    for pop in population:
        if pop.front_no == f_l_no:
            f_l.append(pop)
        else:
            P_t_1.append(pop)
    assert len(P_t_1) == P_t_1_no, "Something's wrong"
    P_t_1 = niching(K, len(Z_s), P_t_1, f_l)
    assert len(P_t_1) == jmoo_properties.MU, "Length is mismatched"
    return P_t_1
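
A sketch of what cover() above presumably supplies: NSGA-III's structured reference points, i.e. the Das-Dennis points spread uniformly over the unit simplex in objective space. The function name and signature below are illustrative, not taken from the source.

from itertools import combinations

def das_dennis_points(n_obj, divisions):
    # enumerate all nonnegative integer compositions of `divisions` into n_obj
    # parts via stars and bars, then scale them onto the unit simplex
    points = []
    for bars in combinations(range(divisions + n_obj - 1), n_obj - 1):
        coords, prev = [], -1
        for b in bars:
            coords.append(b - prev - 1)
            prev = b
        coords.append(divisions + n_obj - 2 - prev)
        points.append([c / float(divisions) for c in coords])
    return points

# e.g. das_dennis_points(3, 4) yields C(6, 2) = 15 reference points, including
# [1.0, 0.0, 0.0] and [0.25, 0.5, 0.25]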