def edge_quality(n, edges, Tstar, R):
    """Report how many (weighted) edges agree with the ground-truth poses Tstar."""
    good_edges = 0.0
    bad_edges = 0.0
    err_bad = 0.0
    err_good = 0.0
    for edge in edges:
        if edge['weight'] < 0.01:
            continue
        sid = edge['src']
        tid = edge['tgt']
        Ti = Tstar[sid]
        Tj = Tstar[tid]
        Tij_gt = Tj.dot(inverse(Ti))
        Rij_in = edge['R']
        tij_in = edge['t']
        Tij_in = pack(Rij_in, tij_in)
        # Angular error of the input measurement w.r.t. ground truth.
        aerr_gt = angular_distance_np(Tij_in[np.newaxis, :3, :3],
                                      Tij_gt[np.newaxis, :3, :3]).sum()
        # Frobenius residual of the current estimate against the measurement.
        Rij = R[tid].dot(R[sid].T)
        aerr = np.linalg.norm(Rij - Rij_in, 'fro') ** 2
        if aerr_gt > 30.0:
            bad_edges += edge['weight']
            err_bad += aerr * edge['weight']
        else:
            good_edges += edge['weight']
            err_good += aerr * edge['weight']
    # Guard against division by zero when one of the groups is empty.
    print('Edge Quality: #good=%f, #bad=%f, mean aerr=(%f, %f)' % (
        good_edges, bad_edges,
        err_good / max(good_edges, 1e-12),
        err_bad / max(bad_edges, 1e-12)))
def main():
    depth_paths, T, pose_paths = getData(args.shapeid)
    n = len(depth_paths)
    print('found %d clean depth images...' % n)
    intrinsic = np.array([[525.0, 0, 319.5],
                          [0, 525.0, 239.5],
                          [0, 0, 1]])
    np.random.seed(816)
    indices = np.random.permutation(n)
    print(indices[:100])
    #indices = sorted(indices)
    make_dirs(PATH_MAT.format(args.shapeid, 0))
    import open3d
    pcd_combined = open3d.PointCloud()
    for i, idx in enumerate(indices):
        #import ipdb; ipdb.set_trace()  # debug breakpoint, disabled
        print('%d / %d' % (i, len(indices)))
        mesh = Mesh.read(depth_paths[idx], mode='depth', intrinsic=intrinsic)
        pcd = open3d.PointCloud()
        pcd.points = open3d.Vector3dVector(mesh.vertex.T)
        pcd.transform(inverse(T[idx]))
        #pcd = open3d.voxel_down_sample(pcd, voxel_size=0.02)
        pcd_combined += pcd
        pcd_combined = open3d.voxel_down_sample(pcd_combined, voxel_size=0.02)
        sio.savemat(PATH_MAT.format(args.shapeid, i), mdict={
            'vertex': mesh.vertex,
            'validIdx_rowmajor': mesh.validIdx,
            'pose': T[idx],
            'depth_path': depth_paths[idx],
            'pose_path': pose_paths[idx]})
        if 40 <= i <= 50:
            pcd_combined_down = open3d.voxel_down_sample(pcd_combined, voxel_size=0.02)
            open3d.draw_geometries([pcd_combined_down])
    pcd_combined_down = open3d.voxel_down_sample(pcd_combined, voxel_size=0.02)
    open3d.draw_geometries([pcd_combined_down])
def __init__(self, center, radius, normal, I, scene, loops=1, pitch=1):
    Shape.__init__(self, scene)
    self.center = center
    self.radius = radius
    self.normal = normal
    self.loops = loops
    self.pitch = pitch
    self.length = loops * pitch
    self.I = I
    # some constants
    self.C = mu_0 * I / pi
    self.oner = [[1, 0, 0], [0, 1, 0], [0, 0, 1]]
    # rotation matrices for this coil, rotating so the normal becomes the z axis
    self.rotate = self.find_rotmatrix(normal.norm(), vector(0, 0, 1))
    self.antiRotate = inverse(self.rotate)
    if loops == 1:
        self.obj = ring(pos=center, axis=normal * self.length, radius=radius,
                        thickness=0.1, display=scene, material=materials.chrome)
    else:
        self.obj = helix(pos=center, axis=normal * self.length, radius=radius,
                         coils=loops, thickness=0.1, display=scene,
                         material=materials.chrome)
def error(T, G):
    aerrs = []
    terrs = []
    n = T.shape[0]
    for i in range(n):
        for j in range(i + 1, n):
            Ti = T[i, :, :]
            Tj = T[j, :, :]
            Gi = G[i, :, :]
            Gj = G[j, :, :]
            Tij = Tj.dot(inverse(Ti))
            Gij = Gj.dot(inverse(Gi))
            Rij = Tij[:3, :3]
            Rij_gt = Gij[:3, :3]
            fro = np.linalg.norm(Rij - Rij_gt, 'fro')
            aerr = angular_distance_np(Rij[np.newaxis, :, :],
                                       Rij_gt[np.newaxis, :, :]).sum()
            terr = np.linalg.norm(Tij[:3, 3] - Gij[:3, 3], 2)
            aerrs.append(aerr)
            terrs.append(terr)
    return np.mean(aerrs), np.mean(terrs)
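# A minimal sanity-check sketch (hypothetical; assumes numpy as np and the
# inverse/angular_distance_np helpers used by error() above are in scope):
# comparing a pose set against itself should give zero mean angular and
# translation error.
def _example_error_sanity_check():
    T_identity = np.tile(np.eye(4), (5, 1, 1))   # five identity poses
    aerr, terr = error(T_identity, T_identity)
    print('self-comparison errors:', aerr, terr)  # both should be ~0.0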
def __get_label__(Rij, tij, Ti, Tj):
    """Measure the quality of an edge: returns 1.0 if the measured relative
    pose agrees with the ground truth (rotation within 30 degrees and
    translation within 0.2), otherwise 0.0."""
    Ristar, tistar = decompose(Ti)
    Rjstar, tjstar = decompose(Tj)
    Tij_gt = Tj.dot(inverse(Ti))
    Tij_in = pack(Rij, tij)
    err_R = angular_distance_np(Rij[np.newaxis, :, :],
                                Rjstar.dot(Ristar.T)[np.newaxis, :, :]).sum()
    err_T = np.linalg.norm(Tij_gt[:3, 3] - Tij_in[:3, 3], 2)
    if (err_R < 30.0) and (err_T < 0.2):
        label = 1.0
    else:
        label = 0.0
    return label
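# A usage sketch for __get_label__ (hypothetical values; assumes numpy as np
# and the pack/decompose/inverse/angular_distance_np helpers from this
# module): the measured relative pose below matches the ground truth exactly,
# so the edge is labeled as an inlier (1.0). Rotations off by more than 30
# degrees or translations off by more than 0.2 would yield 0.0.
def _example_get_label():
    Ti = np.eye(4)
    Tj = np.eye(4)
    Tj[:3, 3] = [0.1, 0.0, 0.0]           # ground-truth motion: 0.1 along x
    Rij = np.eye(3)                        # measured rotation
    tij = np.array([0.1, 0.0, 0.0])        # measured translation
    return __get_label__(Rij, tij, Ti, Tj)  # -> 1.0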
def getData(shapeid):
    depth_paths = []
    poses = []
    pose_paths = []
    frames = glob.glob(PATH_DEPTH.format(shapeid, '*'))
    frames.sort()
    for i, frame in enumerate(frames):
        frameid = frame.split('/')[-1].split('.')[0]
        depth_path = PATH_DEPTH.format(shapeid, frameid)
        #tmp = cv2.resize(cv2.imread(imgsPath, 2)/1000., (64,64))
        #AuthenticdepthMap.append(tmp.reshape(1,tmp.shape[0],tmp.shape[1],1))
        pose_fp = PATH_POSE.format(shapeid, frameid)
        flag = True
        try:
            tmp = np.loadtxt(pose_fp)
            assert abs(tmp[3, 3] - 1.0) < 1e-4, 'bottom right corner should be one'
            assert (abs(tmp[3, :3]) < 1e-4).all(), '[3, :3] should be zero'
            R = tmp[:3, :3]
            assert np.linalg.det(R) > 0.01, 'determinant should be positive (close to 1)'
            assert np.linalg.norm(R.dot(R.T) - np.eye(3), 'fro')**2 < 1e-4, \
                'should be a rotation matrix'
            project_R = project_so(R)
            assert np.linalg.norm(R - project_R, 'fro')**2 < 1e-4, \
                'projection onto SO(3) should be identical'
            tmp[:3, :3] = project_R
            tmp = inverse(tmp)
        except Exception as e:
            print('error on {}: {}'.format(pose_fp, e))
            #print(R.dot(R.T))
            #print(np.linalg.norm(R.dot(R.T) - np.eye(3), 'fro'))
            flag = False
        if not flag:
            print('ignoring frame {}'.format(frameid))
            continue
        poses.append(tmp)
        depth_paths.append(depth_path)
        pose_paths.append(pose_fp)
    if len(poses) == 0:
        T = None
    else:
        T = np.concatenate(poses).reshape(-1, 4, 4)
    return depth_paths, T, pose_paths
def compute_sigma(mat_file1, mat_file2, txt, output_mat):
    """Score a Super4PCS relative pose against ground truth and save the result."""
    mat1 = sio.loadmat(mat_file1)
    mat2 = sio.loadmat(mat_file2)
    v1 = mat1['vertex']  # [3, n]
    v2 = mat2['vertex']  # [3, n]
    Tij = read_super4pcs(txt)
    Tij_gt = mat2['pose'].dot(inverse(mat1['pose']))
    Rij = Tij[:3, :3]
    tij = Tij[:3, 3]
    # Transform the source vertices into the target frame.
    v1 = Rij.dot(v1) + tij[:, np.newaxis]
    tree = NN(n_neighbors=1, algorithm='kd_tree').fit(v1.T)
    distances, _ = tree.kneighbors(v2.T)
    distances = distances[distances < 0.2]
    d = {}
    d['sigma'] = np.median(distances)
    d['Tij'] = Tij
    d['aerr'] = angular_distance_np(Tij[np.newaxis, :3, :3],
                                    Tij_gt[np.newaxis, :3, :3]).sum()
    d['terr'] = np.linalg.norm(Tij[:3, 3] - Tij_gt[:3, 3], 2)
    d['src'] = mat_file1
    d['tgt'] = mat_file2
    sio.savemat(output_mat, mdict=d, do_compression=True)
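# A hypothetical invocation sketch for compute_sigma (the file names below are
# made up purely for illustration; substitute paths produced by the
# surrounding pipeline): it scores the Super4PCS transform stored in the .txt
# file against the ground-truth poses in the two .mat files and writes
# sigma/aerr/terr/Tij to output_mat.
def _example_compute_sigma():
    compute_sigma(mat_file1='frame_0.mat',
                  mat_file2='frame_1.mat',
                  txt='super4pcs_0_1.txt',
                  output_mat='summary_0_1.mat')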
def reweightEdges(n, edges, R, t, sigma_r, sigma_t, plot=False):
    theta1 = 4
    terrs = []  # unused
    if plot:
        plots = []
    for edge in edges:
        i = edge['src']
        j = edge['tgt']
        Rij = edge['R']
        tij = edge['t']
        Tij_in = pack(Rij, tij)
        Ti = pack(R[i], t[i])
        Tj = pack(R[j], t[j])
        Tij_rec = Tj.dot(inverse(Ti))
        # Clamp the predicted weight to [0, 1].
        weight = edge['predicted_weight']
        if weight < 0.01:
            weight = 0.0
        if weight > 1.0:
            weight = 1.0
        aerr_fro = np.linalg.norm(Tij_rec[:3, :3] - Tij_in[:3, :3], 'fro') * \
            sigmoid((0.5 - weight) * 5)
        terr_fro = np.linalg.norm(Tij_rec[:3, 3] - Tij_in[:3, 3], 2) * \
            sigmoid((0.5 - weight) * 5)
        #fro2 = aerr_fro ** 2 + terr_fro ** 2
        # Robust reweighting: weight = sigma^theta / (sigma^theta + residual^theta).
        edge['rotation_weight'] = (sigma_r**theta1) / (sigma_r**theta1 + aerr_fro**theta1)
        edge['translation_weight'] = (sigma_t**theta1) / (sigma_t**theta1 + terr_fro**theta1)
        if plot:
            plots.append([aerr_fro, edge['rotation_weight']])
    if plot:
        plots = np.array(plots)
        import matplotlib.pyplot as plt
        plt.scatter(plots[:, 0], plots[:, 1])
        plt.show()
def generate_synthetic(n, sigma):
    T = np.zeros((n, 4, 4))
    X = so.rvs(dim=3, size=n)
    T[:, :3, :3] = X
    # u, sigma, v = np.linalg.svd(T[0])
    T[:, :3, 3] = np.random.randn(n, 3)
    T[0, :3, 3] = 0.0
    T[:, 3, 3] = 1
    edges = []
    for i in range(n):
        for j in range(n):
            if i <= j:
                continue
            Tij = T[j].dot(inverse(T[i]))
            Rij, tij = decompose(Tij)
            Rij = Rij + np.random.randn(3, 3) * sigma
            Rij = project_so(Rij)
            tij = tij + np.random.randn(3) * sigma
            Tij = pack(Rij, tij)
            edge = {'src': i, 'tgt': j, 'R': Rij, 't': tij, 'weight': 1.0}
            edges.append(edge)
    edges = np.array(edges)
    return n, edges, T
def IterativeTransfSync(n, edges, eps0=-1, decay=0.8, Tstar=None,
                        max_iter=10000, cheat=False, scheme='reweight'):
    reweight = (scheme == 'reweight')
    if cheat:
        # Use the ground-truth poses to assign oracle edge weights.
        for edge in edges:
            #if edge['weight'] < 0.5:
            #    continue
            sid = edge['src']
            tid = edge['tgt']
            Ti = Tstar[sid]
            Tj = Tstar[tid]
            Tij_gt = Tj.dot(inverse(Ti))
            Rij = edge['R']
            tij = edge['t']
            Tij = pack(Rij, tij)
            aerr = angular_distance_np(Tij[np.newaxis, :3, :3],
                                       Tij_gt[np.newaxis, :3, :3]).sum()
            terr = np.linalg.norm(Tij[:3, 3] - Tij_gt[:3, 3], 2)
            #p = 0.9
            if aerr > 30.0 or terr > 0.2:
                weight = 0.0  #coin(0.02)
            else:
                weight = 1.0  #coin(0.9)
            edge['predicted_weight'] = weight
            edge['translation_weight'] = weight
            edge['rotation_weight'] = weight

    """ Edge Quality """
    good_edges = 0.0
    bad_edges = 0.0
    err_bad = 0.0
    err_good = 0.0
    for edge in edges:
        if edge['predicted_weight'] < 0.5:
            continue
        sid = edge['src']
        tid = edge['tgt']
        Ti = Tstar[sid]
        Tj = Tstar[tid]
        Tij_gt = Tj.dot(inverse(Ti))
        Rij = edge['R']
        tij = edge['t']
        Tij = pack(Rij, tij)
        aerr = angular_distance_np(Tij[np.newaxis, :3, :3],
                                   Tij_gt[np.newaxis, :3, :3]).sum()
        terr = np.linalg.norm(Tij[:3, 3] - Tij_gt[:3, 3], 2)
        if aerr > 30.0 or terr > 0.2:
            #print(sid, tid, edge['predicted_weight'])
            bad_edges += 1
            err_bad += aerr * edge['predicted_weight']
        else:
            good_edges += 1
            err_good += aerr * edge['predicted_weight']
    print('Edge Quality: #good=%f, #bad=%f' % (good_edges, bad_edges))

    itr = 0
    while itr < max_iter:
        R, t, eigengap = TransfSync(n, edges)
        #edge_quality(n, edges, Tstar, R)
        T = np.array([pack(R[i], t[i]) for i in range(n)])
        # Default to -1 so the report below is well defined when Tstar is None.
        aerr_gt, terr_gt = -1.0, -1.0
        if Tstar is not None:
            aerr_gt, terr_gt = error(T, Tstar)
        if not reweight:
            if eps0 < -0.5:
                eps0 = max_existing_err(n, edges, R, t)
        if reweight:
            reweightEdges(n, edges, R, t, sigma_r=0.01, sigma_t=0.01)
        else:
            truncatedWeightPredict(n, edges, R, t, eps0)
        mindeg, numedges, err_sum = computeStats(n, edges, R, t)
        print('iter=%d, avg(err^2)=%f, eigengap=%f, #edges=%d, min_deg=%f, '
              'eps0=%f, aerr_gt=%f, terr_gt=%f' % (
                  itr, err_sum / numedges, eigengap, numedges, mindeg, eps0,
                  aerr_gt, terr_gt))
        """ Skip idle iterations """
        if reweight:
            itr += 1
        else:
            max_err = max_existing_err(n, edges, R, t)
            while (itr < max_iter) and (eps0 > max_err):
                eps0 = eps0 * decay
                itr += 1
        if mindeg == 0:
            break
        if err_sum <= 1e-2:
            break
    return T
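# An end-to-end sketch on synthetic data (hypothetical parameters; assumes the
# generate_synthetic, IterativeTransfSync and error functions above, plus the
# numpy/scipy helpers already imported in this module): build a fully
# connected graph of noisy relative poses, run synchronization with oracle
# weights (cheat=True), and report the recovered accuracy.
def _example_synthetic_sync():
    n, edges, T_gt = generate_synthetic(20, 0.01)
    T_rec = IterativeTransfSync(n, edges, Tstar=T_gt, max_iter=50, cheat=True)
    aerr, terr = error(T_rec, T_gt)
    print('mean angular error = %f, mean translation error = %f' % (aerr, terr))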
if __name__ == "__main__": voxel_size = 0.02 # means 5cm for the dataset import argparse parser = argparse.ArgumentParser( description='Baseline Algorithm: Fast Global Registration') parser.add_argument('files', type=str, nargs='+', help='src, tgt, output') args = parser.parse_args() src_mat = sio.loadmat(args.files[0]) tgt_mat = sio.loadmat(args.files[1]) src_pose = src_mat['pose'] tgt_pose = tgt_mat['pose'] src = src_mat['vertex'] tgt = tgt_mat['vertex'] Tij = tgt_pose.dot(inverse(src_pose)) src_pc = open3d.PointCloud() src_pc.points = open3d.Vector3dVector(src.T) tgt_pc = open3d.PointCloud() tgt_pc.points = open3d.Vector3dVector(tgt.T) #source, target, source_down, target_down, source_fpfh, target_fpfh = \ # prepare_dataset(voxel_size) #draw_registration_result(src_pc, tgt_pc, Tij) open3d.estimate_normals(src_pc, search_param=open3d.KDTreeSearchParamHybrid( radius=0.2, max_nn=60)) open3d.estimate_normals(tgt_pc, search_param=open3d.KDTreeSearchParamHybrid( radius=0.2, max_nn=60)) source_down, source_fpfh = preprocess_point_cloud(src_pc, voxel_size)
sigmas = []
aerrs = []   # not initialized in the original excerpt; appended to below
terrs = []   # not initialized in the original excerpt; appended to below
for line in lines:
    summary_mat = '%s/relative_pose/summary/%s/%s/%s.mat' % (data_path, dataset, source, line)
    summary_mat = sio.loadmat(summary_mat)
    T = summary_mat['T']
    Tstar = summary_mat['Tstar']
    aerr = summary_mat['aerr']
    terr = summary_mat['terr']
    sigma = summary_mat['sigma']
    n = Tstar.shape[0]
    n = 30  # only evaluate the first 30 scans
    for i in range(n):
        for j in range(i + 1, n):
            Tij = T[i*4:(i+1)*4, j*4:(j+1)*4]
            Tij_gt = Tstar[j, :, :].dot(inverse(Tstar[i, :, :]))
            terr_ij = np.linalg.norm((Tij_gt - Tij)[:3, 3], 2)
            assert abs(terr_ij - terr[i, j]) < 1e-4
            terrs.append(terr_ij)
            aerr_ij = angular_distance_np(Tij_gt[np.newaxis, :3, :3],
                                          Tij[np.newaxis, :3, :3]).sum()
            assert abs(aerr_ij - aerr[i, j]) < 1e-4
            aerrs.append(aerr_ij)
            sigmas.append(sigma[i, j])
aerrs = np.array(aerrs)
terrs = np.array(terrs)
sigmas = np.array(sigmas)
for sigma_threshold in [0.1, 0.2]:
    valid_indices = np.where(sigmas < sigma_threshold)[0]
terr = np.zeros((n, n)) + 10000000.0
RLlist = reader.list_relative_poses(dataset, source, sceneid)
if len(RLlist) < 4950:
    continue
top = 0
bottom = 0
for mat in RLlist:
    s = sio.loadmat(mat)
    src, tgt = mat.split('/')[-1].split('.')[0].split('_')[:2]
    sid = scanid_map[src]
    tid = scanid_map[tgt]
    Tij = s['Tij']
    if sid > tid:
        # Store every pair with sid < tid; flip the relative pose if needed.
        sid, tid = tid, sid
        Tij = inverse(Tij)
    assert sid < tid
    sigma[sid, tid] = s['sigma']
    aerr[sid, tid] = s['aerr']
    terr[sid, tid] = s['terr']
    if s['aerr'] < 15.0 and s['terr'] < 0.2:
        top += 1.0
    bottom += 1.0
    T[sid*4:(sid+1)*4, tid*4:(tid+1)*4] = Tij
Tstar = np.zeros((n, 4, 4))
for i, scanid in enumerate(scanids):
    scan = reader.read_scan(dataset, sceneid, scanid, variable_names=['pose'])
    Tstar[i, :, :] = scan['pose']
print(sceneid, 'good ratio=', top / bottom)