def __getitem__(self, idx):
    # Look up the sample's PLY file and its category label.
    ply_path = self.files[idx]['ply_path']
    category = self.files[idx]['category']
    # Stack the x/y/z fields into an (N, 3) array and apply the transforms.
    data = read_ply(ply_path)
    pointcloud = self.transforms(
        np.vstack((data['x'], data['y'], data['z'])).T)
    return {'pointcloud': pointcloud, 'category': self.classes[category]}
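# A minimal sketch of a `transforms` callable such a dataset could be given,
# assuming it receives the (N, 3) NumPy array built above and should return a
# torch tensor. The function name and the normalization choice are
# illustrative, not part of the original code.
import numpy as np
import torch

def default_transforms(points):
    # Center the cloud and scale it to fit inside the unit sphere.
    points = points - points.mean(axis=0)
    points = points / np.max(np.linalg.norm(points, axis=1))
    return torch.from_numpy(points.astype(np.float32))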
def _gen_clouds(root, id_category, nPerCat, sous_echantillonage):
    root += "/"
    res = []
    for cat in id_category:
        path = root + str(cat) + "/ply/"
        # Ignore the .txt files
        keys = [key for key in os.listdir(path) if not key.endswith(".txt")]
        for key in sorted(keys)[:nPerCat]:
            sub_path = path + key
            cloud = ply.read_ply(sub_path)['points']
            if isinstance(sous_echantillonage, int):
                # An int is interpreted as a target point count.
                ratio = sous_echantillonage / len(cloud.values)
                if ratio > 1:
                    print("NOT ENOUGH POINTS IN THE CLOUD",
                          sous_echantillonage, '/', len(cloud.values))
            else:
                # Otherwise it is already a subsampling ratio in (0, 1].
                ratio = sous_echantillonage
            # Keep points so that the running fraction stays below `ratio`.
            sub_sampled = []
            for i, x in enumerate(cloud.values[:, :3]):
                if len(sub_sampled) / (i + 1) < ratio:
                    sub_sampled.append(torch.tensor(x))
            assert (ratio > 1 and len(sub_sampled) == len(cloud.values)) \
                or len(sub_sampled) == ratio * len(cloud.values)
            # noinspection PyTypeChecker
            res.append(torch.cat(sub_sampled).reshape((-1, 3)))
    assert not res[0].requires_grad
    # Object array of per-cloud tensors (torch.Tensor is not a NumPy dtype).
    return np.array(res, dtype=object)
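# For reference, a similar fixed-ratio subsampling of an (N, 3) array can be
# written without the running-ratio loop above. This is an illustrative
# alternative (evenly spaced indices), not the routine used by _gen_clouds.
import numpy as np
import torch

def subsample(points, ratio):
    # Keep roughly ratio * N points by taking evenly spaced indices.
    n_keep = int(round(ratio * len(points)))
    idx = np.linspace(0, len(points) - 1, n_keep).astype(int)
    return torch.as_tensor(points[idx, :3], dtype=torch.float32)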
def load_points(fname):
    cloud_ply = read_ply(fname)
    points = np.vstack((cloud_ply['x'], cloud_ply['y'], cloud_ply['z'])).T
    # Also return per-point labels when the cloud stores a 'class' field.
    if 'class' in cloud_ply.dtype.fields:
        labels = cloud_ply['class']
        return points, labels
    else:
        return points
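# Usage sketch: the return arity depends on whether the PLY file carries a
# 'class' field. The file names below are only illustrative.
points, labels = load_points('../data/labeled_scan.ply')  # labeled cloud
points = load_points('../data/bunny_original.ply')        # unlabeled cloud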
def initializeFaces(pathToData, loadFromFile=False, filename='faces.json'):
    if loadFromFile and Path(filename).is_file():
        # Reload a previously cached triangulation and mesh list.
        with open(filename, 'r') as infile:
            jcontent = json.load(infile)
        tsurf = triangulatedSurfaces(np.array(jcontent['triangulation']),
                                     np.array(jcontent['meshes']))
    else:
        fileFormat = '*.ply'
        files = sorted(glob.glob(pathToData + fileFormat))
        print("All files")
        # The triangulation is shared across meshes; read it from the first file.
        plydata = read_ply(files[0])
        triangulation = plydata['mesh'].values
        points = plydata['points']
        x = points.x.values
        numOfPoints = len(x)
        meshList = np.zeros((numOfPoints * 3, len(files)))
        for (i, file) in enumerate(files):
            print(" - " + file)
            plydata = read_ply(file)
            points = plydata['points']
            x = points.x.values
            y = points.y.values
            z = points.z.values
            # Flatten the (N, 3) coordinates into one column per file.
            data = np.array((x, y, z)).T
            data = np.reshape(data, (-1, 1))
            meshList[:, i] = data[:, 0]
        tsurf = triangulatedSurfaces(triangulation, meshList)
        # Cache the result so the next call can skip the PLY parsing.
        with open(filename, 'w') as outfile:
            json.dump({'triangulation': tsurf.triangulation.tolist(),
                       'meshes': tsurf.meshes.tolist()}, outfile)
    return tsurf
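# `triangulatedSurfaces` is not defined in this excerpt. A minimal sketch,
# assuming it is simply a container for the shared triangulation and the
# per-file flattened vertex coordinates accessed above:
import numpy as np

class triangulatedSurfaces:
    def __init__(self, triangulation, meshes):
        self.triangulation = np.asarray(triangulation)  # (F, 3) vertex indices
        self.meshes = np.asarray(meshes)  # (3 * numOfPoints, numOfFiles)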
def __init__(self, path_file, sel_mod="rel", self_args={'thresh': 0.01},
             neigh_args={'k': 10}, neigh_flag="k", k_harris=0.04):
    data = ply.read_ply(path_file)
    pos = np.stack([data['x'], data['y'], data['z']]).T  # Positions: Nx3
    self.mesh_objects = base.Meshgrid(pos)
    # Repeatability threshold expressed as a fraction of the cloud diameter.
    self.repeatbiliy_thresh = self.mesh_objects.diameter * self_args['thresh']
    self.sel_mod = sel_mod
    self.sel_args = self_args
    self.neigh_args = neigh_args
    self.neigh_flag = neigh_flag
    self.k_harris = k_harris
    # Sanity check: the positions must contain at least some finite values.
    assert not np.all(np.isnan(pos)) and not np.all(np.isinf(pos))
def transform(root, filename):
    if not os.path.exists('./ply_gen'):
        os.makedirs('./ply_gen')

    # Example transformation.
    # 1. Read the PLY data file.
    data = read_ply(os.path.join(root, filename))
    # 2. Shift points by the vector (25, -10, 7).
    data = translate(data, 25, -10, 7)
    # 3. Rotate points 45 degrees around the Z-axis.
    data = rotate_z(data, 45)
    # 4. Write the new points into a new PLY data file.
    write_ply(os.path.join('./ply_gen', filename),
              [data['x'], data['y'], data['z'],
               data['x_origin'], data['y_origin'], data['z_origin'],
               data['GPS_time'], data['reflectance']],
              ['x', 'y', 'z', 'x_origin', 'y_origin', 'z_origin',
               'GPS_time', 'reflectance'])
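# `translate` and `rotate_z` are not shown in this excerpt. A minimal sketch,
# assuming `data` behaves like a structured array or dict with mutable
# 'x', 'y', 'z' fields and that angles are given in degrees:
import numpy as np

def translate(data, dx, dy, dz):
    # Shift every point by the constant vector (dx, dy, dz).
    data['x'] = data['x'] + dx
    data['y'] = data['y'] + dy
    data['z'] = data['z'] + dz
    return data

def rotate_z(data, angle_deg):
    # Rotate points around the Z-axis by angle_deg degrees.
    theta = np.deg2rad(angle_deg)
    x, y = data['x'].copy(), data['y'].copy()
    data['x'] = np.cos(theta) * x - np.sin(theta) * y
    data['y'] = np.sin(theta) * x + np.cos(theta) * y
    return data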
if __name__ == '__main__':

    # Transformation estimation
    # *************************

    # Cloud paths
    bunny_o_path = '../data/bunny_original.ply'
    bunny_p_path = '../data/bunny_perturbed.ply'
    dragon_o_path = '../data/dragon_original.ply'
    dragon_p_path = '../data/dragon_perturbed.ply'

    # Load clouds
    UseBunny = True
    if UseBunny:
        # Load Bunny point cloud
        cloud_o_ply = read_ply(bunny_o_path)
        cloud_p_ply = read_ply(bunny_p_path)
    else:
        # Load Dragon point cloud
        cloud_o_ply = read_ply(dragon_o_path)
        cloud_p_ply = read_ply(dragon_p_path)
    cloud_o = np.vstack((cloud_o_ply['x'], cloud_o_ply['y'], cloud_o_ply['z']))
    cloud_p = np.vstack((cloud_p_ply['x'], cloud_p_ply['y'], cloud_p_ply['z']))

    # Random transformation
    apply_random_transfo = False
    if apply_random_transfo:
        np.random.seed(42)
        t = np.random.randn(3) * 0.05
        thetas = np.pi * np.random.rand(3)
        R = RotMatrix(thetas)
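# `RotMatrix` is not defined in this excerpt. A minimal sketch, assuming it
# builds a 3x3 rotation matrix from three Euler angles as Rz @ Ry @ Rx:
import numpy as np

def RotMatrix(thetas):
    ax, ay, az = thetas
    Rx = np.array([[1, 0, 0],
                   [0, np.cos(ax), -np.sin(ax)],
                   [0, np.sin(ax), np.cos(ax)]])
    Ry = np.array([[np.cos(ay), 0, np.sin(ay)],
                   [0, 1, 0],
                   [-np.sin(ay), 0, np.cos(ay)]])
    Rz = np.array([[np.cos(az), -np.sin(az), 0],
                   [np.sin(az), np.cos(az), 0],
                   [0, 0, 1]])
    return Rz @ Ry @ Rx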
def from_tri_to_vertices(tri):
    # Collect the set of unique vertex ids referenced by a triangle array.
    v1, v2, v3 = np.split(tri, 3, axis=-1)
    vertices = np.squeeze(np.concatenate([v1, v2, v3], axis=0))
    vertices = set(vertices.tolist())
    return vertices


def from_vertices_to_mask(vertices, max_vertex_id):
    # Binary mask of length max_vertex_id with ones at the kept vertices.
    ver_mask = np.zeros([max_vertex_id])
    for v in vertices:
        ver_mask[v] = 1
    return ver_mask


wo_eyebrow_data = read_ply(wo_eyebrow_path)
wo_eyebrow_vertices = from_tri_to_vertices(np.array(wo_eyebrow_data['mesh']))
wo_nose_data = read_ply(wo_nose_path)
wo_nose_vertices = from_tri_to_vertices(np.array(wo_nose_data['mesh']))
max_ver_id = np.array(wo_eyebrow_data['points']).shape[0]
wo_eyebrow_mask = from_vertices_to_mask(wo_eyebrow_vertices, max_ver_id)
wo_nose_mask = from_vertices_to_mask(wo_nose_vertices, max_ver_id)
np.save('../resources/wo_eyebrow_mask.npy', wo_eyebrow_mask)
np.save('../resources/wo_nose_mask.npy', wo_nose_mask)
def partply(partpath, rot, opath):
    # Vertex-index dtype used by write_ply for triangle faces.
    T = np.dtype([("n", np.uint8), ("i0", np.int32), ('i1', np.int32),
                  ('i2', np.int32)])
    data = read_ply(os.path.join(partpath, 'point_sample', 'ply-10000.ply'))
    label = np.loadtxt(os.path.join(partpath, 'point_sample', 'label-10000.txt'),
                       dtype=np.int32)
    plypts = np.array(data['points'])[:, :3]
    start = np.min(label)
    end = np.max(label)
    with open(os.path.join(partpath, 'result_map.json'), 'r') as f:
        part_map = json.load(f)
    partv_lst = []
    partf_lst = []
    for i in range(start, end + 1):
        num = np.sum(label == i)
        if num > 0:
            # Gather the OBJ meshes that make up part i.
            pv = []
            pf = []
            pvn = 0
            for name in part_map['%d' % i]['objs']:
                pvi, pfi = read_obj(os.path.join(partpath, 'objs', name + '.obj'))
                pv.append(pvi)
                pf.append(pfi + pvn)
                pvn += pvi.shape[0]
            if len(pv) > 1:
                partv_lst.append(np.concatenate(pv, axis=0))
                partf_lst.append(np.concatenate(pf, axis=0))
            else:
                partv_lst.append(pv[0])
                partf_lst.append(pf[0])
    partpts = np.concatenate(partv_lst, axis=0)
    center, scale = tounit_param(partpts)
    for parti in range(len(partv_lst)):
        partptsi = partv_lst[parti]
        partface = partf_lst[parti]
        # Flip 180 degrees about Y, normalize, then apply the view rotation.
        r = R.from_euler('y', 180, degrees=True)
        pc = r.apply(partptsi).astype(np.float32)
        pc -= center
        pc /= scale
        r = R.from_euler('y', rot, degrees=True)
        pc = r.apply(pc).astype(np.float32)
        face = np.zeros(shape=[len(partface)], dtype=T)
        for i in range(len(partface)):
            face[i] = (3, int(partface[i][0]), int(partface[i][1]),
                       int(partface[i][2]))
        # Final 90-degree rotation about X before writing the PLY.
        r = R.from_euler('x', 90, degrees=True)
        pc = r.apply(pc).astype(np.float32)
        # Write the same geometry twice: once colored red, once blue.
        rc = pd.DataFrame(np.repeat(np.array([[255, 0, 0]], dtype=np.uint8),
                                    partptsi.shape[0], axis=0))
        bc = pd.DataFrame(np.repeat(np.array([[0, 0, 255]], dtype=np.uint8),
                                    partptsi.shape[0], axis=0))
        pc = pd.DataFrame(pc)
        partptsia = pd.concat([pc, rc], axis=1, ignore_index=True)
        partptsib = pd.concat([pc, bc], axis=1, ignore_index=True)
        write_ply(os.path.join(opath, 'p_%d_a.ply' % parti),
                  points=partptsia, faces=pd.DataFrame(face), color=True)
        write_ply(os.path.join(opath, 'p_%d_b.ply' % parti),
                  points=partptsib, faces=pd.DataFrame(face), color=True)
    return
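# `tounit_param` is not shown in this file. A minimal sketch, assuming it
# returns the bounding-box center and the largest extent so that
# (points - center) / scale fits inside a unit cube:
import numpy as np

def tounit_param(points):
    pmin = points.min(axis=0)
    pmax = points.max(axis=0)
    center = (pmin + pmax) / 2.0
    scale = np.max(pmax - pmin)
    return center, scale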
def pack(pnpath, partpath, spnobjpath, opath, id, angle):
    partp = os.path.join(partpath, 'part_r%d' % angle)
    imgp = os.path.dirname(spnobjpath)
    data = read_ply(os.path.join(pnpath, 'point_sample', 'ply-10000.ply'))
    label = np.loadtxt(os.path.join(pnpath, 'point_sample', 'label-10000.txt'),
                       dtype=np.int32)
    plypts = np.array(data['points'])[:, :3]
    start = np.min(label)
    end = np.max(label)
    cnt = 0
    msklst = []
    smsklst = []
    ps = []
    pstouch = []
    r1 = R.from_euler('x', -90, degrees=True)
    for i in range(start, end + 1):
        pv = plypts[label == i, :3].astype(np.float32)
        num = pv.shape[0]
        if num > 0:
            # Load the occlusion and self masks rendered for this part.
            mskp = 'p_%d_b_msk0001.png' % (cnt)
            msk = Image.open(os.path.join(partp, 'all_msk', mskp))
            msk = np.array(msk).astype(np.float32) / 255.0
            msk = msk[:, :, 2]
            smsk = Image.open(os.path.join(partp, 'self_msk', mskp))
            smsk = np.array(smsk).astype(np.float32) / 255.0
            smsk = smsk[:, :, 2]
            # Keep only parts whose mask is not (almost) empty.
            if np.sum(msk) > 9:
                pstouch.append(pv)
                cpath = os.path.join(partp, 'p_%d_b.ply' % cnt)
                pts = read_ply(cpath)
                pvp = np.array(pts['points'])[:, :3].astype(np.float32)
                pvp = r1.apply(pvp)
                ps.append(pvp)
                smsklst.append(smsk)
                msklst.append(msk)
            cnt += 1
    print(len(ps))
    num = len(ps)
    obblst = []
    obbp = []
    obbf = []
    obbcnt = 0
    for pts in ps:
        # Build an oriented bounding box both ways and keep the tighter one.
        obba = OBB.build_by_trimesh(pts)
        obbb = OBB.build_from_points(pts)
        if (obba is None) or ((obbb is not None) and (obba.volume > obbb.volume)):
            obbr = obbb
        else:
            obbr = obba
        # print('size:', obba.volume)
        obblst.append(obbr)
        # `bf` is defined elsewhere in this module (the triangle faces of an
        # 8-corner box); offset it by 8 vertices per box.
        obbf.append(bf + obbcnt * 8)
        obbcnt += 1
    # print('obbf:', len(obbf))
    # print('obbcnt:', obbcnt)
    mm = []
    for pi in range(num - 1):
        for pj in range(pi + 1, num):
            # Order the pair so that `da` comes from the larger box; two parts
            # touch when the closest inter-part distance is below the mean
            # nearest-neighbor spacing within `da`.
            da, db = (pstouch[pi], pstouch[pj]) \
                if obblst[pi].volume > obblst[pj].volume \
                else (pstouch[pj], pstouch[pi])
            tree = scipy.spatial.KDTree(da)
            dsta, idxa = tree.query(da, k=2)
            dstb, idxb = tree.query(db, k=1)
            if np.min(dstb) < np.mean(dsta[:, 1]):
                mm.append(np.array([pi, pj], dtype=np.int32))
    print('mm:', len(mm))
    if len(mm) < 1:
        return
    # Pack the rendered image, masks, touch pairs and boxes into one HDF5 file.
    img = Image.open(os.path.join(imgp, 'model_normalized_r%d_e.png' % angle))
    img = np.array(img).astype(np.float32) / 255.0
    h5fo = h5py.File(os.path.join(opath, id + '_r%d.h5' % angle), 'w')
    h5fo.create_dataset("img", data=img, compression="gzip", compression_opts=9)
    packorigin(imgp, angle, h5fo)
    h5fo.create_dataset("touch", data=np.stack(mm, axis=0),
                        compression="gzip", compression_opts=9)
    msks = np.stack(msklst, axis=0)
    h5fo.create_dataset("msk", data=msks, compression="gzip", compression_opts=9)
    smsks = np.stack(smsklst, axis=0)
    h5fo.create_dataset("smsk", data=smsks, compression="gzip",
                        compression_opts=9)
    obbk = []
    for obb in obblst:
        obbp.append(obb.points)
        obbk.append(obb.tov)
    obbv = np.concatenate(obbp, axis=0)
    h5fo.create_dataset("box", data=np.stack(obbk, axis=0),
                        compression="gzip", compression_opts=9)
    fidx = np.concatenate(obbf, axis=0)
    T = np.dtype([("n", np.uint8), ("i0", np.int32), ('i1', np.int32),
                  ('i2', np.int32)])
    face = np.zeros(shape=[12 * len(obbf)], dtype=T)
    for i in range(fidx.shape[0]):
        face[i] = (3, fidx[i, 0], fidx[i, 1], fidx[i, 2])
    print(mm, file=open(os.path.join(partpath, 'mm_r%d.txt' % angle), 'w'))
    obox = os.path.join(partpath, 'box_r%d.ply' % angle)
    write_ply(obox, points=pd.DataFrame(obbv.astype(np.float32)),
              faces=pd.DataFrame(face), as_text=True)
    h5fo.close()