def GenObj_rh5():
    xyz_cut_rate = [0, 0, 0.9]
    xyz_cut_rate = None

    # Each later path/fn_ls assignment overrides the previous one;
    # only the last glob (MATTERPORT region0) is actually processed.
    path = '/home/z/Research/dynamic_pointnet/data/Scannet__H5F/BasicData/rawh5'
    fn_ls = glob.glob(path + '/scene0002*.rh5')

    path = '/home/z/Research/dynamic_pointnet/data/ETH__H5F/BasicData/rawh5'
    fn_ls = glob.glob(path + '/marketplacefeldkirch_station7_intensity_rgb.rh5')
    fn_ls = glob.glob(path + '/StGallenCathedral_station6_rgb_intensity-reduced.rh5')
    fn_ls = glob.glob(path + '/untermaederbrunnen_station3_xyz_intensity_rgb.rh5')

    path = '/home/z/Research/SparseVoxelNet/data/MATTERPORT_H5TF/rawh5/17DRP5sb8fy'
    fn_ls = glob.glob(path + '/region0.rh5')

    for fn in fn_ls:
        if not Raw_H5f.check_rh5_intact(fn)[0]:
            print('rh5 not intact, abort gen obj')
            return
        with h5py.File(fn, 'r') as h5f:
            rawh5f = Raw_H5f(h5f, fn)
            rawh5f.generate_objfile(IsLabelColor=True, xyz_cut_rate=xyz_cut_rate)
def generate_kitti_to_rawh5f():
    label_file_list = glob.glob(
        os.path.join(kitti_prepare.label_file_path, '*.txt'))
    print('%d files in \n%s' % (len(label_file_list), kitti_prepare.label_file_path))
    rawh5f_file_path = kitti_prepare.rawh5f_path
    if not os.path.exists(rawh5f_file_path):
        os.makedirs(rawh5f_file_path)
    for l_f_n in label_file_list:
        file_name = os.path.splitext(os.path.basename(l_f_n))[0]
        label_file_name = os.path.join(kitti_prepare.label_file_path,
                                       file_name + '.txt')
        point_cloud_file_name = os.path.join(
            kitti_prepare.point_cloud_file_path, file_name + '.bin')
        h5f_file_name = os.path.join(rawh5f_file_path, file_name + '.h5')
        with h5py.File(h5f_file_name, 'w') as h5f:
            raw_h5f = Raw_H5f(h5f, h5f_file_name, 'KITTI')
            point_cloud_data = read_point_cloud_from_bin(point_cloud_file_name)
            label_data = read_label_from_txt(label_file_name)
            num_row = point_cloud_data.shape[0]
            raw_h5f.set_num_default_row(num_row)
            raw_h5f.append_to_dset('xyz', point_cloud_data[:, 0:3])
            num_row = label_data.shape[0]
            raw_h5f.set_num_default_row(num_row)
            raw_h5f.append_to_dset('bounding_box', label_data)
            raw_h5f.create_done()
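

# Note: read_point_cloud_from_bin and read_label_from_txt are project helpers not
# defined in this section. For reference, a minimal sketch of a KITTI velodyne
# reader is given below; it assumes the standard KITTI .bin layout of flat
# float32 records (x, y, z, reflectance) and is only an illustration, not the
# helper used above.
def _example_read_kitti_velodyne_bin(bin_path):
    points = np.fromfile(bin_path, dtype=np.float32).reshape(-1, 4)
    return points  # columns: x, y, z, reflectance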
def gen_stanford_indoor3d_to_rawh5f():
    file_list = glob.glob(
        os.path.join(Indoor3d_Prepare.raw_npy_path, '*.npy'))
    print('%d files in \n%s' % (len(file_list), Indoor3d_Prepare.raw_npy_path))
    rawh5f_path = Indoor3d_Prepare.rawh5f_path
    if not os.path.exists(rawh5f_path):
        os.makedirs(rawh5f_path)
    for fn in file_list:
        base_name = os.path.splitext(os.path.basename(fn))[0]
        h5_fn = os.path.join(rawh5f_path, base_name + '.rh5')
        with h5py.File(h5_fn, 'w') as h5f:
            raw_h5f = Raw_H5f(h5f, h5_fn, 'STANFORD_INDOOR3D')
            data = np.load(fn)  # (N, 7): x, y, z, r, g, b, label
            num_row = data.shape[0]
            raw_h5f.set_num_default_row(num_row)
            raw_h5f.append_to_dset('xyz', data[:, 0:3])
            raw_h5f.append_to_dset('color', data[:, 3:6])
            raw_h5f.append_to_dset('label', data[:, 6:7])
            raw_h5f.create_done()
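

# Quick sanity check for one generated Stanford Indoor3D rh5 file. It assumes
# Raw_H5f stores each appended dset under its name at the file root; if the
# internal layout differs, adapt the dataset names. The path argument is a
# placeholder for an actual generated file.
def _example_inspect_indoor3d_rh5(h5_fn):
    with h5py.File(h5_fn, 'r') as h5f:
        for name in ('xyz', 'color', 'label'):
            if name in h5f:
                print('%s: %s' % (name, str(h5f[name].shape)))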
def WriteRawH5f_Region_Ply(k_region, rs_zf, house_name, house_h5f_dir, house_dir_extracted):
    file_name = 'region' + str(k_region)
    region_ply_fn = zip_extract('region_segmentations', house_name, file_name,
                                'ply', rs_zf, house_dir_extracted)
    rawh5f_fn = house_h5f_dir + '/rawh5f/region' + str(k_region) + '.rh5'
    IsDelVexMultiSem = True
    with open(region_ply_fn, 'r') as ply_fo, h5py.File(rawh5f_fn, 'w') as h5f:
        vertex_xyz, vertex_nxnynz, vertex_rgb, vertex_semantic, \
            face_vertex_indices, face_semantic = parse_ply_file(ply_fo, IsDelVexMultiSem)
        raw_h5f = Raw_H5f(h5f, rawh5f_fn, 'MATTERPORT')
        raw_h5f.set_num_default_row(vertex_xyz.shape[0])
        raw_h5f.append_to_dset('xyz', vertex_xyz)
        raw_h5f.append_to_dset('nxnynz', vertex_nxnynz)
        raw_h5f.append_to_dset('color', vertex_rgb)
        raw_h5f.append_to_dset('label_category', vertex_semantic[:, 0])  # category_id
        raw_h5f.append_to_dset('label_instance', vertex_semantic[:, 1])  # segment_id
        raw_h5f.append_to_dset('label_material', vertex_semantic[:, 0])  # material_id
        raw_h5f.create_done()
        raw_h5f.show_h5f_summary_info()
    return file_name
def Load_Raw_Scannet_Pickle(self):
    file_name = os.path.join(SCANNET_DATA_DIR, 'scannet_%s.pickle' % (self.split))
    rawh5f_dir = self.rawh5f_dir_base
    if not os.path.exists(rawh5f_dir):
        os.makedirs(rawh5f_dir)

    with open(file_name, 'rb') as fp:
        scene_points_list = pickle.load(fp)
        semantic_labels_list = pickle.load(fp)
        print('%d scans for file:\n %s' % (len(semantic_labels_list), file_name))
        for n in range(len(semantic_labels_list)):
            # write one RawH5f file for each scan
            rawh5f_fn = os.path.join(rawh5f_dir, self.split + '_scan_%d.rh5' % (n))
            num_points = semantic_labels_list[n].shape[0]
            with h5py.File(rawh5f_fn, 'w') as h5f:
                raw_h5f = Raw_H5f(h5f, rawh5f_fn, 'SCANNET')
                raw_h5f.set_num_default_row(num_points)
                raw_h5f.append_to_dset('xyz', scene_points_list[n])
                raw_h5f.append_to_dset('label', semantic_labels_list[n])
                raw_h5f.create_done()
def WriteRawH5f_MODELNET40(txt_path, rawh5f_dir):
    tmp = txt_path.split('/')
    rawh5f_fn = os.path.join(rawh5f_dir, tmp[-2],
                             os.path.splitext(tmp[-1])[0] + '.rh5')
    if not os.path.exists(os.path.dirname(rawh5f_fn)):
        os.makedirs(os.path.dirname(rawh5f_fn))
    if Raw_H5f.check_rh5_intact(rawh5f_fn)[0]:
        print('rh5 intact: %s' % (rawh5f_fn))
        return rawh5f_fn
    print('start write rh5: %s' % (rawh5f_fn))
    data = np.loadtxt(txt_path, delimiter=',').astype(np.float32)
    num_points = data.shape[0]
    print(num_points)
    with h5py.File(rawh5f_fn, 'w') as h5f:
        raw_h5f = Raw_H5f(h5f, rawh5f_fn, 'MODELNET40')
        raw_h5f.set_num_default_row(num_points)
        raw_h5f.append_to_dset('xyz', data[:, 0:3])
        if data.shape[1] == 6:
            raw_h5f.append_to_dset('nxnynz', data[:, 3:6])
        raw_h5f.rh5_create_done()
    return txt_path
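

# Example driver for the ModelNet40 conversion above. It assumes the layout
# <modelnet_root>/<class_name>/<sample>.txt with comma-separated
# x,y,z[,nx,ny,nz] rows (matching the slicing in WriteRawH5f_MODELNET40).
# Both paths and the worker count are placeholders.
def _example_convert_modelnet40(modelnet_root, rawh5f_dir, num_workers=1):
    import functools
    import multiprocessing
    txt_list = glob.glob(os.path.join(modelnet_root, '*', '*.txt'))
    worker = functools.partial(WriteRawH5f_MODELNET40, rawh5f_dir=rawh5f_dir)
    if num_workers > 1:
        pool = multiprocessing.Pool(num_workers)
        pool.map(worker, txt_list)
        pool.close()
        pool.join()
    else:
        for txt_path in txt_list:
            worker(txt_path)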
def WriteRawH5f_SCANNET(fn, rawh5f_dir):
    # save as rh5
    import SCANNET_util
    fn_base = os.path.basename(fn)
    rawh5f_fn = os.path.join(rawh5f_dir, fn_base + '.rh5')
    if Raw_H5f.check_rh5_intact(rawh5f_fn)[0]:
        print('rh5 intact: %s' % (rawh5f_fn))
        return fn
    print('start write rh5: %s' % (rawh5f_fn))
    scene_points, instance_labels, semantic_labels, mesh_labels = \
        SCANNET_util.parse_raw_SCANNET(fn)
    num_points = scene_points.shape[0]
    with h5py.File(rawh5f_fn, 'w') as h5f:
        raw_h5f = Raw_H5f(h5f, rawh5f_fn, 'SCANNET')
        raw_h5f.set_num_default_row(num_points)
        raw_h5f.append_to_dset('xyz', scene_points[:, 0:3])
        raw_h5f.append_to_dset('color', scene_points[:, 3:6])
        raw_h5f.append_to_dset('label_category', semantic_labels)
        raw_h5f.append_to_dset('label_instance', instance_labels)
        raw_h5f.append_to_dset('label_mesh', mesh_labels)
        raw_h5f.rh5_create_done()
    return fn
def WriteRawH5f_ETH(fn_txt, rawh5f_dir):
    import ETH_util
    fn_base = os.path.basename(fn_txt)
    fn_base = os.path.splitext(fn_base)[0]
    if fn_base[-3:] == 'txt':
        fn_base = os.path.splitext(fn_base)[0]
    rawh5f_fn = os.path.join(rawh5f_dir, fn_base + '.rh5')
    if Raw_H5f.check_rh5_intact(rawh5f_fn)[0]:
        print('rh5 intact: %s' % (rawh5f_fn))
        return fn_txt
    print('start write rh5: %s' % (rawh5f_fn))
    fn_labels = os.path.splitext(fn_txt)[0] + '.labels'
    #xyz, intensity, rgb, labels = ETH_util.parse_raw_ETH( fn_txt )

    num_points = int(1e7)
    with h5py.File(rawh5f_fn, 'w') as h5f:
        raw_h5f = Raw_H5f(h5f, rawh5f_fn, 'ETH')
        raw_h5f.set_num_default_row(num_points)

        # the raw txt is too large to load at once: read and append in buffered chunks
        with open(fn_txt, 'r') as txtf:
            # each line: {x, y, z, intensity, r, g, b}
            n_read = 0
            buf_size = int(1e7)
            while True:
                lines = txtf.readlines(buf_size)
                if len(lines) == 0:
                    break
                lines = [
                    np.fromstring(line.strip(), dtype=np.float32, sep=' ').reshape(1, -1)
                    for line in lines
                ]
                buf = np.concatenate(lines, 0)
                raw_h5f.append_to_dset('xyz', buf[:, 0:3])
                raw_h5f.append_to_dset('intensity', buf[:, 3:4])
                raw_h5f.append_to_dset('color', buf[:, 4:7])
                n_read += buf.shape[0]
                print('data read: %d line \t%s' % (n_read, fn_base))

        if os.path.exists(fn_labels):
            with open(fn_labels, 'r') as labelsf:
                buf_size = int(1e7)
                n_read_l = 0
                while True:
                    lines = labelsf.readlines(buf_size)
                    if len(lines) == 0:
                        break
                    lines = [
                        np.fromstring(line.strip(), dtype=np.int32, sep=' ').reshape(1, -1)
                        for line in lines
                    ]
                    buf = np.concatenate(lines, 0)
                    raw_h5f.append_to_dset('label_category', buf)
                    n_read_l += buf.shape[0]
                    print('label read: %d line \t%s' % (n_read_l, fn_base))
            assert n_read == n_read_l

        raw_h5f.rh5_create_done()
    print('finish : %s' % (rawh5f_fn))
    return rawh5f_fn
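

# Example driver for the ETH/Semantic3D conversion above: converts every raw txt
# scan found directly under eth_root. Paths are placeholders; scans without a
# matching .labels file simply get no 'label_category' dset.
def _example_convert_eth(eth_root, rawh5f_dir):
    for fn_txt in glob.glob(os.path.join(eth_root, '*.txt')):
        WriteRawH5f_ETH(fn_txt, rawh5f_dir)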
def GenObj_RawH5f(self):
    file_name = self.house_rawh5f_dir + '/region5.rh5'
    xyz_cut_rate = [0, 0, 0.9]
    with h5py.File(file_name, 'r') as h5f:
        rawh5f = Raw_H5f(h5f, file_name)
        rawh5f.generate_objfile(IsLabelColor=True, xyz_cut_rate=xyz_cut_rate)