def set_attrs(self, file):
    source_attrs = file['source_attrs']
    _filename = filename(file['src_path'])

    self.source = {
        'name': _filename,
        'date': unix_time(source_attrs.st_atime),
        'size': convert_file_size(source_attrs.st_size),
        'path': dst_path(file['src_path'])
    }

    target_attrs = file['target_attrs']
    self.target = {
        'name': _filename,
        'date': unix_time(target_attrs.st_atime),
        'size': convert_file_size(target_attrs.st_size),
        'path': file['dst_path']
    }
from skimage import morphology

if __name__ == '__main__':
    if len(sys.argv) < 2:
        print('[Data] Usage python 1_post_process.py config_folder')
        exit(1)

    config_folder = sys.argv[1] + '/'
    assert os.path.exists(config_folder), 'directory %s does not exist' % config_folder

    for config_file in os.listdir(config_folder):
        print('[Data] reading ' + config_folder + config_file)
        config = utils.read_json(config_folder + config_file)

        outputs = utils.read_hdf5(common.filename(config, 'output_file'))
        outputs = np.squeeze(outputs)
        print('[Data] read ' + common.filename(config, 'output_file'))

        filled = np.zeros(outputs.shape)
        for n in range(outputs.shape[0]):
            labels, num_labels = morphology.label(outputs[n], background=1,
                                                  connectivity=1, return_num=True)
            outside_label = labels[0][0][0]
            filled[n][labels != outside_label] = 1
            #filled[n][labels == outside_label] = 0
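# The loop above fills enclosed space via connected components: occupied voxels
# (background=1) act as walls, the remaining free space is labeled, and every
# voxel not in the outside component (the one containing corner (0, 0, 0)) is
# marked filled. A self-contained sketch of the same idea; note that in recent
# scikit-image versions the label function lives in skimage.measure:
import numpy as np
from skimage import measure

def fill_occupancy(occupancy):
    """Fill enclosed interior space in a binary voxel grid (sketch)."""
    labels = measure.label(occupancy, background=1, connectivity=1)
    outside_label = labels[0, 0, 0]  # assumes the corner voxel lies outside the shape
    filled = np.zeros_like(occupancy)
    filled[labels != outside_label] = 1
    return filled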
config_folder = argv[0] + '/'
assert os.path.exists(config_folder), 'directory %s does not exist' % config_folder

config_files = ['test.json']
for config_file in config_files:
    config = read_json(config_folder + config_file)

    height = config['height']
    width = config['width']
    depth = config['depth']
    scale = 1. / max(height, width, depth)
    n_observations = config['n_observations']

    space = read_hdf5(common.filename(config, 'space_file'))
    space = space.reshape((space.shape[0] * n_observations, 1,
                           space.shape[2], space.shape[3], space.shape[4]))

    vis_directory = common.dirname(config, 'vis_dir')
    if not os.path.isdir(vis_directory):
        os.makedirs(vis_directory)

    voxel_size = 0.007
    if height >= 32:
        voxel_size = 0.0055
    if height >= 48:
        voxel_size = 0.004

    log('[Data] voxel size ' + str(voxel_size))
    N = 30
    exit(1)

config_folder = sys.argv[1] + '/'
assert os.path.exists(config_folder), 'directory %s does not exist' % config_folder

config_files = [
    config_file for config_file in os.listdir(config_folder)
    if not (config_file.find('prior') > 0)
]

for config_file in config_files:
    print('[Data] reading ' + config_folder + config_file)
    config = utils.read_json(config_folder + config_file)
    set = config_file[:-5]

    filled = utils.read_hdf5(common.filename(config, 'filled_file'))
    print('[Data] read ' + common.filename(config, 'filled_file'))
    inputs = utils.read_hdf5(common.filename(config, 'input_file'))
    print('[Data] read ' + common.filename(config, 'input_file'))
    space = utils.read_hdf5(common.filename(config, 'space_file'))
    print('[Data] read ' + common.filename(config, 'space_file'))

    print('[Data] filled: %s' % ' x '.join(map(str, filled.shape)))
    print('[Data] inputs: %s' % ' x '.join(map(str, inputs.shape)))
    print('[Data] space: %s' % ' x '.join(map(str, space.shape)))

    if config['sdf']:
        sdfs = utils.read_hdf5(common.filename(config, 'sdf_file'))
        print('[Data] read ' + common.filename(config, 'sdf_file'))
    config['focal_length_x'], config['focal_length_y'],
    config['principal_point_x'], config['principal_point_y']
], dtype=np.float64)
size = np.array([image_height, image_width], dtype=np.int32)
mesh_center = (config['mesh_center_x'], config['mesh_center_y'], config['mesh_center_z'])
znf = np.array([config['z_near'], config['z_far']], dtype=float)

height = config['height']
width = config['width']
depth = config['depth']
suffix = config['suffix']

off_directory = config['multiplied_directory'] + '/' + set + '/'
depth_file = common.filename(config, 'depth_file')
angles_file = common.filename(config, 'render_orientation_file')

off_files = utils.read_ordered_directory(off_directory)
n_files = len(off_files)
print('[Data] found %d off files in %s' % (n_files, off_directory))

n_observations = config['n_observations']
depth_maps = np.zeros((n_files, n_observations, image_height, image_width))
rotations = np.zeros((n_files, n_observations, 3))

for n in range(n_files):
    base_mesh = Mesh.from_off(off_files[n])
    for k in range(n_observations):
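# The focal lengths and principal point read into the array above are standard
# pinhole intrinsics. For reference, a sketch of how they assemble into the
# usual 3x3 camera matrix K (the helper name is mine, not from this repo):
import numpy as np

def intrinsics_matrix(config):
    """Pinhole camera matrix K from the config values used above (sketch)."""
    return np.array([
        [config['focal_length_x'], 0., config['principal_point_x']],
        [0., config['focal_length_y'], config['principal_point_y']],
        [0., 0., 1.]
    ], dtype=np.float64)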
num = int(argv[1])
assert num > 0

config_files = ['test.json']
for config_file in config_files:
    config = read_json(config_folder + config_file)

    height = config['height']
    width = config['width']
    depth = config['depth']
    scale = 1. / max(height, width, depth)
    n_observations = config['n_observations']

    inputs = read_hdf5(common.filename(config, 'input_file', '.h5', num))
    inputs = inputs.reshape((inputs.shape[0] * n_observations, 1,
                             inputs.shape[2], inputs.shape[3], inputs.shape[4]))

    vis_directory = common.dirname(config, 'vis_dir')
    if not os.path.isdir(vis_directory):
        os.makedirs(vis_directory)

    voxel_size = 0.007
    if height >= 32:
        voxel_size = 0.0055
    if height >= 48:
        voxel_size = 0.004

    log('[Data] voxel size ' + str(voxel_size))
    exit(1)

config_file = sys.argv[1]
assert os.path.exists(config_file)

config = utils.read_json(config_file)
set = ntpath.basename(config_file)[:-5]
print('[Data] processing ' + set)

split = []
with open(config['split_file'], 'r') as f:
    split = f.readlines()
    split = [int(line.strip()) for line in split if line.strip() != '']

bounding_box_directory = common.filename(config, 'bounding_box_directory', '', set)
label_directory = config['label_directory']
multiplier = config['multiplier']

if not os.path.exists(bounding_box_directory):
    os.makedirs(bounding_box_directory)

num_bounding_boxes = 0
for i in split:
    bounding_box_file = label_directory + '/%06d.txt' % i
    bounding_boxes = BoundingBox.from_kitti(bounding_box_file)

    multiplied_bounding_boxes = []
    for bounding_box in bounding_boxes:
        if bounding_box.type.lower() == config['object_type']:
            for j in range(multiplier):
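# BoundingBox.from_kitti is a repo helper whose implementation is not shown.
# As a rough sketch of what parsing a KITTI object label file involves, with
# the field layout taken from the standard KITTI object development kit (the
# function name and dict layout here are illustrative assumptions):
def parse_kitti_labels(label_file):
    """Parse one KITTI label file into a list of dicts (sketch)."""
    boxes = []
    with open(label_file, 'r') as f:
        for line in f:
            parts = line.strip().split(' ')
            if len(parts) < 15:
                continue
            boxes.append({
                'type': parts[0],  # e.g. 'Car', 'Pedestrian', 'Cyclist'
                'truncated': float(parts[1]),
                'occluded': int(parts[2]),
                'alpha': float(parts[3]),
                'bbox': [float(v) for v in parts[4:8]],  # 2D box: left, top, right, bottom
                'dimensions': [float(v) for v in parts[8:11]],  # height, width, length [m]
                'location': [float(v) for v in parts[11:14]],  # x, y, z in camera coordinates [m]
                'rotation_y': float(parts[14]),
            })
    return boxes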
        '[Error] $BLENDER --background --python 12_3_visualize_binvox.py -- 1>/dev/null config_folder',
        LogLevel.ERROR)
    exit()

config_file = argv[0]
assert os.path.exists(config_file), 'file %s does not exist' % config_file

config = read_json(config_file)
set = ntpath.basename(config_file)[:-5]

height = config['height']
width = config['width']
depth = config['depth']
scale = 1. / max(height, width, depth)

space = read_hdf5(common.filename(config, 'part_space_file', '_f.h5', set))
log(space.shape)

vis_directory = common.filename(config, 'vis_dir', '', set)
if not os.path.isdir(vis_directory):
    os.makedirs(vis_directory)

voxel_size = 0.007
if height >= 32:
    voxel_size = 0.0055
if height >= 48:
    voxel_size = 0.004

log('[Data] voxel size ' + str(voxel_size))
N = 30
log('[Data] %d samples' % space.shape[0])
import common

if __name__ == '__main__':
    if len(sys.argv) < 2:
        print('Usage!')
        exit(1)

    config_file = sys.argv[1]
    assert os.path.exists(config_file)
    print('[Data] reading ' + config_file)

    config = utils.read_json(config_file)
    dataset = ntpath.basename(config_file)[:-5]

    inputs = utils.read_hdf5(common.filename(config, 'input_file', '_f.h5', dataset))
    print('[Data] read ' + common.filename(config, 'input_file', '_f.h5', dataset))
    inputs_sdf = utils.read_hdf5(common.filename(config, 'input_sdf_file', '_f.h5', dataset))
    print('[Data] read ' + common.filename(config, 'input_sdf_file', '_f.h5', dataset))
    inputs_tsdf = utils.read_hdf5(common.filename(config, 'input_tsdf_file', '_f.h5', dataset))
    print('[Data] read ' + common.filename(config, 'input_tsdf_file', '_f.h5', dataset))
    inputs_ltsdf = utils.read_hdf5(common.filename(config, 'input_ltsdf_file', '_f.h5', dataset))
    print('[Data] read ' + common.filename(config, 'input_ltsdf_file', '_f.h5', dataset))
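# utils.read_hdf5 and utils.write_hdf5 are repo helpers used throughout these
# scripts. A minimal sketch of what they presumably wrap, assuming h5py and a
# single dataset per file (the dataset name 'tensor' is an assumption):
import h5py

def read_hdf5(file, key='tensor'):
    """Read one dataset from an HDF5 file into a NumPy array (sketch)."""
    with h5py.File(file, 'r') as h5f:
        return h5f[key][()]

def write_hdf5(file, tensor, key='tensor'):
    """Write a NumPy array as one dataset of an HDF5 file (sketch)."""
    with h5py.File(file, 'w') as h5f:
        h5f.create_dataset(key, data=tensor)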
    assert os.path.exists(config_folder + config_file), 'file %s not found' % (config_folder + config_file)
    print('[Data] found ' + config_folder + config_file)

for config_file in config_files:
    to_config = utils.read_json(to_config_folder + config_file)

    for key in hdf5_file_keys:
        data = []
        print('[Data] +')
        for from_config_folder in config_folders:
            from_config = utils.read_json(from_config_folder + config_file)
            print('[Data] | reading ' + common.filename(from_config, key))
            data.append(utils.read_hdf5(common.filename(from_config, key)))

        print('[Data] \'-> writing ' + common.filename(to_config, key))
        data = np.concatenate(tuple(data), axis=0)
        utils.write_hdf5(common.filename(to_config, key), data)

    count = 0
    for key in dir_keys:
        for from_config_folder in config_folders:
            from_config = utils.read_json(from_config_folder + config_file)
            from_dir = common.dirname(from_config, key)
            to_dir = common.dirname(to_config, key)
if __name__ == '__main__':
    if len(sys.argv) < 2:
        print('[Data] Usage python 10_reconstruct.py config_folder')
        exit(1)

    config_folder = sys.argv[1] + '/'
    assert os.path.exists(config_folder), 'directory %s does not exist' % config_folder

    config_files = [
        config_file for config_file in os.listdir(config_folder)
        if not (config_file.find('prior') > 0)
    ]

    for config_file in config_files:
        print('[Data] reading ' + config_folder + config_file)
        config = utils.read_json(config_folder + config_file)
        set = config_file[:-5]

        depths = utils.read_hdf5(common.filename(config, 'depth_file'))
        print('[Data] read ' + common.filename(config, 'depth_file'))
        print('[Data] depths: %s' % ' x '.join(map(str, depths.shape)))

        for i in range(min(25, depths.shape[0])):
            n = i
            print('[Data] visualizing %s %d/%d' % (set, (n + 1), depths.shape[0]))

            pyplot.clf()
            pyplot.imshow(depths[n][0], interpolation='none')
            pyplot.savefig('%s_%d_depth.png' % (set, n))
    print('[Data] Usage python 1_post_process.py config_folder')
    exit(1)

config_folder = sys.argv[1] + '/'
assert os.path.exists(config_folder), 'directory %s does not exist' % config_folder

config_files = [config_file for config_file in os.listdir(config_folder)]
for config_file in config_files:
    print('[Data] reading ' + config_folder + config_file)
    config = utils.read_json(config_folder + config_file)

    if config['synthetic_sdf']:
        truncation = config['truncation']

        filled_file = common.filename(config, 'filled_file')
        filled = utils.read_hdf5(filled_file)
        print('[Data] read ' + filled_file)

        sdfs = np.zeros(filled.shape)
        sdfs = np.squeeze(sdfs)

        for n in range(filled.shape[0]):
            positive_df = ndimage.distance_transform_edt(1 - filled[n])
            negative_df = ndimage.distance_transform_edt(filled[n])

            sdf = np.zeros(filled[n].shape)
            mask = (filled[n] == 1)
            sdf[mask] = -negative_df[mask]
            mask = (filled[n] == 0)
            sdf[mask] = positive_df[mask]
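# The loop above derives a signed distance field from a binary occupancy grid
# via two Euclidean distance transforms: distances are negative inside the
# shape and positive outside. The same idea as a self-contained function (the
# function name and docstring are mine, not from the repo):
import numpy as np
from scipy import ndimage

def occupancy_to_sdf(occupancy):
    """Signed distances in voxel units: negative inside, positive outside (sketch)."""
    occupancy = occupancy.astype(np.uint8)
    outside = ndimage.distance_transform_edt(1 - occupancy)  # distance to surface for free voxels
    inside = ndimage.distance_transform_edt(occupancy)       # distance to surface for occupied voxels
    return np.where(occupancy == 1, -inside, outside)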
if len(sys.argv) < 2:
    print('[Data] Usage python 10_reconstruct.py config_folder')
    exit(1)

config_folder = sys.argv[1] + '/'
assert os.path.exists(config_folder), 'directory %s does not exist' % config_folder

config_files = [
    config_file for config_file in os.listdir(config_folder)
    if not (config_file.find('prior') > 0)
]

for config_file in config_files:
    print('[Data] reading ' + config_folder + config_file)
    config = utils.read_json(config_folder + config_file)
    set = config_file[:-5]

    assert ('sdf' in config.keys() and config['sdf']) \
        or ('synthetic_sdf' in config.keys() and config['synthetic_sdf'])

    input_sdfs = utils.read_hdf5(common.filename(config, 'input_sdf_file'))
    print('[Data] read ' + common.filename(config, 'input_sdf_file'))
    input_ltsdfs = utils.read_hdf5(common.filename(config, 'input_ltsdf_file'))
    print('[Data] read ' + common.filename(config, 'input_ltsdf_file'))
    output_sdfs = utils.read_hdf5(common.filename(config, 'sdf_file'))
    print('[Data] read ' + common.filename(config, 'sdf_file'))
    output_ltsdfs = utils.read_hdf5(common.filename(config, 'ltsdf_file'))
    print('[Data] read ' + common.filename(config, 'ltsdf_file'))

    print('[Data] input_sdfs: %s' % ' x '.join(map(str, input_sdfs.shape)))
    print('[Data] input_ltsdfs: %s' % ' x '.join(map(str, input_ltsdfs.shape)))
    print('[Data] output_sdfs: %s' % ' x '.join(map(str, output_sdfs.shape)))
    print('[Data] output_ltsdfs: %s' % ' x '.join(map(str, output_ltsdfs.shape)))

    for i in range(min(25, output_sdfs.shape[0])):
if __name__ == '__main__':
    if len(sys.argv) < 2:
        print('[Experiments] use python visualize_experiment.py config_folder')
        exit(1)

    config_folder = sys.argv[1] + '/'
    assert os.path.exists(config_folder)

    config_files = ['training_inference.json', 'test.json']
    for config_file in config_files:
        config = utils.read_json(config_folder + config_file)
        print('[Data] read ' + config_folder + config_file)

        n_observations = config['n_observations']
        statistics_file = common.filename(config, 'statistics_file', '.txt')

        with open(statistics_file, 'w') as f:
            targets = utils.read_hdf5(common.filename(config, 'output_file'))
            targets = np.repeat(targets, n_observations, axis=0)
            print('[Data] read ' + common.filename(config, 'output_file'))

            inputs = utils.read_hdf5(common.filename(config, 'input_file'))
            inputs = inputs.reshape((inputs.shape[0] * n_observations, 1,
                                     inputs.shape[2], inputs.shape[3], inputs.shape[4]))
            print('[Data] read ' + common.filename(config, 'input_file'))

            space = utils.read_hdf5(common.filename(config, 'space_file'))
            space = space.reshape(
from mesh import Mesh
from point_cloud import PointCloud
import ntpath
import common

if __name__ == '__main__':
    if len(sys.argv) < 2:
        print('[Data] Usage python create_cuboids.py config_file')
        exit(1)

    config_file = sys.argv[1]
    assert os.path.exists(config_file)

    config = utils.read_json(config_file)
    dataset = ntpath.basename(config_file)[:-5]

    orig_dir = common.filename(config, 'bounding_box_txt_directory', '', dataset)
    gt_dir = common.filename(config, 'velodyne_gt_txt_directory', '', dataset)
    gt_files = utils.read_ordered_directory(gt_dir)

    for i in range(len(gt_files)):
        orig_file = orig_dir + '/' + ntpath.basename(gt_files[i])

        orig_point_cloud = PointCloud.from_txt(orig_file)
        gt_point_cloud = PointCloud.from_txt(gt_files[i])
        print('[Data] read ' + orig_file)
        print('[Data] read ' + gt_files[i])

        fig = plt.figure()
        ax = fig.add_subplot(111, projection='3d')

        xx = gt_point_cloud.points[:, 0]
        yy = gt_point_cloud.points[:, 1]
config_file = sys.argv[1]
dataset = ntpath.basename(config_file)[:-5]

base_directory = dataset + '/'
if not os.path.exists(base_directory):
    os.makedirs(base_directory)

config = utils.read_json(config_file)

#statistics = utils.read_hdf5('/work/data/shapenet_3dop/real_space_statistics_training_prior.h5')
#statistics = 1 - statistics
#print('[Data] read statistics')

inputs = utils.read_hdf5(common.filename(config, 'input_file', '_f.h5', dataset))
print('[Data] read ' + common.filename(config, 'input_file', '_f.h5', dataset))
space = utils.read_hdf5(common.filename(config, 'part_space_file', '_f.h5', dataset))
print('[Data] read ' + common.filename(config, 'part_space_file', '_f.h5', dataset))

#statistics = statistics.reshape(1, 1, statistics.shape[0], statistics.shape[1], statistics.shape[2])
#statistics = np.repeat(statistics, space.shape[0], axis=0)
#print(space.shape, statistics.shape)
#invalid_space = space*statistics

points = []
    print('[Data] Usage python 1_post_process.py config_folder')
    exit(1)

config_folder = sys.argv[1] + '/'
assert os.path.exists(config_folder), 'directory %s does not exist' % config_folder

for config_file in os.listdir(config_folder):
    print('[Data] reading ' + config_folder + config_file)
    config = utils.read_json(config_folder + config_file)

    # First we make a minor fix for the space.
    # We do not allow voxels marked as free space which are also occupied,
    # but during voxelization this usually happens!
    space_file = common.filename(config, 'space_file')
    if os.path.exists(space_file):
        space = utils.read_hdf5(space_file)

        input_file = common.filename(config, 'input_file')
        input = utils.read_hdf5(input_file)

        space[input == 1] = 0
        if len(space.shape) < 5:
            space = np.expand_dims(space, axis=1)

        utils.write_hdf5(space_file, space)
        print('[Data] wrote ' + space_file)

    keys = [
        'input', 'space', 'output', 'filled', 'sdf', 'tsdf', 'ltsdf',
        'input_sdf', 'input_tsdf', 'input_ltsdf'
import binvox_rw

if __name__ == '__main__':
    if len(sys.argv) < 2:
        print('[Data] Usage python 10_reconstruct.py config_folder')
        exit(1)

    config_folder = sys.argv[1] + '/'
    assert os.path.exists(config_folder), 'directory %s does not exist' % config_folder

    for config_file in os.listdir(config_folder):
        print('[Data] reading ' + config_folder + config_file)
        config = utils.read_json(config_folder + config_file)

        filled_file = common.filename(config, 'filled_file')
        assert os.path.exists(filled_file), 'file %s does not exist' % filled_file

        filled = utils.read_hdf5(filled_file)
        filled = filled.squeeze()

        binvox_directory = common.dirname(config, 'binvox_dir')
        utils.makedir(binvox_directory)

        for n in range(filled.shape[0]):
            model = binvox_rw.Voxels(filled[n] > 0.5, filled[n].shape, (0, 0, 0), 1)
            binvox_file = binvox_directory + str(n) + '.binvox'
            # .binvox is a binary format, so write in binary mode.
            with open(binvox_file, 'wb') as fp:
                model.write(fp)
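# For reference, files written this way can be loaded back with the standard
# binvox-rw-py reader; a minimal round-trip sketch (the file name is
# illustrative):
import binvox_rw

with open('0.binvox', 'rb') as fp:
    model_read = binvox_rw.read_as_3d_array(fp)
print(model_read.dims, model_read.translate, model_read.scale)
print(model_read.data.shape, model_read.data.dtype)  # boolean occupancy grid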
if __name__ == '__main__':
    if len(sys.argv) < 2:
        print('[Data] Usage python 1_post_process.py config_folder')
        exit(1)

    config_folder = sys.argv[1] + '/'
    assert os.path.exists(config_folder), 'directory %s does not exist' % config_folder

    config_files = [config_file for config_file in os.listdir(config_folder)]
    for config_file in config_files:
        print('[Data] reading ' + config_folder + config_file)
        config = utils.read_json(config_folder + config_file)

        truncation = config['truncation']
        sdfs = utils.read_hdf5(common.filename(config, 'sdf_file'))

        tsdfs = sdfs.copy()
        tsdfs[tsdfs > truncation] = truncation
        tsdfs[tsdfs < -truncation] = -truncation

        ltsdfs = tsdfs.copy()
        ltsdfs[ltsdfs > 0] = np.log(ltsdfs[ltsdfs > 0] + 1)
        ltsdfs[ltsdfs < 0] = -np.log(np.abs(ltsdfs[ltsdfs < 0]) + 1)

        tsdf_file = common.filename(config, 'tsdf_file')
        ltsdf_file = common.filename(config, 'ltsdf_file')

        utils.write_hdf5(tsdf_file, tsdfs)
        print('[Data] wrote ' + tsdf_file)
        utils.write_hdf5(ltsdf_file, ltsdfs)
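# The truncation and log compression above as one reusable function; a sketch
# (the name is mine) that is element-wise equivalent to the loop body, since
# sign(x) * log(1 + |x|) reproduces both branches and leaves zeros untouched:
import numpy as np

def to_log_tsdf(sdf, truncation):
    """Clamp an SDF to [-truncation, truncation], then log-scale it (sketch)."""
    tsdf = np.clip(sdf, -truncation, truncation)
    return np.sign(tsdf) * np.log1p(np.abs(tsdf))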
if __name__ == '__main__':
    if len(sys.argv) < 2:
        print('[Data] Usage python create_cuboids.py config_file')
        exit(1)

    config_file = sys.argv[1]
    assert os.path.exists(config_file)
    print('[Data] reading ' + config_file)

    config = utils.read_json(config_file)
    set = ntpath.basename(config_file)[:-5]

    # ['input_file', 'input_sdf_file', 'full_space_file', 'part_space_file']
    size = 0

    inputs_file = common.filename(config, 'input_file', '.h5', set)
    inputs = utils.read_hdf5(inputs_file)
    print('[Data] read ' + inputs_file)

    inputs_sdf_file = common.filename(config, 'input_sdf_file', '.h5', set)
    inputs_sdf = utils.read_hdf5(inputs_sdf_file)
    print('[Data] read ' + inputs_sdf_file)

    if config['gt']:
        inputs_combined_gt_file = common.filename(config, 'input_combined_gt_file', '.h5', set)
        inputs_combined_gt = utils.read_hdf5(inputs_combined_gt_file)
        print('[Data] read ' + inputs_combined_gt_file)
        #inputs_sdf_combined_gt_file = common.filename(config, 'input_sdf_combined_gt_file', '.h5', set)
]
dir_keys = ['off_gt_dir']

config_files = [
    config_file for config_file in os.listdir(from_config_folder)
]
for config_file in config_files:
    assert os.path.exists(to_config_folder + config_file)

for config_file in config_files:
    from_config = utils.read_json(from_config_folder + config_file)
    to_config = utils.read_json(to_config_folder + config_file)

    for key in hdf5_file_keys:
        from_file = common.filename(from_config, key)
        to_file = common.filename(to_config, key)

        print('[Data] copying')
        print(' ' + from_file)
        print(' ' + to_file)
        shutil.copy(from_file, to_file)

    for key in dir_keys:
        from_dir = common.dirname(from_config, key)
        to_dir = common.dirname(to_config, key)

        # !
        assert not os.path.exists(to_dir)
        print('[Data] copying')
import common

if __name__ == '__main__':
    if len(sys.argv) < 2:
        print('[Data] Usage python 1_post_process.py config_file')
        exit(1)

    config_file = sys.argv[1]
    assert os.path.exists(config_file), 'file %s does not exist' % config_file
    print('[Data] reading ' + config_file)

    config = utils.read_json(config_file)
    set = ntpath.basename(config_file)[:-5]

    truncation = config['truncation']
    input_sdfs = utils.read_hdf5(common.filename(config, 'input_sdf_file', '_f.h5', set))

    input_tsdfs = input_sdfs.copy()
    input_tsdfs[input_tsdfs > truncation] = truncation
    input_tsdfs[input_tsdfs < -truncation] = -truncation

    input_ltsdfs = input_tsdfs.copy()
    input_ltsdfs[input_ltsdfs > 0] = np.log(input_ltsdfs[input_ltsdfs > 0] + 1)
    input_ltsdfs[input_ltsdfs < 0] = -np.log(np.abs(input_ltsdfs[input_ltsdfs < 0]) + 1)

    input_tsdf_file = common.filename(config, 'input_tsdf_file', '_f.h5', set)
    input_ltsdf_file = common.filename(config, 'input_ltsdf_file', '_f.h5', set)

    utils.write_hdf5(input_tsdf_file, input_tsdfs)
    print('[Data] wrote ' + input_tsdf_file)
    utils.write_hdf5(input_ltsdf_file, input_ltsdfs)
    exit(1)

config_folder = sys.argv[1] + '/'
assert os.path.exists(config_folder), 'directory %s does not exist' % config_folder

config_files = [
    config_file for config_file in os.listdir(config_folder)
    if not (config_file.find('prior') > 0)
]

for config_file in config_files:
    print('[Data] reading ' + config_folder + config_file)
    config = utils.read_json(config_folder + config_file)
    set = config_file[:-5]

    outputs = utils.read_hdf5(common.filename(config, 'filled_file'))
    print('[Data] read ' + common.filename(config, 'filled_file'))

    num = 2
    inputs = utils.read_hdf5(filename(config, 'input_file', num))
    print('[Data] read ' + filename(config, 'input_file', num))
    space = utils.read_hdf5(filename(config, 'space_file', num))
    print('[Data] read ' + filename(config, 'space_file', num))

    print('[Data] outputs: %s' % ' x '.join(map(str, outputs.shape)))
    print('[Data] inputs: %s' % ' x '.join(map(str, inputs.shape)))
    print('[Data] space: %s' % ' x '.join(map(str, space.shape)))

    n_observations = config['n_observations']
        '[Error] $BLENDER --background --python 12_1_visualize_gt.py -- 1>/dev/null config_folder',
        LogLevel.ERROR)
    exit()

config_file = argv[0]
assert os.path.exists(config_file), 'file %s does not exist' % config_file

config = read_json(config_file)
set = ntpath.basename(config_file)[:-5]

height = config['height']
width = config['width']
depth = config['depth']
scale = 1. / max(height, depth, width)

txt_directory = common.filename(config, 'bounding_box_txt_directory', '', set)
txt_files = read_ordered_directory(txt_directory)

vis_directory = common.filename(config, 'vis_dir', '', set)
if not os.path.isdir(vis_directory):
    os.makedirs(vis_directory)

N = 30
log('[Data] %d samples' % len(txt_files))

for i in range(N):
    n = i * (len(txt_files) // N)
    txt_file = txt_files[n]

    camera_target = initialize()
    txt_material = make_material('BRC_Material_Point_Cloud', (0.65, 0.23, 0.25), 1, True)
        '[Error] $BLENDER --background --python 12_3_visualize_binvox.py -- 1>/dev/null config_folder',
        LogLevel.ERROR)
    exit()

config_file = argv[0]
assert os.path.exists(config_file), 'file %s does not exist' % config_file

config = read_json(config_file)
set = ntpath.basename(config_file)[:-5]

height = config['height']
width = config['width']
depth = config['depth']
scale = 1. / max(height, width, depth)

inputs = read_hdf5(common.filename(config, 'input_file', '_f.h5', set))
log(inputs.shape)

vis_directory = common.filename(config, 'vis_dir', '', set)
if not os.path.isdir(vis_directory):
    os.makedirs(vis_directory)

voxel_size = 0.007
if height >= 32:
    voxel_size = 0.0055
if height >= 48:
    voxel_size = 0.004

log('[Data] voxel size ' + str(voxel_size))
N = 30
log('[Data] %d samples' % inputs.shape[0])
config_folder = argv[0] + '/'
assert os.path.exists(config_folder), 'directory %s does not exist' % config_folder

config_files = ['test.json']
for config_file in config_files:
    config = read_json(config_folder + config_file)

    height = config['height']
    width = config['width']
    depth = config['depth']
    scale = 1. / max(height, width, depth)
    n_observations = config['n_observations']

    filled = read_hdf5(common.filename(config, 'filled_file'))
    filled = np.repeat(filled, n_observations, axis=0)

    vis_directory = common.dirname(config, 'vis_dir')
    if not os.path.isdir(vis_directory):
        os.makedirs(vis_directory)

    voxel_size = 0.007
    if height >= 32:
        voxel_size = 0.0055
    if height >= 48:
        voxel_size = 0.004

    log('[Data] voxel size ' + str(voxel_size))
    N = 30
    log('[Data] %d samples' % filled.shape[0])
config_folder = argv[0] + '/'
assert os.path.exists(config_folder), 'directory %s does not exist' % config_folder

config_files = ['test.json']
for config_file in config_files:
    config = read_json(config_folder + config_file)

    height = config['height']
    width = config['width']
    depth = config['depth']
    scale = 1. / max(height, width, depth)
    n_observations = config['n_observations']

    inputs = read_hdf5(common.filename(config, 'input_file'))
    inputs = inputs.reshape((inputs.shape[0] * n_observations, 1,
                             inputs.shape[2], inputs.shape[3], inputs.shape[4]))

    vis_directory = common.dirname(config, 'vis_dir')
    if not os.path.isdir(vis_directory):
        os.makedirs(vis_directory)

    voxel_size = 0.007
    if height >= 32:
        voxel_size = 0.0055
    if height >= 48:
        voxel_size = 0.004

    log('[Data] voxel size ' + str(voxel_size))
import common

if __name__ == '__main__':
    if len(sys.argv) < 2:
        print('Usage!')
        exit(1)

    config_file = sys.argv[1]
    assert os.path.exists(config_file)
    print('[Data] reading ' + config_file)

    config = utils.read_json(config_file)
    dataset = ntpath.basename(config_file)[:-5]

    inputs = utils.read_hdf5(common.filename(config, 'input_file', '_f.h5', dataset))
    print('[Data] read ' + common.filename(config, 'input_file', '_f.h5', dataset))

    #inputs_sdf = utils.read_hdf5(common.filename(config, 'input_sdf_file', '_f.h5', dataset))
    #print('[Data] read ' + common.filename(config, 'input_sdf_file', '_f.h5', dataset))
    #inputs_combined_gt = utils.read_hdf5(common.filename(config, 'input_combined_gt_file', '_f.h5', dataset))
    #print('[Data] read ' + common.filename(config, 'input_combined_gt_file', '_f.h5', dataset))
    #inputs_sdf_combined_gt = utils.read_hdf5(common.filename(config, 'input_sdf_combined_gt_file', '_f.h5', dataset))
    #print('[Data] read ' + common.filename(config, 'input_sdf_combined_gt_file', '_f.h5', dataset))
    #inputs_gt = utils.read_hdf5(common.filename(config, 'input_gt_file', '_f.h5', dataset))
    #print('[Data] read ' + common.filename(config, 'input_gt_file', '_f.h5', dataset))
    #inputs_sdf_gt = utils.read_hdf5(common.filename(config, 'input_sdf_gt_file', '_f.h5', dataset))
    #print('[Data] read ' + common.filename(config, 'input_sdf_gt_file', '_f.h5', dataset))
    #full_space_gt = utils.read_hdf5(common.filename(config, 'full_space_gt_file', '_f.h5', dataset))
import mcubes

if __name__ == '__main__':
    if len(sys.argv) < 2:
        print('[Data] Usage python 10_reconstruct.py config_folder')
        exit(1)

    config_folder = sys.argv[1] + '/'
    assert os.path.exists(config_folder), 'directory %s does not exist' % config_folder

    for config_file in os.listdir(config_folder):
        print('[Data] reading ' + config_folder + config_file)
        config = utils.read_json(config_folder + config_file)

        sdf_file = common.filename(config, 'sdf_file')
        assert os.path.exists(sdf_file), 'file %s does not exist' % sdf_file

        sdfs = utils.read_hdf5(sdf_file)
        sdfs = sdfs.squeeze()

        reconstructed_directory = common.dirname(config, 'reconstructed_dir')
        utils.makedir(reconstructed_directory)

        for n in range(sdfs.shape[0]):
            vertices, triangles = mcubes.marching_cubes(-sdfs[n].transpose(1, 0, 2), 0)
            off_file = reconstructed_directory + '/%d.off' % n
            mcubes.export_off(vertices, triangles, off_file)
            print('[Data] wrote %s' % off_file)
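# Quick sanity check of the PyMCubes calls used above on a synthetic sphere
# SDF (a sketch; marching_cubes and export_obj are standard PyMCubes, while
# export_off is assumed to exist in the version this repo uses):
import numpy as np
import mcubes

x, y, z = np.mgrid[:64, :64, :64]
sphere_sdf = np.sqrt((x - 32.)**2 + (y - 32.)**2 + (z - 32.)**2) - 16.
vertices, triangles = mcubes.marching_cubes(-sphere_sdf, 0)  # negate so the inside is positive
mcubes.export_obj(vertices, triangles, 'sphere.obj')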
if __name__ == '__main__':
    config_file = sys.argv[1]
    dataset = ntpath.basename(config_file)[:-5]

    base_directory = dataset + '/'
    if not os.path.exists(base_directory):
        os.makedirs(base_directory)

    config = utils.read_json(config_file)

    #statistics = utils.read_hdf5('/work/data/shapenet_3dop/real_space_statistics_training_prior.h5')
    #statistics = 1 - statistics
    #print('[Data] read statistics')

    inputs = utils.read_hdf5(common.filename(config, 'input_file', '_f.h5', dataset))
    print('[Data] read ' + common.filename(config, 'input_file', '_f.h5', dataset))
    inputs_combined_gt = utils.read_hdf5(common.filename(config, 'input_combined_gt_file', '_f.h5', dataset))
    print('[Data] read ' + common.filename(config, 'input_combined_gt_file', '_f.h5', dataset))
    space = utils.read_hdf5(common.filename(config, 'part_space_file', '_f.h5', dataset))
    print('[Data] read ' + common.filename(config, 'part_space_file', '_f.h5', dataset))

    #statistics = statistics.reshape(1, 1, statistics.shape[0], statistics.shape[1], statistics.shape[2])
    #statistics = np.repeat(statistics, space.shape[0], axis=0)
    #print(space.shape, statistics.shape)
    #invalid_space = space*statistics

    points = []