def mesh_to_example(codebase_root_dir, mesh_path, dirpath, skip_existing, log_level):
  """Processes one mesh into an example directory and precomputes samples.

  Runs the external processing shell script, writes the depth/normals
  archive, then precomputes dodecahedron surface samples for later use.

  Args:
    codebase_root_dir: Root directory of the codebase (contains scripts/).
    mesh_path: Path to the input mesh file.
    dirpath: Output directory for the example.
    skip_existing: If True, steps whose outputs already exist are skipped.
    log_level: Logging verbosity. Must be passed explicitly because this
      function is an entry point for a subprocess call.
  """
  log.set_level(log_level)
  ldif_path = path_util.get_path_to_ldif_root()
  npz_path = f'{dirpath}/depth_and_normals.npz'
  if skip_existing and os.path.isfile(npz_path):
    log.verbose(f'Skipping shell script processing for {dirpath},'
                ' the output already exists.')
  else:
    sp.check_output(
        f'{codebase_root_dir}/scripts/process_mesh_local.sh {mesh_path} {dirpath} {ldif_path}',
        shell=True)
    write_depth_and_normals_npz(dirpath, npz_path)
  # Precompute the dodeca samples for later:
  e = example.InferenceExample.from_directory(dirpath)
  sample_path = e.precomputed_surface_samples_from_dodeca_path
  if skip_existing and os.path.isfile(sample_path):
    log.verbose(
        f'Skipping surface sample precompution for {dirpath}, it\'s already done.'
    )
  else:
    e.surface_sample_count = 100000
    precomputed_samples = e.surface_samples_from_dodeca
    # Sanity-check the sample tensor shape before writing it to disk.
    assert precomputed_samples.shape[0] == 100000
    assert precomputed_samples.shape[1] == 6
    file_util.write_points(sample_path, precomputed_samples)
def get_model_root():
  """Finds (creating if necessary) the trained model's root directory.

  A relative --model_directory flag value is resolved against the ldif
  repository root; an absolute value is used as-is.

  Returns:
    The path to the model directory, which is guaranteed to exist.
  """
  ldif_abspath = path_util.get_path_to_ldif_root()
  # os.path.isabs is robust to an empty flag value, unlike indexing [0],
  # and expresses the intent of the original leading-'/' check directly.
  if os.path.isabs(FLAGS.model_directory):
    model_dir_path = FLAGS.model_directory
  else:
    model_dir_path = os.path.join(ldif_abspath, FLAGS.model_directory)
  # exist_ok=True avoids the check-then-create race of the original
  # isdir() guard (another process could create the directory in between).
  os.makedirs(model_dir_path, exist_ok=True)
  return model_dir_path
def get_model_root():
  """Finds the path to the trained model's root directory based on flags.

  A relative --model_directory flag value is resolved against the ldif
  repository root; an absolute value is used as-is.

  Returns:
    The path to the existing model directory.

  Raises:
    ValueError: If the resolved directory does not exist.
  """
  ldif_abspath = path_util.get_path_to_ldif_root()
  # os.path.isabs is robust to an empty flag value, unlike indexing [0],
  # and expresses the intent of the original leading-'/' check directly.
  if os.path.isabs(FLAGS.model_directory):
    model_dir_path = FLAGS.model_directory
  else:
    model_dir_path = os.path.join(ldif_abspath, FLAGS.model_directory)
  if not os.path.isdir(model_dir_path):
    raise ValueError(f'Could not find model directory {model_dir_path}')
  return model_dir_path
def get_npz_paths(split, category, modifier=''):
  """Returns the list of npz paths for the split's ground truth.

  Args:
    split: Dataset split name (validated by ensure_split_valid).
    category: Category name used to build the filelist filename.
    modifier: Optional suffix appended to the filelist filename.

  Returns:
    The list of filenames read from the split's filelist.

  Raises:
    ValueError: If the filelist cannot be read.
  """
  t = time.time()
  ensure_split_valid(split)
  filelist = os.path.join(
      path_util.get_path_to_ldif_root(),
      'data/basedirs/%s-%s%s.txt' % (split, category, modifier))
  try:
    filenames = file_util.readlines(filelist)
  except Exception as e:
    # Narrowed from a bare `except:` (which also swallowed SystemExit and
    # KeyboardInterrupt); chain the cause so the original error is kept.
    raise ValueError('Unable to read filelist %s.' % filelist) from e
  # Convert seconds to milliseconds so the '%0.2fms' format string is
  # accurate; the original logged raw seconds while claiming ms.
  tf.logging.info('Loaded filelist in %0.2fms.', 1000.0 * (time.time() - t))
  return filenames
def mesh_to_example(codebase_root_dir, mesh_path, dirpath, skip_existing, log_level):
  """Runs the mesh-processing shell script for one mesh.

  Args:
    codebase_root_dir: Root directory of the codebase (contains scripts/).
    mesh_path: Path to the input mesh file.
    dirpath: Output directory for the example.
    skip_existing: If True, skip processing when the npz output exists.
    log_level: Logging verbosity. Must be passed explicitly because this
      function is an entry point for a subprocess call.
  """
  log.set_level(log_level)
  ldif_path = path_util.get_path_to_ldif_root()
  npz_path = f'{dirpath}/depth_and_normals.npz'
  if skip_existing and os.path.isfile(npz_path):
    log.verbose(f'Skipping shell script processing for {dirpath},'
                ' the output already exists.')
  else:
    sp.check_output(
        f'{codebase_root_dir}/scripts/process_mesh_local.sh {mesh_path} {dirpath} {ldif_path}',
        shell=True)
    # NOTE(review): the npz write step is disabled in this variant — confirm
    # whether the shell script produces the archive itself before removing:
    # write_depth_and_normals_npz(dirpath, f'{dirpath}/depth_and_normals.npz')
def __init__(self, is_local):
  """Loads the hashlist CSV into a dataframe.

  Args:
    is_local: Must be True; remote hashlists are no longer supported.

  Raises:
    ValueError: If is_local is False.
  """
  if not is_local:
    raise ValueError('Remote hashlist no longer supported.')
  self.path = os.path.join(path_util.get_path_to_ldif_root(),
                           'data/hashlist.csv')
  # Dict order matches the original usecols list exactly.
  column_types = {
      'split': str,
      'synset': str,
      'hash': str,
      'synset_inv_freq': np.float32,
  }
  with file_util.open_file(self.path, 'rt') as f:
    self.df = pd.read_csv(f, dtype=column_types, usecols=list(column_types))
def local_root(self):
  """Returns the local result-store directory under the ldif root."""
  ldif_root = path_util.get_path_to_ldif_root()
  return os.path.join(ldif_root, 'result_store')
def setUp(self):
  """Points the test fixture at the repository's test_data directory."""
  super(ModelTest, self).setUp()
  ldif_root = path_util.get_path_to_ldif_root()
  self.test_data_directory = os.path.join(ldif_root, 'test_data')