def run(self, network_pkl, run_dir=None, dataset_args=None, mirror_augment=None,
        num_gpus=1, tf_config=None, log_results=True):
    """Evaluate this metric for the given network pickle.

    Args:
        network_pkl: Path of the pickle holding (G, D, Gs) networks.
        run_dir: Optional training-run directory; used both to recover
            dataset/augmentation settings and as the destination for the
            metric log file.
        dataset_args: Dataset configuration dict, or None to recover it
            from ``run_dir``.
        mirror_augment: Mirror-augmentation flag, or None to recover it
            from ``run_dir``.
        num_gpus: Number of GPUs forwarded to ``_evaluate``.
        tf_config: TensorFlow session configuration dict.
        log_results: When True, print (and optionally append to a log
            file under ``run_dir``) the formatted result string.
    """
    self._network_pkl = network_pkl
    self._dataset_args = dataset_args
    self._mirror_augment = mirror_augment
    self._results = []

    # Fall back to the settings recorded by the previous training run
    # whenever the caller left them unspecified.
    if (dataset_args is None or mirror_augment is None) and run_dir is not None:
        prev_config = misc.parse_config_for_previous_run(run_dir)
        self._dataset_args = dict(prev_config['dataset'])
        self._dataset_args['shuffle_mb'] = 0  # deterministic order for evaluation
        self._mirror_augment = prev_config['train'].get('mirror_augment', False)

    start_time = time.time()
    with tf.Graph().as_default(), tflib.create_session(tf_config).as_default():  # pylint: disable=not-context-manager
        _G, _D, Gs = misc.load_pkl(self._network_pkl)
        self._evaluate(Gs, num_gpus=num_gpus)
    self._eval_time = time.time() - start_time

    if log_results:
        summary = self.get_result_str()
        if run_dir is not None:
            log_path = os.path.join(run_dir, 'metric-%s.txt' % self.name)
            # Logger tees stdout into the per-metric log file.
            with dnnlib.util.Logger(log_path, 'a'):
                print(summary)
        else:
            print(summary)
def _reset(self, network_pkl=None, run_dir=None, data_dir=None,
           dataset_args_train=None, dataset_args=None, mirror_augment=None):
    """Clear cached evaluation state and (re)record configuration.

    Closes any previously opened dataset object, then stores the supplied
    settings. When ``dataset_args`` or ``mirror_augment`` is missing and a
    ``run_dir`` is given, the train/eval dataset configurations and the
    mirror-augmentation flag are recovered from that run's saved config.
    """
    if self._dataset_obj is not None:
        self._dataset_obj.close()

    self._network_pkl = network_pkl
    self._data_dir = data_dir
    self._dataset_args = dataset_args
    self._dataset_args_train = dataset_args_train
    self._dataset_obj = None
    self._mirror_augment = mirror_augment
    self._eval_time = 0
    self._results = []

    if (dataset_args is None or mirror_augment is None) and run_dir is not None:
        run_config = misc.parse_config_for_previous_run(run_dir)
        # Evaluation wants a deterministic, unrestricted pass over the data,
        # so the same overrides apply to both the train and eval configs.
        overrides = dict(shuffle_mb=0, max_images=None, skip_images=None)
        self._dataset_args_train = dict(run_config['dataset'], **overrides)
        self._dataset_args = dict(run_config['dataset_eval'], **overrides)
        self._mirror_augment = run_config['train'].get('mirror_augment', False)
def run(self, network_pkl, run_dir=None, dataset_args=None, mirror_augment=None,
        num_gpus=1, tf_config=None, log_results=True, model_type="rignet"):
    """Evaluate this metric for an encoder + generator pickle pair.

    Loads (E, G, D, Gs) from ``network_pkl`` and a separate inversion
    network from ``config.INVERSION_PICKLE_DIR``, runs ``_evaluate``, and
    optionally logs the result string to stdout, a per-run log file, and a
    result file under ``config.EVALUATION_DIR``.

    Args:
        network_pkl: Path of the pickle holding (E, G, D, Gs) networks.
        run_dir: Optional training-run directory for config recovery and
            the metric log file.
        dataset_args: Dataset configuration dict, or None to recover it
            from ``run_dir``.
        mirror_augment: Mirror-augmentation flag, or None to recover it
            from ``run_dir``.
        num_gpus: Number of GPUs forwarded to ``_evaluate``.
        tf_config: TensorFlow session configuration dict.
        log_results: When True, print and persist the result string.
        model_type: Identifier of the encoder variant being evaluated.
    """
    create_dir(config.EVALUATION_DIR, exist_ok=True)

    self._network_pkl = network_pkl
    self._dataset_args = dataset_args
    self._mirror_augment = mirror_augment
    self._results = []
    self.model_type = model_type

    # Recover missing settings from the previous training run's config.
    if (dataset_args is None or mirror_augment is None) and run_dir is not None:
        prev_config = misc.parse_config_for_previous_run(run_dir)
        self._dataset_args = dict(prev_config['dataset'])
        self._dataset_args['shuffle_mb'] = 0  # deterministic order for evaluation
        self._mirror_augment = prev_config['train'].get('mirror_augment', False)

    start_time = time.time()
    with tf.Graph().as_default(), tflib.create_session(tf_config).as_default():  # pylint: disable=not-context-manager
        E, _G, _D, Gs = misc.load_pkl(self._network_pkl)
        print("Loaded Encoder")
        Inv, _, _, _ = misc.load_pkl(config.INVERSION_PICKLE_DIR)
        print("Loaded Inv")
        self._evaluate(Gs, E, Inv, num_gpus=num_gpus)
    self._eval_time = time.time() - start_time

    if log_results:
        summary = self.get_result_str()
        if run_dir is not None:
            log_path = os.path.join(run_dir, 'metric-%s.txt' % self.name)
            with dnnlib.util.Logger(log_path, 'a'):
                print(summary)
        else:
            print(summary)
        # Also persist the result under the shared evaluation directory,
        # keyed by the pickle it was computed from.
        result_path = os.path.join(
            config.EVALUATION_DIR,
            "result_" + convert_pickle_path_to_name(self._network_pkl) + ".txt")
        write_to_file(summary + "\n\n\n", result_path)
def _reset(self, network_pkl=None, run_dir=None, data_dir=None,
           dataset_args=None, mirror_augment=None):
    """Clear cached evaluation state and (re)record configuration.

    Closes any previously opened dataset object, then stores the supplied
    settings. When ``dataset_args`` or ``mirror_augment`` is missing and a
    ``run_dir`` is given, they are recovered from that run's saved config.
    """
    if self._dataset_obj is not None:
        self._dataset_obj.close()
    # Fix: drop the stale (now closed) handle so a later _reset/close
    # does not touch an already-closed dataset.
    self._dataset_obj = None
    self._network_pkl = network_pkl
    self._data_dir = data_dir
    self._dataset_args = dataset_args
    # Fix: the mirror_augment parameter was accepted (and tested below)
    # but never stored, so the flag was silently dropped.
    self._mirror_augment = mirror_augment
    self._eval_time = 0
    self._results = []

    if (dataset_args is None or mirror_augment is None) and run_dir is not None:
        run_config = misc.parse_config_for_previous_run(run_dir)
        self._dataset_args = dict(run_config['dataset'])
        self._dataset_args['shuffle_mb'] = 0  # deterministic order for evaluation
        # Fix: recover mirror_augment from the previous run, matching the
        # sibling _reset implementation; the fallback branch previously
        # checked mirror_augment but never set it.
        self._mirror_augment = run_config['train'].get('mirror_augment', False)