def run(self):
    """Export the trained model in both compact (frozen) and serving formats.

    Recreates ``self.model_export_dir`` from scratch, then writes:
      * ``compact.pb``   — a single frozen-graph file, and
      * ``serving/``     — a TF-Serving SavedModel with the default signature.
    """
    export_dir = self.model_export_dir
    config = self.gen_pred_config()

    # Start from a clean export directory (removes any previous export).
    rm_n_mkdir(export_dir)

    model_exporter = ModelExporter(config)
    # Frozen single-file inference graph.
    model_exporter.export_compact(
        filename="{}/compact.pb".format(export_dir))
    # SavedModel layout consumable by TensorFlow Serving.
    model_exporter.export_serving(
        os.path.join(export_dir, "serving"),
        signature_name="serving_default",
    )
    print(f"Saved model to {export_dir}.")
def run(self, save_only):
    """Run inference over every configured data directory, or only export the model.

    Args:
        save_only: when True, export the model (compact + serving formats) to
            ``self.model_export_dir`` and return without running inference.

    Side effects:
        Writes one ``<basename>.mat`` prediction file per input image into
        ``self.inf_output_dir/<idx>/`` (directories are recreated from scratch).
    """
    if self.inf_auto_find_chkpt:
        # Auto-select the most recent run: subdirectories of save_dir are
        # assumed to be integer-named (e.g. timestamps/run ids) — pick the max.
        self.inf_model_path = os.path.join(
            self.save_dir,
            str(max(
                int(name)
                for name in os.listdir(self.save_dir)
                if os.path.isdir(os.path.join(self.save_dir, name))
            )),
        )
        print(f"Inference model path: <{self.inf_model_path}>")
        print('-----Auto Selecting Checkpoint Basing On "%s" Through "%s" Comparison' % \
            (self.inf_auto_metric, self.inf_auto_comparator))
        # Pick the checkpoint with the best recorded metric value.
        model_path, stat = get_best_chkpts(
            self.inf_model_path, self.inf_auto_metric, self.inf_auto_comparator)
        print('Selecting: %s' % model_path)
        print('Having Following Statistics:')
        for key, value in stat.items():
            print('\t%s: %s' % (key, value))
    else:
        model_path = self.inf_model_path

    model_constructor = self.get_model()
    pred_config = PredictConfig(
        model=model_constructor(),
        session_init=get_model_loader(model_path),
        input_names=self.eval_inf_input_tensor_names,
        output_names=self.eval_inf_output_tensor_names)

    if save_only:
        # Export-only mode: no predictor is needed, so do not build one.
        exporter = ModelExporter(pred_config)
        rm_n_mkdir(self.model_export_dir)
        print('{}/compact.pb'.format(self.model_export_dir))
        exporter.export_compact(
            filename='{}/compact.pb'.format(self.model_export_dir))
        exporter.export_serving(
            os.path.join(self.model_export_dir, 'serving'),
            signature_name='serving_default')
        return

    # BUG FIX: previously the predictor (which builds a TF session/graph) was
    # constructed before the save_only branch, wasting work in export-only mode.
    predictor = OfflinePredictor(pred_config)

    for num, data_dir in enumerate(self.inf_data_list):
        save_dir = os.path.join(self.inf_output_dir, str(num))
        file_list = glob.glob(
            os.path.join(data_dir, '*{}'.format(self.inf_imgs_ext)))
        file_list.sort()  # ensure same order
        rm_n_mkdir(save_dir)

        for filename in file_list:
            filename = os.path.basename(filename)
            # NOTE: keeps everything before the FIRST dot, so dotted basenames
            # (e.g. "img.1.png") are truncated to "img" — preserved behavior.
            basename = filename.split('.')[0]
            print(data_dir, basename, end=' ', flush=True)

            # Load as RGB (OpenCV reads BGR by default).
            img = cv2.imread(os.path.join(data_dir, filename))
            img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)

            pred_map = self.__gen_prediction(img, predictor)
            sio.savemat(
                os.path.join(save_dir, '{}.mat'.format(basename)),
                {'result': [pred_map]})
            print(f"Finished. {datetime.now().strftime('%H:%M:%S.%f')}")
def save_model(model_paths, model, target="", compact=False):
    """Save one or more model checkpoints to a directory.

    Args:
        model_paths: iterable of checkpoint paths to export.
        model: tensorpack model definition used to build the predict graph.
        target: output directory; when empty, each model is saved next to
            its own checkpoint file.
        compact: if True, export a frozen ``.pb`` graph; otherwise export a
            TF-Serving SavedModel into ``target``.
    """
    from os import path
    from os import makedirs
    import tensorpack as tp
    from tensorpack.tfutils.varmanip import get_checkpoint_path
    from tensorpack.tfutils.export import ModelExporter
    import misc.logger as logger

    _L = logger.getLogger("Saver")
    # BUG FIX: was `target is ""` — identity comparison against a string
    # literal is implementation-dependent (SyntaxWarning on CPython >= 3.8).
    save_to_modeldir = target == ""
    for model_path in model_paths:
        # Resolve the actual checkpoint file behind this path.
        real_path = get_checkpoint_path(model_path)
        abs_p = path.realpath(model_path)
        if not path.isfile(abs_p):
            _L.error("{} is not a model file".format(model_path))
            continue
        # Save next to the checkpoint when no explicit target was given.
        if save_to_modeldir:
            target = path.dirname(abs_p)
        # Make sure the output folder exists.
        if not path.exists(target):
            makedirs(target)
        conf = tp.PredictConfig(
            session_init=tp.get_model_loader(model_path),
            model=model,
            input_names=["input"],
            output_names=["emb"])
        exporter = ModelExporter(conf)
        if compact:
            out = path.join(target, "{}.pb".format(path.basename(real_path)))
            # BUG FIX: log messages were swapped between the two branches.
            _L.info("compact saving {} to {}".format(
                path.basename(real_path), out))
            exporter.export_compact(out)
        else:
            _L.info("saving {} to {}".format(path.basename(real_path), target))
            exporter.export_serving(target)