def load_model_for_eval(self, model_name):
    untrained_trunk = model_name in const.UNTRAINED_TRUNK_ALIASES
    untrained_trunk_and_embedder = model_name in const.UNTRAINED_TRUNK_AND_EMBEDDER_ALIASES
    trunk_model = self.get_trunk_model(self.args.models["trunk"])
    if untrained_trunk:
        # Untrained trunk: skip weight loading and use an identity embedder.
        embedder_model = architectures.misc_models.Identity()
    else:
        embedder_model = self.get_embedder_model(
            self.args.models["embedder"], self.base_model_output_size)
        if not untrained_trunk_and_embedder:
            # Resolve trained-model aliases to the suffix of the best saved checkpoint.
            if model_name in const.TRAINED_ALIASES:
                _, model_name = pml_cf.latest_version(self.model_folder,
                                                      best=True)
            pml_cf.load_dict_of_models(
                {
                    "trunk": trunk_model,
                    "embedder": embedder_model
                },
                model_name,
                self.model_folder,
                self.device,
                log_if_successful=True,
                assert_success=True)
    return torch.nn.DataParallel(trunk_model), torch.nn.DataParallel(
        embedder_model)
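The snippet above leans on pml_cf.latest_version(folder, best=True) returning an (epoch, suffix) pair for the most recent saved checkpoint. As a rough illustration only (the file-naming pattern below is an assumption, not pytorch-metric-learning's actual code), such a lookup could be sketched like this:

import glob
import os
import re

def latest_version_sketch(model_folder, best=False):
    # Scan for checkpoint files such as "trunk_10.pth" or "trunk_best10.pth"
    # (assumed naming), parse the numeric suffix, and return (epoch, suffix).
    pattern = "trunk_best*.pth" if best else "trunk_*.pth"
    epochs = []
    for path in glob.glob(os.path.join(model_folder, pattern)):
        name = os.path.basename(path)
        if not best and "best" in name:
            continue
        match = re.search(r"(\d+)\.pth$", name)
        if match:
            epochs.append(int(match.group(1)))
    if not epochs:
        return None, None
    epoch = max(epochs)
    return epoch, ("best%d" % epoch if best else str(epoch))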
def latest_sub_experiment_epochs(sub_experiment_dir_dict):
    latest_epochs = {}
    for sub_experiment_name, folders in sub_experiment_dir_dict.items():
        model_folder = folders["models"]
        latest_epochs[sub_experiment_name], _ = pml_cf.latest_version(
            model_folder)
    return latest_epochs
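A hypothetical call, just to show the shape of the input this helper expects (the sub-experiment names and paths below are invented):

sub_experiment_dir_dict = {
    "split_scheme_0": {"models": "experiments/exp1/split_scheme_0/saved_models"},
    "split_scheme_1": {"models": "experiments/exp1/split_scheme_1/saved_models"},
}
latest_epochs = latest_sub_experiment_epochs(sub_experiment_dir_dict)
# e.g. {"split_scheme_0": 100, "split_scheme_1": 87}  (values illustrative)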
Example #3
def latest_sub_experiment_epochs(sub_experiment_dir_dict):
    latest_epochs = {}
    for sub_experiment_name, folders in sub_experiment_dir_dict.items():
        model_folder = folders[0]
        # Fall back to epoch 0 when no saved "trunk_*.pth" checkpoint exists yet.
        latest_epochs[sub_experiment_name] = pml_cf.latest_version(
            model_folder, "trunk_*.pth") or 0
    return latest_epochs
def load_model_for_eval(model_factory,
                        model_args,
                        model_name,
                        factory_kwargs,
                        model_folder=None,
                        device=None):
    untrained_trunk = model_name in const.UNTRAINED_TRUNK_ALIASES
    untrained_trunk_and_embedder = model_name in const.UNTRAINED_TRUNK_AND_EMBEDDER_ALIASES
    trunk_model = model_factory.create(named_specs=model_args,
                                       subset="trunk",
                                       **factory_kwargs)
    if untrained_trunk:
        embedder_model = pml_cf.Identity()
    else:
        embedder_model = model_factory.create(named_specs=model_args,
                                              subset="embedder",
                                              **factory_kwargs)
        if not untrained_trunk_and_embedder:
            if model_name in const.TRAINED_ALIASES:
                _, model_name = pml_cf.latest_version(model_folder, best=True)
            pml_cf.load_dict_of_models(
                {
                    "trunk": trunk_model,
                    "embedder": embedder_model
                },
                model_name,
                model_folder,
                device,
                log_if_successful=True,
                assert_success=True)
    return torch.nn.DataParallel(trunk_model), torch.nn.DataParallel(
        embedder_model)
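The load_dict_of_models call is what actually restores the saved weights. A minimal sketch of what such a helper might do, assuming checkpoints are named "<model>_<suffix>.pth" (an assumption for illustration, not pml_cf's real implementation):

import os
import torch

def load_dict_of_models_sketch(models, suffix, model_folder, device,
                               assert_success=False):
    # For each named model, look for "<name>_<suffix>.pth" and load its state dict.
    for name, model in models.items():
        path = os.path.join(model_folder, "%s_%s.pth" % (name, suffix))
        if os.path.isfile(path):
            model.load_state_dict(torch.load(path, map_location=device))
        elif assert_success:
            raise FileNotFoundError(path)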
Example #5
def get_eval_dict(self, best, untrained_trunk, untrained_trunk_and_embedder,
                  randomize_embedder):
    eval_dict = {}
    if untrained_trunk:
        eval_dict[const.UNTRAINED_TRUNK] = (const.UNTRAINED_TRUNK_INT, True)
    if untrained_trunk_and_embedder:
        eval_dict[const.UNTRAINED_TRUNK_AND_EMBEDDER] = (
            const.UNTRAINED_TRUNK_AND_EMBEDDER_INT, randomize_embedder)
    if best:
        best_epoch, _ = pml_cf.latest_version(self.model_folder, best=True)
        eval_dict["best"] = (best_epoch, True)
    return eval_dict
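The returned eval_dict maps each evaluation target to an (epoch-or-marker, flag) pair. A hypothetical driver loop consuming it might look like this; tester and evaluate_model are made-up names for illustration:

eval_dict = tester.get_eval_dict(best=True,
                                 untrained_trunk=True,
                                 untrained_trunk_and_embedder=False,
                                 randomize_embedder=False)
for eval_name, (epoch, flag) in eval_dict.items():
    evaluate_model(eval_name, epoch, flag)  # evaluate_model is hypothetical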
def load_model_for_eval(self, suffix):
    untrained = suffix == "-1"
    trunk_model = self.get_trunk_model(self.args.models["trunk"])
    if not untrained:
        if suffix == "best":
            _, suffix = pml_cf.latest_version(self.model_folder, best=True)
        embedder_model = self.get_embedder_model(self.args.models["embedder"],
                                                 self.base_model_output_size)
        pml_cf.load_dict_of_models(
            {"trunk": trunk_model, "embedder": embedder_model},
            suffix,
            self.model_folder,
            self.device,
            log_if_successful=True,
            assert_success=True
        )
    else:
        embedder_model = architectures.misc_models.Identity()
    return torch.nn.DataParallel(trunk_model), torch.nn.DataParallel(embedder_model)
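Once the DataParallel-wrapped pair is returned, a typical (illustrative) evaluation pass would switch both modules to eval mode and run them back to back; tester and images are made-up names here:

trunk, embedder = tester.load_model_for_eval("best")
trunk.eval()
embedder.eval()
with torch.no_grad():
    embeddings = embedder(trunk(images))  # images: a batch tensor, made up here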
def should_train(self, num_epochs, split_scheme_name):
    best_epoch, _ = pml_cf.latest_version(self.model_folder, best=True)
    return self.hooks.patience_remaining(
        self.epoch, best_epoch, self.args.patience
    ) and self.latest_sub_experiment_epochs[split_scheme_name] < num_epochs
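should_train keeps training only while early-stopping patience remains and the sub-experiment still has epochs left. As a rough sketch of the patience test (an assumption about hooks.patience_remaining's behavior, not its actual code):

def patience_remaining_sketch(current_epoch, best_epoch, patience):
    # Continue while no early stopping is configured, no best model exists yet,
    # or fewer than `patience` epochs have passed since the best epoch.
    if patience is None or best_epoch is None:
        return True
    return (current_epoch - best_epoch) <= patience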