Example #1
    def load(cls, restore_dir: Union[Path, str]) -> "ConfigFileRecordable":
        d = Path(restore_dir) / full_classname(cls)

        # the class directory is expected to contain exactly one config file
        _, _, files = list(os.walk(d))[0]

        logger.info(f"Loading Config File {files[0]} from {d}")
        return ConfigFileRecordable(d / files[0])
Example #2
 def load(cls: Type[RecordableTorchModuleMapping_T],
          restore_dir: Union[Path, str]) -> RecordableTorchModuleMapping_T:
     d = Path(restore_dir) / full_classname(cls)
     records: Dict[str, RecordableTorchModule] = {}  # plain dict keeps insertion order (Python 3.7+)
     for subdir in sorted(os.listdir(d)):
         logger.debug(f"Loading {subdir}")
         records[subdir] = runtime_load_recordable_module(d / subdir)
     return cls(records)
Example #3
    def load(cls, restore_dir: Union[Path, str]) -> Query1CodeNAndAdamW:
        full_dir = Path(restore_dir) / full_classname(cls)
        logger.debug(f"Loading Query1CodeN & AdamW optimizer from {full_dir}")
        model = Query1CodeN.load(full_dir)

        state_dict = torch.load(full_dir / "adamw_state_dict.pth")
        optimizer = AdamW(model.parameters())
        optimizer.load_state_dict(state_dict)
        return Query1CodeNAndAdamW(model, optimizer)
Example #4
    def load(cls: Type[ModelAndAdamWRecordable_T],
             restore_dir: Union[Path, str]) -> ModelAndAdamWRecordable_T:
        full_dir = Path(restore_dir) / full_classname(cls)
        logger.debug(f"Loading {full_classname(cls)} & AdamW optimizer from {full_dir}")
        model = cls.model_type.load(full_dir)

        state_dict = torch.load(full_dir / "adamw_state_dict.pth")
        optimizer = AdamW(model.parameters())
        optimizer.load_state_dict(state_dict)
        return cls(model, optimizer)
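
For context, the matching save side is not shown here; a minimal sketch follows, assuming the instance keeps its parts in self.model and self.optimizer and that the wrapped model type has its own save(dir) method (these names are assumptions, not taken from the source).

    def save(self, save_dir: Union[Path, str]) -> None:
        # Hypothetical counterpart to load(): writes the layout that load() expects.
        full_dir = Path(save_dir) / full_classname(type(self))
        full_dir.mkdir(parents=True, exist_ok=True)
        self.model.save(full_dir)  # assumed: the wrapped recordable model saves itself here
        torch.save(self.optimizer.state_dict(), full_dir / "adamw_state_dict.pth")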
Example #5
    def load(cls, restore_dir: Union[Path, str]) -> "DictRecordable":
        d = Path(restore_dir) / full_classname(cls)
        logger.info(f"Loading State dict from {d}")

        # state = pickle.load(open(d / "state_dict.txt", "r"))
        with open(d / "state_dict.json", "r") as f:
            state = json.load(f)

        return DictRecordable(state)
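
A possible round trip with DictRecordable, sketched under assumptions: the save() call and the checkpoint path are made up for illustration; only the dict constructor and load() come from the code above.

    state = DictRecordable({"epoch": 3, "best_loss": 0.42})
    state.save("runs/exp_001")                      # assumed save API, not shown here
    restored = DictRecordable.load("runs/exp_001")  # reads state_dict.json back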
Example #6
 def load(cls: Type[Module_T], restore_dir: Union[Path, str]) -> Module_T:
     full_dir = Path(restore_dir) / full_classname(cls)
     logger.debug(f"Loading {full_classname(cls)} from {full_dir}")
     if (full_dir / "state_dict.pth").exists():
         # kept for compatibility with a previous version that stored a state_dict;
         # not ideal, since the module is first rebuilt with default constructor
         # params before the weights are loaded; this needs a rework
         state_dict = torch.load(full_dir / "state_dict.pth")
         module = cls()
         module.load_state_dict(state_dict)
         return module
     else:
         module = torch.load(full_dir / "model.bin")
         return module
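
The else branch deserializes a whole pickled module from model.bin, which the comment prefers because the state_dict path first rebuilds the module with default constructor parameters. A minimal sketch of the save side for that layout, assuming nothing more than torch.save of the full module (not shown in the source):

    def save(self, save_dir: Union[Path, str]) -> None:
        # Hypothetical counterpart: pickling the whole module lets load() skip
        # reconstructing it with default constructor parameters.
        full_dir = Path(save_dir) / full_classname(type(self))
        full_dir.mkdir(parents=True, exist_ok=True)
        torch.save(self, full_dir / "model.bin")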
Example #7
    def load(cls: Type[PretrainedRec_T],
             restore_dir: Union[Path, str]) -> PretrainedRec_T:
        full_dir = Path(restore_dir) / full_classname(cls)
        logger.info(f"Loading HuggingFace Pretrained model from {full_dir}")
        # the directory is expected to contain a single subdirectory named after
        # the wrapped pretrained model class
        _, dirs, _ = list(os.walk(full_dir))[0]
        model_cls_name = dirs[0]
        logger.info(
            f"Loading HuggingFace {model_cls_name} model from {full_dir}/{model_cls_name}"
        )
        klass = runtime_import(model_cls_name)
        assert issubclass(klass, PreTrainedModel)

        model = klass.from_pretrained(str(full_dir / model_cls_name))

        return cls(model)
Example #8
 def load(cls, restore_dir: Union[Path, str]) -> QueryCodeSiamese:
     d = Path(restore_dir) / full_classname(cls)
     records = runtime_load_recordable_mapping(d)
     return cls(**records)
Example #9
 def load(cls, restore_dir: Union[Path, str]) -> "HoconConfigRecordable":
     conf_file = Path(restore_dir) / full_classname(cls) / "config.conf"
     logger.info(f"Loading Config File from {conf_file}")
     conf: ConfigTree = ConfigFactory.parse_file(conf_file)
     return HoconConfigRecordable(conf)
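
All of the load methods above share the same directory convention: restore_dir holds one subdirectory per recordable, named by full_classname(cls). A hedged usage sketch; the checkpoint path is hypothetical and assumes the matching save side already wrote that layout:

    # "runs/exp_001" is a made-up checkpoint directory.
    conf = HoconConfigRecordable.load("runs/exp_001")
    model = QueryCodeSiamese.load("runs/exp_001")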