import os
import shutil
import tempfile

from chainer.serializers import DictionarySerializer


def _torch_snapshot_object(trainer, target, filename, savefun):
    # serialize the trainer state (iteration, extensions, ...) into a dictionary
    s = DictionarySerializer()
    s.save(trainer)

    # unwrap the model to reach its state_dict, handling the TTS wrapper
    # (trainer.updater.model.model) and the ASR case (trainer.updater.model),
    # each optionally wrapped in a parallel module's .module
    if hasattr(trainer.updater.model, "model"):
        # (for TTS)
        if hasattr(trainer.updater.model.model, "module"):
            model_state_dict = trainer.updater.model.model.module.state_dict()
        else:
            model_state_dict = trainer.updater.model.model.state_dict()
    else:
        # (for ASR)
        if hasattr(trainer.updater.model, "module"):
            model_state_dict = trainer.updater.model.module.state_dict()
        else:
            model_state_dict = trainer.updater.model.state_dict()

    snapshot_dict = {
        "trainer": s.target,
        "model": model_state_dict,
        "optimizer": trainer.updater.get_optimizer("main").state_dict(),
    }

    # save the snapshot dictionary: write into a temporary directory first and
    # move it into place, so an interrupted write cannot leave a corrupt snapshot
    fn = filename.format(trainer)
    prefix = "tmp" + fn
    tmpdir = tempfile.mkdtemp(prefix=prefix, dir=trainer.out)
    tmppath = os.path.join(tmpdir, fn)
    try:
        savefun(snapshot_dict, tmppath)
        shutil.move(tmppath, os.path.join(trainer.out, fn))
    finally:
        shutil.rmtree(tmpdir)
from chainer.training import Trainer


def _torch_snapshot_object(trainer: Trainer, target, filename: str, savefunc):
    # serialize the trainer state into a dictionary
    s = DictionarySerializer()
    s.save(trainer)

    # FIXME: remove the ASR-specific branches
    if hasattr(trainer.updater.model, "model"):
        if hasattr(trainer.updater.model.model, "module"):
            model_state_dict = trainer.updater.model.model.module.state_dict()
        else:
            model_state_dict = trainer.updater.model.model.state_dict()
    else:
        if hasattr(trainer.updater.model, "module"):
            model_state_dict = trainer.updater.model.module.state_dict()
        else:
            model_state_dict = trainer.updater.model.state_dict()

    snapshot_dict = {
        "trainer": s.target,
        "model": model_state_dict,
        "optimizer": trainer.updater.get_optimizer("main").state_dict(),
    }

    # why such a complex path? writing into a temporary directory and then moving
    # the file keeps a crash from leaving a half-written snapshot behind
    fn = filename.format(trainer)
    prefix = "tmp" + fn
    tmpdir = tempfile.mkdtemp(prefix=prefix, dir=trainer.out)
    tmppath = os.path.join(tmpdir, fn)
    try:
        savefunc(snapshot_dict, tmppath)
        shutil.move(tmppath, os.path.join(trainer.out, fn))
    finally:
        shutil.rmtree(tmpdir)
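For context, a helper like this is normally wrapped into a Chainer training extension so that a snapshot is written once per epoch. Below is a minimal sketch, assuming the standard Chainer extension API (chainer.training.extension.make_extension) and torch.save as the save function; the wrapper name torch_snapshot and the filename pattern are illustrative, not taken from the code above.

import torch
from chainer.training import extension


def torch_snapshot(savefun=torch.save, filename="snapshot.ep.{.updater.epoch}"):
    """Return an extension that dumps trainer/model/optimizer state each epoch."""

    @extension.make_extension(trigger=(1, "epoch"), priority=-100)
    def torch_snapshot(trainer):
        # the helper formats `filename` with the trainer itself, so the raw
        # pattern is passed through unchanged
        _torch_snapshot_object(trainer, trainer, filename, savefun)

    return torch_snapshot

Such an extension could then be attached with trainer.extend(torch_snapshot()), mirroring how Chainer's built-in snapshot extension is registered.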