Example #1
def save_model(model: Seq2Seq, discriminator: Discriminator, main_optimizer,
               discriminator_optimizer, filename):
    # Move every parameter to the CPU so the checkpoint can be loaded on any device
    model_state_dict = model.state_dict()
    for key in model_state_dict.keys():
        model_state_dict[key] = model_state_dict[key].cpu()
    discriminator_state_dict = discriminator.state_dict()
    for key in discriminator_state_dict.keys():
        discriminator_state_dict[key] = discriminator_state_dict[key].cpu()
    # Bundle weights, architecture hyperparameters and optimizer state in a single file
    torch.save(
        {
            'model': model_state_dict,
            'encoder_n_layers': model.encoder_n_layers,
            'decoder_n_layers': model.decoder_n_layers,
            'rnn_size': model.rnn_size,
            'dropout': model.dropout,
            'output_size': model.output_size,
            'embedding_dim': model.embedding_dim,
            'bidirectional': model.bidirectional,
            'attention': model.use_attention,
            'max_length': model.max_length,
            'enable_embedding_training': model.enable_embedding_training,
            'discriminator': discriminator_state_dict,
            'discriminator_hidden_size': discriminator.hidden_size,
            'discriminator_n_layers': discriminator.n_layers,
            'main_optimizer': main_optimizer.state_dict(),
            'discriminator_optimizer': discriminator_optimizer.state_dict()
        }, filename)
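For completeness, here is one possible counterpart that restores the checkpoint written by save_model. This is a minimal sketch only: the keyword arguments passed to Seq2Seq and Discriminator are assumptions about their constructors for illustration, not signatures taken from the example.

import torch

def load_model(filename, device='cpu'):
    # Load the bundled checkpoint onto the requested device
    checkpoint = torch.load(filename, map_location=device)
    # NOTE: the constructor signatures below are assumptions, not from the example
    model = Seq2Seq(output_size=checkpoint['output_size'],
                    embedding_dim=checkpoint['embedding_dim'],
                    rnn_size=checkpoint['rnn_size'],
                    encoder_n_layers=checkpoint['encoder_n_layers'],
                    decoder_n_layers=checkpoint['decoder_n_layers'],
                    dropout=checkpoint['dropout'],
                    bidirectional=checkpoint['bidirectional'],
                    use_attention=checkpoint['attention'],
                    max_length=checkpoint['max_length'],
                    enable_embedding_training=checkpoint['enable_embedding_training'])
    model.load_state_dict(checkpoint['model'])
    discriminator = Discriminator(
        hidden_size=checkpoint['discriminator_hidden_size'],
        n_layers=checkpoint['discriminator_n_layers'])
    discriminator.load_state_dict(checkpoint['discriminator'])
    return model.to(device), discriminator.to(device)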
Example #2
    # Update learning rates
    lr_scheduler_G.step()
    lr_scheduler_D_A.step()
    lr_scheduler_D_B.step()

    # Save model checkpoints
    dataset_name = opt.dataroot.split('/')[-1]
    output_dir = './data/checkpoints/output_{}_{}'.format(info, dataset_name)

    if not os.path.exists(output_dir):
        os.mkdir(output_dir)

    torch.save(netG_A2B.state_dict(),
               '{}/netG_A2B_{}.pth'.format(output_dir, epoch))
    torch.save(netG_B2A.state_dict(),
               '{}/netG_B2A_{}.pth'.format(output_dir, epoch))
    torch.save(netD_A.state_dict(),
               '{}/netD_A_{}.pth'.format(output_dir, epoch))
    torch.save(netD_B.state_dict(),
               '{}/netD_B_{}.pth'.format(output_dir, epoch))
###################################
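For reference, a sketch of how the CycleGAN checkpoints saved above could be reloaded for inference. Generator stands in for whatever class was used to build netG_A2B and netG_B2A; its constructor, like the rest of this snippet, is an assumption rather than code from the example.

import torch

# Rebuild the generators with the same architecture used during training
# (Generator is a placeholder class name, not taken from the example)
netG_A2B = Generator()
netG_B2A = Generator()

output_dir = './data/checkpoints/output_{}_{}'.format(info, dataset_name)
netG_A2B.load_state_dict(
    torch.load('{}/netG_A2B_{}.pth'.format(output_dir, epoch),
               map_location='cpu'))
netG_B2A.load_state_dict(
    torch.load('{}/netG_B2A_{}.pth'.format(output_dir, epoch),
               map_location='cpu'))

# Switch to evaluation mode before translating images between domains
netG_A2B.eval()
netG_B2A.eval()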
        print(
            "[%d/%d][%d/%d] Loss_D: %.4f Loss_G: %.4f D(x): %.4f D(G(z)): %.4f / %.4f"
            % (
                epoch,
                opt.niter,
                i,
                len(dataloader),
                errD.item(),
                errG.item(),
                D_x,
                D_G_z1,
                D_G_z2,
            )
        )
        if i % 100 == 0:
            vutils.save_image(
                real_cpu, "%s/real_samples.png" % opt.outf, normalize=True
            )
            fake = generator(fixed_noise)
            vutils.save_image(
                fake.detach(),
                "%s/fake_samples_epoch_%03d.png" % (opt.outf, epoch),
                normalize=True,
            )

    # do checkpointing
    torch.save(generator.state_dict(), "%s/netG_epoch_%d.pth" % (opt.outf, epoch))
    torch.save(discriminator.state_dict(), "%s/netD_epoch_%d.pth" % (opt.outf, epoch))
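A short, hedged sketch of resuming from these per-epoch files: it assumes generator and discriminator have already been rebuilt with the training architecture, and resume_epoch is a hypothetical variable naming the checkpoint to restore.

import torch

# resume_epoch is hypothetical: the epoch number of the checkpoint to restore
generator.load_state_dict(
    torch.load("%s/netG_epoch_%d.pth" % (opt.outf, resume_epoch)))
discriminator.load_state_dict(
    torch.load("%s/netD_epoch_%d.pth" % (opt.outf, resume_epoch)))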