Example #1
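Loads a saved toy-GAN checkpoint, restores the generator, and plots/prints the recorded training metrics.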
import argparse
import torch

# model_ (network definitions) and plot_learningcurves are assumed to come from
# the project's own modules; they are not shown in this excerpt.

if __name__ == '__main__':

    # The excerpt starts mid-way through the CLI definition; the parser set-up and
    # the --cp-path / --no-plots arguments are reconstructed from their use below.
    parser = argparse.ArgumentParser()
    parser.add_argument('--cp-path',
                        type=str,
                        default=None,
                        help='Path to checkpoint/model')
    parser.add_argument('--no-plots',
                        action='store_true',
                        default=False,
                        help='Disables plot of train/test losses')
    parser.add_argument(
        '--no-print',
        action='store_true',
        default=False,
        help='Disables print of reached best values of metrics')
    args = parser.parse_args()

    if args.cp_path is None:
        raise ValueError(
            'There is no checkpoint/model path. Use arg --cp-path to indicate the path!'
        )

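    # Rebuild the generator architecture and load the trained weights from the
    # checkpoint, mapping any GPU tensors to CPU.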
    generator = model_.Generator_toy(512)

    ckpt = torch.load(args.cp_path, map_location=lambda storage, loc: storage)
    generator.load_state_dict(ckpt['model_state'])

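    # The checkpoint also stores the training history, i.e. the metric curves
    # plotted below.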
    history = ckpt['history']

    if not args.no_plots:

        plot_learningcurves(history, 'gen_loss')
        plot_learningcurves(history, 'disc_loss')
        plot_learningcurves(history, 'FD')
        plot_learningcurves(history, 'quality_samples')
        plot_learningcurves(history, 'quality_modes')

    if not args.no_print:
        # Assumed body (the excerpt ends here): report the best values reached by
        # each metric, as described in the --no-print help text.
        print('Best (minimum) FD:', min(history['FD']))
        print('Best quality_samples:', max(history['quality_samples']))
        print('Best quality_modes:', max(history['quality_modes']))
Example #2
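Sets up a toy GAN training run: seeding, the toy dataset and loader, generator and discriminator, and the training loop.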
args.cuda = not args.no_cuda and torch.cuda.is_available()

torch.manual_seed(args.seed)
if args.cuda:
    torch.cuda.manual_seed(args.seed)

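# Synthetic toy dataset (presumably a 2-D Gaussian mixture, given the centers and
# covariance retrieved below) served through a standard DataLoader.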
toy_data = ToyData(args.toy_dataset, args.toy_length)
train_loader = torch.utils.data.DataLoader(toy_data,
                                           batch_size=args.batch_size,
                                           num_workers=args.workers)

centers = toy_data.get_centers()
cov = toy_data.get_cov()

# Both generator and discriminator use a hidden size of 512.
generator = model.Generator_toy(512).train()

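# The discriminator is handed the optimizer class and its hyper-parameters,
# presumably constructing its own optimizer internally.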
disc = model.Discriminator_toy(512, optim.Adam, args.lr,
                               (args.beta1, args.beta2)).train()

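# The generator's optimizer, by contrast, is built explicitly here.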
optimizer = optim.Adam(generator.parameters(),
                       lr=args.lr,
                       betas=(args.beta1, args.beta2))

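# TrainLoop ties the models, optimizer and data together (the remaining
# arguments of the call are truncated in this excerpt).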
trainer = TrainLoop(generator,
                    disc,
                    optimizer,
                    args.toy_dataset,
                    centers,
                    cov,
                    train_loader=train_loader,