Example #1
    checkpoint_location = \
        'checkpoints/{}/{}_training_history_{}.pkl'.format(
            expt, model, time_stamp)
    sep()
    logging.info('Saving: {}'.format(checkpoint_location))
    # Persist the training history object `h` with pickle, closing the
    # file handle once the dump completes.
    with open(checkpoint_location, 'wb') as f:
        pkl.dump(h, f)


if __name__ == "__main__":
    expt = 'mimic'
    model = 'ind_gan'
    marker = 'H'
    pr_time, fl_time = time_stp()

    logger(expt, model, fl_time, marker)

    log_time('Start', pr_time)
    # Parse command-line arguments for the comparison experiment.
    args = comparison_argparse()
    main(model=model,
         time_stamp=fl_time,
         device=args['device'],
         ally_classes=args['n_ally'],
         advr_1_classes=args['n_advr_1'],
         advr_2_classes=args['n_advr_2'],
         encoding_dim=args['dim'],
         hidden_dim=args['hidden_dim'],
         leaky=args['leaky'],
         test_size=args['test_size'],
         batch_size=args['batch_size'],
         n_epochs=args['n_epochs'],
         shuffle=args['shuffle'] == 1,
         lr_ally=args['lr_ally'],
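
The pickled training history can be read back for later inspection or plotting. Below is a minimal sketch, assuming the same checkpoints/ directory layout and that the history object is a plain picklable structure (e.g. a dict of per-epoch losses); `load_training_history` is a hypothetical helper, not part of the original script:

import pickle as pkl


# Hypothetical helper: reload a training history saved by the script above.
# The (expt, model, time_stamp) values must match those used at save time.
def load_training_history(expt, model, time_stamp):
    path = 'checkpoints/{}/{}_training_history_{}.pkl'.format(
        expt, model, time_stamp)
    with open(path, 'rb') as f:
        return pkl.load(f)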
Example #2
    # Save each adversary network's parameters to its own checkpoint file
    # under the experiment's ckpts/ directory.
    for idx, advr in enumerate(advrs):
        model_ckpt = 'ckpts/{}/models/{}_advr_{}.stop'.format(
            expt, template, idx)
        logging.info('Saving: {}'.format(model_ckpt))
        torch.save(advr.state_dict(), model_ckpt)


if __name__ == '__main__':
    expt = 'mnist'
    model = 'encd_pretrain'
    marker = 'A'

    pr_time, fl_time = time_stp()
    logger(expt, model, fl_time, marker)

    log_time('Start', pr_time)
    # Parse command-line arguments for this pre-training run.
    args = eigan_argparse()
    main(
        model=model,
        device=args['device'],
        ally_classes=args['n_ally'],
        advr_classes=args['n_advr'],
        batch_size=args['batch_size'],
        n_epochs=args['n_epochs'],
        lr_encd=args['lr_encd'],
        lr_ally=args['lr_ally'],
        lr_advr=args['lr_advr'],
        expt=args['expt'],
        marker=marker,
    )
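
To reuse the saved adversaries, the state dicts written above can be loaded back into freshly constructed networks. Below is a minimal sketch, assuming a `make_advr` factory (hypothetical) that rebuilds an adversary with the same architecture and hyperparameters used at save time:

import torch


# Hypothetical restore of the adversary checkpoints written above.
def load_advrs(expt, template, n_advrs, make_advr):
    advrs = []
    for idx in range(n_advrs):
        ckpt = 'ckpts/{}/models/{}_advr_{}.stop'.format(expt, template, idx)
        advr = make_advr()                       # build an uninitialized network
        advr.load_state_dict(torch.load(ckpt))   # load the saved parameters
        advr.eval()                              # switch to inference mode
        advrs.append(advr)
    return advrs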