Example #1
    # build the training DataLoader (assumes `dataset`, `args.batch`, and the
    # `data_sampler` helper are defined earlier in the script)
    loader = data.DataLoader(
        dataset,
        batch_size=args.batch,
        sampler=data_sampler(dataset, shuffle=True,
                             distributed=args.distributed),
        drop_last=True,
    )

    if get_rank() == 0 and wandb is not None and args.wandb:
        wandb.init(project="dualstylegan")

    # load the pSp encoder that maps face images to intrinsic style codes
    ckpt = torch.load(args.encoder_path, map_location='cpu')
    opts = ckpt['opts']
    opts['checkpoint_path'] = args.encoder_path
    if 'learn_in_w' not in opts:
        opts['learn_in_w'] = True
    if 'output_size' not in opts:
        opts['output_size'] = 1024
    opts = Namespace(**opts)
    encoder = pSp(opts).to(device).eval()
    encoder.latent_avg = encoder.latent_avg.to(device)
    # perceptual (VGG) and identity losses used during fine-tuning
    vggloss = VGG19().to(device).eval()
    id_loss = id_loss.IDLoss(args.identity_path).to(device).eval()

    print('Encoder model successfully loaded!')

    # load the intrinsic/extrinsic style codes of the target style images
    instyle_dict = np.load(args.instyle_path, allow_pickle=True).item()
    exstyle_dict = np.load(args.exstyle_path, allow_pickle=True).item()
    path = args.image_path
    instyles = []
    exstyles = []
    Simgs = []
    for filename in instyle_dict.keys():
        instyle = instyle_dict[filename]
        exstyle = exstyle_dict[filename]
        # collect each style image together with its codes (assumes `transform`
        # is the image preprocessing pipeline defined earlier in the script)
        S = transform(Image.open(os.path.join(path, filename)).convert('RGB')).unsqueeze(0)
        Simgs += [S]
        instyles += [torch.tensor(instyle)]
        exstyles += [torch.tensor(exstyle)]
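    # once the loop has collected the images and codes, they are typically
    # merged into batched tensors before fine-tuning; this is an assumed
    # follow-up sketch that expects each entry to keep a leading batch dimension
    Simgs = torch.cat(Simgs, dim=0)
    instyles = torch.cat(instyles, dim=0)
    exstyles = torch.cat(exstyles, dim=0)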
# load DualStyleGAN (this snippet assumes MODEL_DIR, MODEL_PATHS, style_type,
# and device are defined earlier, along with the DualStyleGAN/pSp/ICPTrainer imports)
generator = DualStyleGAN(1024, 512, 8, 2, res_index=6)  # 1024px output, 512-dim style, 8 mapping layers, channel_multiplier=2
generator.eval()
ckpt = torch.load(os.path.join(MODEL_DIR, style_type, 'generator.pt'),
                  map_location=lambda storage, loc: storage)
generator.load_state_dict(ckpt["g_ema"])
generator = generator.to(device)

# load encoder
model_path = os.path.join(MODEL_DIR, 'encoder.pt')
ckpt = torch.load(model_path, map_location='cpu')
opts = ckpt['opts']
opts['checkpoint_path'] = model_path
opts = Namespace(**opts)
opts.device = device
encoder = pSp(opts)
encoder.eval()
encoder = encoder.to(device)

# load extrinsic style code
exstyles = np.load(os.path.join(MODEL_DIR, style_type,
                                MODEL_PATHS[style_type + '-S']["name"]),
                   allow_pickle=True).item()
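
# pick one extrinsic style code by its filename key; the index below is a
# hypothetical choice, and the stored codes are assumed to be z+ codes of
# shape (1, 18, 512) as produced by the DualStyleGAN scripts
style_id = 26
stylename = list(exstyles.keys())[style_id]
exstyle_code = torch.tensor(exstyles[stylename]).to(device)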

# load sampler network (ICP samplers over the extrinsic style code: icptc
# models the 11-layer color part, icpts the 7-layer structure part)
icptc = ICPTrainer(np.empty([0, 512 * 11]), 128)
icpts = ICPTrainer(np.empty([0, 512 * 7]), 128)
ckpt = torch.load(os.path.join(MODEL_DIR, style_type, 'sampler.pt'),
                  map_location=lambda storage, loc: storage)
icptc.icp.netT.load_state_dict(ckpt['color'])
icpts.icp.netT.load_state_dict(ckpt['structure'])
# move the sampler networks to the inference device
icptc.icp.netT = icptc.icp.netT.to(device)
icpts.icp.netT = icpts.icp.netT.to(device)
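
With the generator, encoder, extrinsic style codes, and samplers loaded, a typical style-transfer call looks like the sketch below. It is a minimal sketch rather than the repo's exact script: the content image path, the chosen exstyle_code (an extrinsic z+ code of shape (1, 18, 512), e.g. one picked from exstyles as shown earlier), the truncation value, and the interpolation weights are all assumptions, and the keyword arguments follow the usage in the DualStyleGAN repository.

import torch
import torch.nn.functional as F
from PIL import Image
from torchvision import transforms

# hypothetical content image; faces should be aligned/cropped beforehand
transform = transforms.Compose([
    transforms.Resize(1024),
    transforms.CenterCrop(1024),
    transforms.ToTensor(),
    transforms.Normalize([0.5, 0.5, 0.5], [0.5, 0.5, 0.5]),
])
I = transform(Image.open('content.jpg').convert('RGB')).unsqueeze(0).to(device)

with torch.no_grad():
    # intrinsic style code of the content face from the pSp encoder (z+ space)
    _, instyle = encoder(F.adaptive_avg_pool2d(I, 256), randomize_noise=False,
                         return_latents=True, z_plus_latent=True,
                         return_z_plus_latent=True, resize=False)

    # map the extrinsic z+ code through the mapping network before use
    exstyle = generator.generator.style(
        exstyle_code.reshape(-1, 512)).reshape(exstyle_code.shape)

    # stylize: the first 7 interp weights control structure transfer,
    # the last 11 control color transfer
    img_gen, _ = generator([instyle], exstyle, z_plus_latent=True,
                           truncation=0.7, truncation_latent=0, use_res=True,
                           interp_weights=[0.6] * 7 + [1] * 11)
    img_gen = torch.clamp(img_gen.detach(), -1, 1)

# the samplers can likewise draw random extrinsic style codes for artistic
# portrait generation; shapes are assumed from the 512*7 / 512*11 setup above
with torch.no_grad():
    res_in = icpts.icp.netT(torch.randn(1, 128).to(device)).reshape(1, 7, 512)   # structure part
    ada_in = icptc.icp.netT(torch.randn(1, 128).to(device)).reshape(1, 11, 512)  # color part
    sampled_exstyle = torch.cat((res_in, ada_in), dim=1)  # an 18 x 512 extrinsic code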