Example #1
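# Two adversarial models: a StyleGAN generator that maps heatmaps to images, and an
# hourglass network (hg) that maps images back to skeleton heatmaps ("tuda"/"obratno"
# is Russian for "there"/"back"); each is wrapped with its own StyleGAN loss and a
# pair of learning rates.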
gan_model_tuda = StyleGanModel[HeatmapToImage](enc_dec.generator, StyleGANLoss(discriminator_img), (0.001/4, 0.0015/4))
gan_model_obratno = StyleGanModel[HG_skeleton](hg, StyleGANLoss(hm_discriminator), (2e-5, 0.0015/4))

style_opt = optim.Adam(enc_dec.style_encoder.parameters(), lr=1e-5)

writer = SummaryWriter(f"{Paths.default.board()}/hm2img{int(time.time())}")
WR.writer = writer

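# Fixed evaluation inputs: one 300-W image batch, landmark coordinates clamped to
# at most 1, their channel-summed Gaussian heatmap, and StyleGAN mixing noise.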
batch = next(LazyLoader.w300().loader_train_inf)
test_img = batch["data"].cuda()
test_landmarks = torch.clamp(next(LazyLoader.w300_landmarks(args.data_path).loader_train_inf).cuda(), max=1)
test_hm = heatmapper.forward(test_landmarks).sum(1, keepdim=True).detach()
test_noise = mixing_noise(batch_size, 512, 0.9, device)

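# Losses: a pSp-style reconstruction loss on generated images and a combined
# BCE + Wasserstein loss between predicted and target landmark heatmaps.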
psp_loss = PSPLoss().cuda()
mes_loss = MesBceWasLoss(heatmapper, bce_coef=10000/2, was_coef=100)

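# Exponential-moving-average (decay 0.99) copies of the generator and hourglass weights.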
image_accumulator = Accumulator(enc_dec.generator, decay=0.99, write_every=100)
hm_accumulator = Accumulator(hg, decay=0.99, write_every=100)

# Optional sanity check (commented out): generate images from the test heatmap and
# plot them next to the real images, with predicted and ground-truth landmarks overlaid.
# fake, _ = enc_dec.generate(test_hm, test_noise)
# plt_img = torch.cat([test_img[:3], fake[:3]]).detach().cpu()
# plt_lm = torch.cat([hg.forward(test_img)["mes"].coord[:3], test_landmarks[:3]]).detach().cpu()
# plot_img_with_lm(plt_img, plt_lm, nrows=2, ncols=3)


for i in range(100000):

    WR.counter.update(i)
Example #2
style_opt = optim.Adam(enc_dec.style_encoder.parameters(), lr=1e-5)

print(f"board path: {Paths.default.board()}/cardio{int(time.time())}")
writer = SummaryWriter(f"{Paths.default.board()}/cardio{int(time.time())}")
WR.writer = writer

#%%

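# Fixed evaluation inputs from the cardio dataset: an image batch, landmarks wrapped
# as a UniformMeasure2D01, their channel-summed Gaussian heatmap, and mixing noise.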
test_batch = next(LazyLoader.cardio().loader_train_inf)
test_img = test_batch["image"].cuda()
test_landmarks = next(LazyLoader.cardio_landmarks(args.data_path).loader_train_inf).cuda()
test_measure = UniformMeasure2D01(torch.clamp(test_landmarks, max=1))
test_hm = heatmapper.forward(test_measure.coord).sum(1, keepdim=True).detach()
test_noise = mixing_noise(batch_size, 512, 0.9, device)

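# pSp reconstruction loss with id_lambda=0 (presumably disabling its identity term)
# plus a BCE + Wasserstein loss between predicted and target landmark heatmaps.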
psp_loss = PSPLoss(id_lambda=0).cuda()
mes_loss = MesBceWasLoss(heatmapper, bce_coef=100000, was_coef=2000)

image_accumulator = Accumulator(enc_dec.generator, decay=0.99, write_every=100)
hm_accumulator = Accumulator(hg, decay=0.99, write_every=100)

for i in range(100000):

    WR.counter.update(i)

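    # Draw a fresh real image / landmark pair each iteration; the try block
    # catches errors raised by the data loaders.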
    try:
        real_img = next(LazyLoader.cardio().loader_train_inf)["image"].cuda()
        landmarks = next(LazyLoader.cardio_landmarks(args.data_path).loader_train_inf).cuda()
        measure = UniformMeasure2D01(torch.clamp(landmarks, max=1))
        heatmap_sum = heatmapper.forward(measure.coord).sum(1, keepdim=True).detach()
    except Exception as e:
Example #3
writer = SummaryWriter(f"{Paths.default.board()}/hm2img{int(time.time())}")
WR.writer = writer

N = 3148

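# Gaussian heat-map renderer used to turn landmark coordinates into target maps.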
heatmapper = ToGaussHeatMap(256, 4)

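# Fixed evaluation inputs: a 300-W batch with normalized keypoints taken from its
# metadata, their channel-summed Gaussian heatmap, and StyleGAN mixing noise.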
batch = next(LazyLoader.w300().loader_train_inf)
test_img = batch["data"].cuda()
test_landmarks = torch.clamp(batch["meta"]["keypts_normalized"].cuda(), max=1)
test_hm = heatmapper.forward(test_landmarks).sum(1, keepdim=True).detach()
test_noise = mixing_noise(batch_size, 512, 0.9, device)

psp_loss = PSPLoss(l2_lambda=2.0).cuda()

image_accumulator = Accumulator(enc_dec.generator, decay=0.99, write_every=100)
enc_accumulator = Accumulator(enc_dec.style_encoder,
                              decay=0.99,
                              write_every=100)

for i in range(100000):

    WR.counter.update(i)

    batch = next(LazyLoader.w300().loader_train_inf)

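    # Note: real_img is drawn from a second, independent batch, so these images are
    # not paired with the landmarks taken from `batch` below.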
    real_img = next(LazyLoader.w300().loader_train_inf)["data"].cuda()
    landmarks = torch.clamp(batch["meta"]["keypts_normalized"].cuda(), max=1)