# Example #1
# 0
def verka_cardio_w2(enc):
    """Evaluate encoder `enc` on the cardio test split.

    Accumulates the OT-Wasserstein distance between predicted landmark
    coordinates and the ground-truth keypoints over every test batch,
    then averages by the number of test samples.

    Args:
        enc: model mapping an image batch (CUDA tensor) to a dict whose
            "mes" entry exposes predicted landmark coordinates in `.coord`.

    Returns:
        float: mean Wasserstein loss per test sample.
    """
    sum_loss = 0.0
    # Normalize by sample count, not batch count, since per-batch losses
    # are summed with .sum() below.
    n = len(LazyLoader.cardio().test_dataset)
    for batch in LazyLoader.cardio().test_loader:  # enumerate index was unused
        data = batch['image'].cuda()
        landmarks_ref = batch["keypoints"].cuda()
        pred = enc(data)["mes"].coord
        sum_loss += OTWasDist().forward(pred, landmarks_ref).sum().item()
    # Compute the average once instead of twice.
    avg_loss = sum_loss / n
    print("test brule_loss: ", avg_loss)
    return avg_loss
# Example #2
# 0
from dataset.lazy_loader import LazyLoader, W300DatasetLoader, CelebaWithKeyPoints, Celeba
from sklearn.neighbors import NearestNeighbors
import numpy as np
import matplotlib.pyplot as plt
from dataset.toheatmap import ToGaussHeatMap
from dataset.probmeasure import UniformMeasure2D01
import pandas as pd
import networkx as nx
import ot
from barycenters.sampler import Uniform2DBarycenterSampler, Uniform2DAverageSampler
from parameters.path import Paths
from joblib import Parallel, delayed

# Number of training samples drawn for barycenter estimation.
N = 300
# Training split of the cardio landmark dataset (loaded lazily on first access).
dataset = LazyLoader.cardio().dataset_train
# Number of points per padded landmark measure.
padding = 200
# Uniform weights over the padded point set.
# NOTE(review): `prob` is not used in the visible code — confirm whether the
# barycenter sampler consumes it elsewhere or it is dead.
prob = np.ones(padding) / padding

# Pick N distinct sample indices from the first 700 training items.
# NOTE(review): assumes the training split has at least 700 samples — confirm.
data_ids = np.random.permutation(np.arange(0, 700))[0:N]


def LS(k):
    """Return the keypoints of training sample `k` as a NumPy array."""
    sample = dataset[k]
    return sample['keypoints'].numpy()


# Landmark sets (NumPy arrays) for the sampled training indices.
ls = [LS(k) for k in data_ids]

# Barycenter sampler over 2D uniform measures; dir_alpha is the Dirichlet
# concentration used when sampling barycentric weights.
bc_sampler = Uniform2DBarycenterSampler(padding, dir_alpha=1.0)

# Mean (barycenter) of the sampled landmark sets.
bc = bc_sampler.mean(ls)
# Example #3
# 0
# Heatmap discriminator: single-channel input, restored from checkpoint `weights`.
hm_discriminator = Discriminator(image_size, input_nc=1, channel_multiplier=1)
hm_discriminator.load_state_dict(weights["dh"])
hm_discriminator = hm_discriminator.cuda()

# Forward GAN (heatmap -> image) and reverse GAN (image -> skeleton/heatmap).
# Learning-rate pairs are (generator_lr, discriminator_lr).
gan_model_tuda = StyleGanModel[HeatmapToImage](enc_dec.generator, StyleGANLoss(discriminator_img), (0.001/4, 0.0015/4))
gan_model_obratno = StyleGanModel[HG_skeleton](hg, StyleGANLoss(hm_discriminator, r1=3), (2e-5, 0.0015/4))

# Separate optimizer for the style encoder branch.
style_opt = optim.Adam(enc_dec.style_encoder.parameters(), lr=1e-5)

print(f"board path: {Paths.default.board()}/cardio{int(time.time())}")
writer = SummaryWriter(f"{Paths.default.board()}/cardio{int(time.time())}")
WR.writer = writer

#%%

# Fixed evaluation batch: image, landmarks, their heatmap rendering, and noise,
# reused for periodic visualization/metrics during training.
test_batch = next(LazyLoader.cardio().loader_train_inf)
test_img = test_batch["image"].cuda()
test_landmarks = next(LazyLoader.cardio_landmarks(args.data_path).loader_train_inf).cuda()
# Clamp to the unit square expected by UniformMeasure2D01.
test_measure = UniformMeasure2D01(torch.clamp(test_landmarks, max=1))
test_hm = heatmapper.forward(test_measure.coord).sum(1, keepdim=True).detach()
test_noise = mixing_noise(batch_size, 512, 0.9, device)

psp_loss = PSPLoss(id_lambda=0).cuda()
mes_loss = MesBceWasLoss(heatmapper, bce_coef=100000, was_coef=2000)

# EMA-style accumulators that periodically write averaged weights.
image_accumulator = Accumulator(enc_dec.generator, decay=0.99, write_every=100)
hm_accumulator = Accumulator(hg, decay=0.99, write_every=100)

# NOTE(review): this loop body contains only the counter update — the snippet
# appears truncated; the actual training steps presumably follow in the
# original file.
for i in range(100000):

    WR.counter.update(i)
# Hourglass heatmap predictor restored from checkpoint `weights`.
# NOTE(review): this block re-binds names (hg, gan_model_tuda, writer, ...)
# also assigned earlier in the file — these look like two fused script
# variants; confirm which configuration is authoritative.
hg = HG_heatmap(heatmapper, num_blocks=1, num_classes=200)
hg.load_state_dict(weights['gh'])
hg = hg.cuda()
cont_opt = optim.Adam(hg.parameters(), lr=2e-5, betas=(0, 0.8))

# Forward GAN (heatmap -> image); (generator_lr, discriminator_lr).
gan_model_tuda = StyleGanModel[HeatmapToImage](enc_dec.generator,
                                               StyleGANLoss(discriminator_img),
                                               (0.001 / 4, 0.0015 / 4))

style_opt = optim.Adam(enc_dec.style_encoder.parameters(), lr=1e-5)

writer = SummaryWriter(
    f"{Paths.default.board()}/brule1_cardio_{int(time.time())}")
WR.writer = writer

batch = next(LazyLoader.cardio().loader_train_inf)

# NOTE(review): `batch_test` is assigned but the tensors below are taken from
# `batch` (the training batch) — possibly a bug in the original; confirm.
batch_test = next(iter(LazyLoader.cardio().test_loader))
test_img = batch["image"].cuda()

test_landmarks = batch["keypoints"].cuda()
# Clamp to the unit square expected by UniformMeasure2D01.
test_measure = UniformMeasure2D01(torch.clamp(test_landmarks, max=1))
test_hm = heatmapper.forward(test_measure.coord).sum(1, keepdim=True).detach()
test_noise = mixing_noise(batch_size, 512, 0.9, device)

psp_loss = PSPLoss(id_lambda=0).cuda()
# NOTE(review): coefficients (10000/100) differ from the earlier variant
# (100000/2000) — confirm which is intended.
mes_loss = MesBceWasLoss(heatmapper, bce_coef=10000, was_coef=100)

# EMA-style accumulators that periodically write averaged weights.
image_accumulator = Accumulator(enc_dec.generator, decay=0.99, write_every=100)
hm_accumulator = Accumulator(hg, decay=0.99, write_every=100)