# --- Example 1 ---
# Setup for the VAE/classifier example: problem dimensions and random data.
import torch  # was missing — torch.randn/randint below would raise NameError
from itertools import product

C = 2            # number of classes
D = (1, 28, 28)  # input sample shape (channels, height, width) — MNIST-like
K = 9            # latent dimension
L = int(1e4)     # number of latent samples drawn per input

N = (4,)         # batch shape


# Random inputs of shape (*N, *D) and integer labels in [0, C).
x = torch.randn(*N, *D)
y = torch.randint(0, C, N)


# Build a 'vae'-type network and exercise forward / loss / evaluate.
import torch.nn.functional as F  # was missing — F.softmax below would raise NameError

type_ = 'vae'
beta = 1e-2
# NOTE(review): Net is project-defined; assumed signature is
# (input_shape, n_classes, latent_dim=..., latent_sampling=..., beta=...,
#  type_of_net=...) — confirm against the Net definition.
net = Net(D, C, latent_dim=K, latent_sampling=L, beta=beta, type_of_net=type_)

# forward returns (reconstruction, logits, latent mean, latent log-var, samples)
# per the unpacking below — TODO confirm against Net.forward.
_x_, logit_, mu_z, lv_z, z_ = net.forward(x, y)
print(logit_.shape)

# Class probabilities over the last (class) dimension.
y_ = F.softmax(logit_, -1)

loss_ = net.loss(x, y, _x_, y_, mu_z, lv_z, return_all_losses=True)

_x, logit, loss = net.evaluate(x, return_all_losses=True)
print(logit.shape)

# A plain 'vae' has no classifier head, so prediction is only meaningful
# for the other network types.
if type_ != 'vae':
    y_pred = net.predict_after_evaluate(logit, loss)
# --- Example 2 (truncated: a constructor call and a loop begin before this excerpt) ---
            # Tail of a network-constructor call whose opening parenthesis is
            # outside this excerpt; keyword arguments only.
            sigma=0,
            gamma=gamma,
            force_cross_y=0,
            latent_sampling=L,
            latent_dim=K)
    n.to(d)  # move the network to device d (presumably cpu/cuda — confirm)
    nets[ntype] = n  # keep every built network, keyed by its type string
    # n.compute_max_batch_size(batch_size=1024)
    # print(n.max_batch_sizes)
    
    # Placeholder branch — no special handling yet when labels are part of
    # the network's input coding.
    if n.y_is_coded:
        pass

    # Non-'vae' types can also be evaluated with the labels supplied.
    if ntype != 'vae':
        print('y in input')
        out_y[ntype] = n.evaluate(x, y)
    # Every type is evaluated without labels.
    print('y is none')
    out[ntype] = n.evaluate(x)


# Report, for each evaluation run (without labels: '*', with labels: 'y')
# and each network type, the shapes of every loss tensor, the logits and
# the reconstruction.
for results, label_tag in zip((out, out_y), ('*', 'y')):
    for net_type, triple in results.items():
        recon, logits, losses = triple[0], triple[1], triple[2]

        for loss_name, loss_tensor in losses.items():
            print(net_type, label_tag, loss_name, 'loss  :', *loss_tensor.shape)
        print(net_type, label_tag, 'logits:', *logits.shape)
        print(net_type, label_tag, 'x_:', *recon.shape)