Example #1
# Assumed imports for this example; calc_auc and scoring are helper functions
# defined elsewhere in the source project.
import numpy as np
import torch
from torch.nn import BCELoss


def _get_loss(F, model, batch, forward_batch_fun, **kws):
    device = torch.device(F.device)
    y = batch['label'].to(device)                    # ground-truth multi-label targets
    y_ = forward_batch_fun(F, model, batch, **kws)   # model outputs (probabilities, as BCELoss expects)
    loss = BCELoss()(y_, y)
    sc = {}
    sc['loss'] = loss.item()
    # Per-class AUC, averaged over all label columns.
    sc['auc'] = np.mean([
        calc_auc(y_.cpu().detach().numpy()[:, i],
                 y.cpu().detach().numpy()[:, i]) for i in range(y.shape[1])
    ])
    sc['score'] = scoring(sc['loss'])
    return loss, sc
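# --- Hypothetical usage sketch (not part of the original source). It assumes a
# --- config object `F` with a `device` field and a model whose raw outputs are
# --- logits; the forward function below is illustrative only.
def forward_batch_fun(F, model, batch, **kws):
    # Minimal forward pass returning per-class probabilities,
    # since BCELoss expects values in [0, 1].
    device = torch.device(F.device)
    x = batch['image'].to(device)
    return torch.sigmoid(model(x))

# loss, sc = _get_loss(F, model, batch, forward_batch_fun)
# loss.backward()
# optimizer.step()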
        # Conditional labels for the fake batch: one-hot vectors for the
        # generator, filled label maps for the discriminator.
        label_fake_G_var = Variable(onehot[label_fake].cuda() if CONFIG["GPU_NUMS"] > 0 else onehot[label_fake])
        label_fake_D_var = Variable(fill[label_fake].cuda() if CONFIG["GPU_NUMS"] > 0 else fill[label_fake])

        # Discriminator loss on generated samples: D should output 0 ("fake").
        g_result = NetG(img_fake_var, label_fake_G_var)
        d_result = NetD(g_result, label_fake_D_var)
        d_result = d_result.squeeze()
        D_LOSS_FAKE = BCELoss()(d_result, label_false_var)

        # Total discriminator loss = real loss + fake loss, then update D.
        D_train_loss = D_LOSS_REAL + D_LOSS_FAKE
        D_train_loss.backward()
        D_optimizer.step()

        # Generator update: draw fresh noise and random class labels, then
        # train G so that D classifies its samples as real (label 1).
        NetG.zero_grad()
        img_fake = torch.randn((mini_batch, 100)).view(-1, 100, 1, 1)
        label_fake = (torch.rand(mini_batch, 1) * 10).type(torch.LongTensor).squeeze()
        img_fake_var = Variable(img_fake.cuda() if CONFIG["GPU_NUMS"] > 0 else img_fake)
        label_fake_G_var = Variable(onehot[label_fake].cuda() if CONFIG["GPU_NUMS"] > 0 else onehot[label_fake])
        label_fake_D_var = Variable(fill[label_fake].cuda() if CONFIG["GPU_NUMS"] > 0 else fill[label_fake])
        g_result = NetG(img_fake_var, label_fake_G_var)
        d_result = NetD(g_result, label_fake_D_var)
        d_result = d_result.squeeze()
        G_train_loss = BCELoss()(d_result, label_true_var)
        G_train_loss.backward()
        G_optimizer.step()

        bar.show(epoch, D_train_loss.item(), G_train_loss.item())

    # After each epoch, generate samples from the fixed noise/label pair
    # and save a 10x10 image grid.
    test_images = NetG(fixed_z_, fixed_y_label_)

    torchvision.utils.save_image(test_images.data[:100], 'outputs/mnist_%03d.png' % epoch, nrow=10,
                                 normalize=True, range=(-1, 1), padding=0)
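# --- Hypothetical construction of the `onehot` and `fill` lookup tensors used
# --- above (they are not defined in this fragment). This is a sketch of a
# --- common conditional-GAN setup for MNIST; num_classes and img_size are
# --- assumptions, not values taken from the original source.
import torch

num_classes, img_size = 10, 32

# onehot[c] is a (num_classes, 1, 1) one-hot vector fed to the generator.
onehot = torch.zeros(num_classes, num_classes)
onehot = onehot.scatter_(1, torch.arange(num_classes).view(num_classes, 1), 1)
onehot = onehot.view(num_classes, num_classes, 1, 1)

# fill[c] is a (num_classes, img_size, img_size) map whose c-th channel is all
# ones, so the label can be concatenated with the image inside the discriminator.
fill = torch.zeros(num_classes, num_classes, img_size, img_size)
for c in range(num_classes):
    fill[c, c, :, :] = 1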