def eval_model(G, save_path, name, data1, data2, data3, r_data, distance, no_graph=False):
    nz = 2
    z = torch.randn(10000, nz).cuda()
    label = torch.zeros(10000).long().cuda()  # class 0
    data1_g = G(z=z, label=label).squeeze().cpu().detach()
    z = torch.randn(10000, nz).cuda()
    label = torch.ones(10000).long().cuda()  # class 1
    data2_g = G(z=z, label=label).squeeze().cpu().detach()
    z = torch.randn(10000, nz).cuda()
    label = torch.ones(10000).long().cuda() * 2  # class 2
    data3_g = G(z=z, label=label).squeeze().cpu().detach()
    df1 = pd.DataFrame()
    df2 = pd.DataFrame()
    df1['score_0'] = data1_g.numpy()
    df1['score_1'] = data2_g.numpy()
    df1['score_2'] = data3_g.numpy()
    g_data = torch.cat([data1_g, data2_g, data3_g], dim=0).numpy()
    np.save(save_path + '/%s_data' % name, g_data)
    df2['marginal'] = g_data
    if not no_graph:
        fig, ax = plt.subplots(1, 1)
        for s in df1.columns:
            df1[s].plot(kind='kde')
        for s in df2.columns:
            df2[s].plot(style='--', kind='kde')
        plt.xlim((-4, 9 + distance * 2))
        ax.legend(["Class_0", "Class_1", "Class_2", "Marginal"])
        # plt.title(name)
        fig.savefig(save_path + '/%s.eps' % name)
    mean0_0, var0_0 = polynomial_mmd(np.expand_dims(data1_g.numpy(), axis=1),
                                     np.expand_dims(data1.cpu().numpy(), axis=1))
    mean0_1, var0_1 = polynomial_mmd(np.expand_dims(data2_g.numpy(), axis=1),
                                     np.expand_dims(data2.cpu().numpy(), axis=1))
    mean0_2, var0_2 = polynomial_mmd(np.expand_dims(data3_g.numpy(), axis=1),
                                     np.expand_dims(data3.cpu().numpy(), axis=1))
    mean0, var0 = polynomial_mmd(np.expand_dims(g_data, axis=1), np.expand_dims(r_data, axis=1))
    with open(save_path + '/results.txt', 'a+') as f:
        f.write(f'{name}:\n')
        f.write(f'{mean0_0}, {var0_0}\n')
        f.write(f'{mean0_1}, {var0_1}\n')
        f.write(f'{mean0_2}, {var0_2}\n')
        f.write(f'{mean0}, {var0}\n')
    return (mean0_0, var0_0), (mean0_1, var0_1), (mean0_2, var0_2), (mean0, var0)
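
polynomial_mmd is used throughout these examples but never defined. Below is a minimal sketch of what it plausibly computes, following the Kernel Inception Distance convention (Binkowski et al., 2018): an unbiased MMD^2 estimate under the polynomial kernel k(x, y) = (x.y / d + 1)^3, averaged over random subsets so that a (mean, variance) pair can be reported. The subset size and count are illustrative assumptions, not values from the source.

import numpy as np

def polynomial_mmd(X, Y, degree=3, n_subsets=10, subset_size=1000, seed=None):
    """Return (mean, var) of unbiased polynomial-kernel MMD^2 over random subsets."""
    rng = np.random.default_rng(seed)
    d = X.shape[1]
    estimates = []
    for _ in range(n_subsets):
        x = X[rng.choice(len(X), size=min(subset_size, len(X)), replace=False)]
        y = Y[rng.choice(len(Y), size=min(subset_size, len(Y)), replace=False)]
        Kxx = (x @ x.T / d + 1.0) ** degree
        Kyy = (y @ y.T / d + 1.0) ** degree
        Kxy = (x @ y.T / d + 1.0) ** degree
        m, n = len(x), len(y)
        # Unbiased MMD^2: exclude the diagonals of the within-sample kernel matrices.
        mmd2 = ((Kxx.sum() - np.trace(Kxx)) / (m * (m - 1))
                + (Kyy.sum() - np.trace(Kyy)) / (n * (n - 1))
                - 2.0 * Kxy.mean())
        estimates.append(mmd2)
    return float(np.mean(estimates)), float(np.var(estimates))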
Example 2
def multi_results(distance):
    # time.sleep(distance*3)
    nz = 2
    G = G_guassian(nz=nz, num_classes=3).cuda()

    D = D_guassian(num_classes=3).cuda()

    optg = optim.Adam(G.parameters(), lr=0.002,
                      betas=(0.5, 0.999))
    optd = optim.Adam(D.parameters(), lr=0.002,
                      betas=(0.5, 0.999))

    distance = (distance+2)/2
    save_path = os.path.join('MOG', '1D', str(distance) + '_1D')
    os.makedirs(save_path, exist_ok=True)

    data1 = torch.randn(128000).cuda()
    data2 = torch.randn(128000).cuda() * 2 + distance
    data3 = torch.randn(128000).cuda() * 3 + distance * 2

    df1 = pd.DataFrame()
    df2 = pd.DataFrame()

    df1['score_0'] = data1.cpu().numpy()
    df1['score_1'] = data2.cpu().numpy()
    df1['score_2'] = data3.cpu().numpy()
    r_data = torch.cat([data1, data2, data3], dim=0).cpu().numpy()
    df2['marginal'] = r_data
    np.save(save_path + '/o_data', r_data)

    fig, ax = plt.subplots(1, 1)
    for s in df1.columns:
        df1[s].plot(kind='kde')

    for s in df2.columns:
        df2[s].plot(style='--', kind='kde')
    plt.xlim((-4, 9 + distance * 2))
    ax.legend(["Class_0", "Class_1", "Class_2", "Marginal"])
    plt.title('Original')
    fig.savefig(save_path + '/original.eps')

    train(data1, data2, data3, G, D, optd, optg, AC=True, MI=True)

    z = torch.randn(10000, nz).cuda()
    label = torch.zeros(10000).long().cuda()  # class 0
    data1_g = G(z=z, label=label).squeeze().cpu().detach()

    z = torch.randn(10000, nz).cuda()
    label = torch.ones(10000).long().cuda()  # class 1
    data2_g = G(z=z, label=label).squeeze().cpu().detach()

    z = torch.randn(10000, nz).cuda()
    label = torch.ones(10000).long().cuda() * 2  # class 2
    data3_g = G(z=z, label=label).squeeze().cpu().detach()

    df1 = pd.DataFrame()
    df2 = pd.DataFrame()

    df1['score_0'] = data1_g.numpy()
    df1['score_1'] = data2_g.numpy()
    df1['score_2'] = data3_g.numpy()
    g_data = torch.cat([data1_g, data2_g, data3_g], dim=0).numpy()
    np.save(save_path + '/twin_ac_data', g_data)
    df2['marginal'] = g_data

    fig, ax = plt.subplots(1, 1)
    for s in df1.columns:
        df1[s].plot(kind='kde')

    for s in df2.columns:
        df2[s].plot(style='--', kind='kde')

    plt.xlim((-4, 9 + distance * 2))
    ax.legend(["Class_0", "Class_1", "Class_2", "Marginal"])
    plt.title('TAC')
    fig.savefig(save_path + '/twin_ac.eps')

    mean0_0, var0_0 = polynomial_mmd(np.expand_dims(data1_g.numpy(), axis=1),
                                     np.expand_dims(data1.cpu().numpy(), axis=1))
    mean0_1, var0_1 = polynomial_mmd(np.expand_dims(data2_g.numpy(), axis=1),
                                     np.expand_dims(data2.cpu().numpy(), axis=1))
    mean0_2, var0_2 = polynomial_mmd(np.expand_dims(data3_g.numpy(), axis=1),
                                     np.expand_dims(data3.cpu().numpy(), axis=1))

    mean0, var0 = polynomial_mmd(np.expand_dims(g_data, axis=1), np.expand_dims(r_data, axis=1))

    #### AC: auxiliary classifier only (no twin/MI term)
    G = G_guassian(nz=nz, num_classes=3).cuda()

    D = D_guassian(num_classes=3).cuda()


    optg = optim.Adam(G.parameters(), lr=0.002,
                      betas=(0.5, 0.999))
    optd = optim.Adam(D.parameters(), lr=0.002,
                      betas=(0.5, 0.999))

    for _ in range(20):
        for i in range(1000):

            #####D step
            for _ in range(1):
                data = torch.cat(
                    [data1[128 * i:128 * i + 128], data2[128 * i:128 * i + 128], data3[128 * i:128 * i + 128]],
                    dim=0).unsqueeze(dim=1)
                label = torch.cat([torch.ones(128).cuda().long() * 0, torch.ones(128).cuda().long() * 1,
                                   torch.ones(128).cuda().long() * 2], dim=0)

                ###D
                d_real, c, _ = D(data)

                z = torch.randn(256, nz).cuda()
                fake_label = torch.LongTensor(256).random_(3).cuda()
                fake_data = G(z, label=fake_label)
                d_fake, _, mi = D(fake_data)

                D_loss = F.binary_cross_entropy(d_real, torch.ones(384).cuda()) \
                         + F.binary_cross_entropy(d_fake, torch.zeros(256).cuda()) \
                         + F.cross_entropy(c, label)

                optd.zero_grad()
                D_loss.backward()
                optd.step()

            #####G step
            if i % 10 == 0:
                z = torch.randn(256, nz).cuda()
                fake_label = torch.LongTensor(256).random_(3).cuda()
                fake_data = G(z, label=fake_label)
                d_fake, c, mi = D(fake_data)

                G_loss = F.binary_cross_entropy(d_fake, torch.ones(256).cuda()) + F.cross_entropy(c, fake_label)

                optg.zero_grad()
                G_loss.backward()
                optg.step()

    z = torch.randn(10000, nz).cuda()
    label = torch.zeros(10000).long().cuda()  # class 0
    data1_g = G(z=z, label=label).squeeze().cpu().detach()

    z = torch.randn(10000, nz).cuda()
    label = torch.ones(10000).long().cuda()  # class 1
    data2_g = G(z=z, label=label).squeeze().cpu().detach()

    z = torch.randn(10000, nz).cuda()
    label = torch.ones(10000).long().cuda() * 2  # class 2
    data3_g = G(z=z, label=label).squeeze().cpu().detach()

    df1 = pd.DataFrame()
    df2 = pd.DataFrame()

    df1['score_0'] = data1_g.numpy()
    df1['score_1'] = data2_g.numpy()
    df1['score_2'] = data3_g.numpy()
    g_data = torch.cat([data1_g, data2_g, data3_g], dim=0).numpy()
    np.save(save_path + '/ac_data', g_data)
    df2['marginal'] = g_data

    fig, ax = plt.subplots(1, 1)
    for s in df1.columns:
        df1[s].plot(kind='kde')

    for s in df2.columns:
        df2[s].plot(style='--', kind='kde')
    plt.xlim((-4, 9 + distance * 2))
    ax.legend(["Class_0", "Class_1", "Class_2", "Marginal"])
    plt.title('AC')

    fig.savefig(save_path + '/ac.eps')

    mean1_0, var1_0 = polynomial_mmd(np.expand_dims(data1_g.numpy(), axis=1),
                                     np.expand_dims(data1.cpu().numpy(), axis=1))
    mean1_1, var1_1 = polynomial_mmd(np.expand_dims(data2_g.numpy(), axis=1),
                                     np.expand_dims(data2.cpu().numpy(), axis=1))
    mean1_2, var1_2 = polynomial_mmd(np.expand_dims(data3_g.numpy(), axis=1),
                                     np.expand_dims(data3.cpu().numpy(), axis=1))

    mean1, var1 = polynomial_mmd(np.expand_dims(g_data, axis=1), np.expand_dims(r_data, axis=1))

    #### Projection: projection discriminator (no classifier heads)

    G = G_guassian(nz=nz, num_classes=3).cuda()

    D = D_guassian(num_classes=3, AC=False).cuda()

    optg = optim.Adam(G.parameters(), lr=0.002,
                      betas=(0.5, 0.999))
    optd = optim.Adam(D.parameters(), lr=0.002,
                      betas=(0.5, 0.999))

    for _ in range(20):
        for i in range(1000):

            #####D step
            for _ in range(1):
                data = torch.cat(
                    [data1[128 * i:128 * i + 128], data2[128 * i:128 * i + 128], data3[128 * i:128 * i + 128]],
                    dim=0).unsqueeze(dim=1)
                label = torch.cat([torch.ones(128).cuda().long() * 0, torch.ones(128).cuda().long() * 1,
                                   torch.ones(128).cuda().long() * 2], dim=0)

                ###D
                d_real, c, _ = D(data, label)

                z = torch.randn(256, nz).cuda()
                fake_label = torch.LongTensor(256).random_(3).cuda()
                fake_data = G(z, label=fake_label)
                d_fake, _, mi = D(fake_data, fake_label)

                D_loss = F.binary_cross_entropy(d_real, torch.ones(384).cuda()) \
                         + F.binary_cross_entropy(d_fake, torch.zeros(256).cuda())

                optd.zero_grad()
                D_loss.backward()
                optd.step()

            #####G step
            if i % 10 == 0:
                z = torch.randn(256, nz).cuda()
                fake_label = torch.LongTensor(256).random_(3).cuda()
                fake_data = G(z, label=fake_label)
                d_fake, c, mi = D(fake_data, fake_label)

                G_loss = F.binary_cross_entropy(d_fake, torch.ones(256).cuda())

                optg.zero_grad()
                G_loss.backward()
                optg.step()

    z = torch.randn(10000, nz).cuda()
    label = torch.zeros(10000).long().cuda()  # class 0
    data1_g = G(z=z, label=label).squeeze().cpu().detach()

    z = torch.randn(10000, nz).cuda()
    label = torch.ones(10000).long().cuda()  # class 1
    data2_g = G(z=z, label=label).squeeze().cpu().detach()

    z = torch.randn(10000, nz).cuda()
    label = torch.ones(10000).long().cuda() * 2  # class 2
    data3_g = G(z=z, label=label).squeeze().cpu().detach()

    df1 = pd.DataFrame()
    df2 = pd.DataFrame()

    df1['score_0'] = data1_g.numpy()
    df1['score_1'] = data2_g.numpy()
    df1['score_2'] = data3_g.numpy()
    g_data = torch.cat([data1_g, data2_g, data3_g], dim=0).numpy()
    np.save(save_path + '/projection_data', g_data)
    df2['marginal'] = g_data

    fig, ax = plt.subplots(1, 1)
    for s in df1.columns:
        df1[s].plot(kind='kde')

    for s in df2.columns:
        df2[s].plot(style='--', kind='kde')
    plt.xlim((-4, 9 + distance * 2))
    ax.legend(["Class_0", "Class_1", "Class_2", "Marginal"])
    plt.title('Projection')

    fig.savefig(save_path + '/projection.eps')

    mean2_0, var2_0 = polynomial_mmd(np.expand_dims(data1_g.numpy(), axis=1),
                                     np.expand_dims(data1.cpu().numpy(), axis=1))
    mean2_1, var2_1 = polynomial_mmd(np.expand_dims(data2_g.numpy(), axis=1),
                                     np.expand_dims(data2.cpu().numpy(), axis=1))
    mean2_2, var2_2 = polynomial_mmd(np.expand_dims(data3_g.numpy(), axis=1),
                                     np.expand_dims(data3.cpu().numpy(), axis=1))

    mean2, var2 = polynomial_mmd(np.expand_dims(g_data, axis=1), np.expand_dims(r_data, axis=1))

    result = [str(mean0_0) + ',' + str(var0_0),
              str(mean0_1) + ',' + str(var0_1),
              str(mean0_2) + ',' + str(var0_2),
              str(mean0) + ',' + str(var0),
              str(mean1_0) + ',' + str(var1_0),
              str(mean1_1) + ',' + str(var1_1),
              str(mean1_2) + ',' + str(var1_2),
              str(mean1) + ',' + str(var1),
              str(mean2_0) + ',' + str(var2_0),
              str(mean2_1) + ',' + str(var2_1),
              str(mean2_2) + ',' + str(var2_2),
              str(mean2) + ',' + str(var2)]

    

    with open(save_path + '/result.txt', 'w') as f:
        for content in result:
            f.write(content + '\n')
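
A hypothetical driver for this experiment, assuming multi_results is meant to be launched once per integer distance setting (the commented-out time.sleep(distance * 3) in some variants suggests staggered parallel workers; multi_results itself maps the argument to (distance + 2) / 2). The range of settings below is illustrative; CUDA requires the 'spawn' start method in child processes.

import multiprocessing as mp

if __name__ == '__main__':
    mp.set_start_method('spawn')  # required when child processes use CUDA
    # Distance settings 0..4 are illustrative assumptions.
    workers = [mp.Process(target=multi_results, args=(d,)) for d in range(5)]
    for w in workers:
        w.start()
    for w in workers:
        w.join()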
Example 3

def multi_results(distance):
    time.sleep(distance * 3)
    nz = 2
    G = G_guassian(nz=nz, num_classes=3).cuda()

    D = D_guassian(num_classes=3).cuda()

    optg = optim.Adam(G.parameters(), lr=0.002, betas=(0.5, 0.999))
    optd = optim.Adam(D.parameters(), lr=0.002, betas=(0.5, 0.999))

    distance = (distance + 2) / 2
    save_path = os.path.join('MOG', '2D', str(distance) + '_2D')
    os.makedirs(save_path, exist_ok=True)

    data1 = torch.randn(128000, 2).cuda()
    data2 = torch.randn(128000, 2).cuda() * 2
    data2[:, 0] += distance
    data3 = torch.randn(128000, 2).cuda() * 3
    data3[:, 0] += distance * 2

    fig, ax = plt.subplots(1, 1)
    sns.kdeplot(x=data1.cpu().numpy()[:, 0], y=data1.cpu().numpy()[:, 1])
    sns.kdeplot(x=data2.cpu().numpy()[:, 0], y=data2.cpu().numpy()[:, 1])
    sns.kdeplot(x=data3.cpu().numpy()[:, 0], y=data3.cpu().numpy()[:, 1])
    fig.legend(["Class_0", "Class_1", "Class_2"])
    plt.xlim((-4, 9 + distance * 2))
    plt.ylim((-8, 8))
    plt.title('Original')
    fig.savefig(save_path + '/o_conditional.eps')

    r_data = torch.cat([data1, data2, data3], dim=0).cpu().numpy()

    np.save(save_path + '/o_data', r_data)

    for _ in range(40):
        for i in range(1000):

            #####D step
            for _ in range(1):
                data = torch.cat([
                    data1[128 * i:128 * i + 128],
                    data2[128 * i:128 * i + 128],
                    data3[128 * i:128 * i + 128],
                ], dim=0).unsqueeze(dim=1)
                label = torch.cat([
                    torch.ones(128).cuda().long() * 0,
                    torch.ones(128).cuda().long() * 1,
                    torch.ones(128).cuda().long() * 2,
                ], dim=0)

                ###D
                d_real, c, _ = D(data)

                z = torch.randn(256, nz).cuda()
                fake_label = torch.LongTensor(256).random_(3).cuda()
                fake_data = G(z, label=fake_label)
                d_fake, _, mi = D(fake_data)

                D_loss = F.binary_cross_entropy(d_real, torch.ones(384).cuda()) \
                         + F.binary_cross_entropy(d_fake, torch.zeros(256).cuda()) \
                         + F.cross_entropy(c, label) \
                         + F.cross_entropy(mi, fake_label)

                optd.zero_grad()
                D_loss.backward()
                optd.step()

            #####G step
            if i % 10 == 0:
                z = torch.randn(256, nz).cuda()
                fake_label = torch.LongTensor(256).random_(3).cuda()
                fake_data = G(z, label=fake_label)
                d_fake, c, mi = D(fake_data)

                G_loss = F.binary_cross_entropy(d_fake, torch.ones(256).cuda()) \
                         + F.cross_entropy(c, fake_label) \
                         - F.cross_entropy(mi, fake_label)

                optg.zero_grad()
                G_loss.backward()
                optg.step()

    z = torch.randn(10000, nz).cuda()
    label = torch.zeros(10000).long().cuda()  # class 0
    data1_g = G(z=z, label=label).squeeze().cpu().detach()

    z = torch.randn(10000, nz).cuda()
    label = torch.ones(10000).long().cuda()  # class 1
    data2_g = G(z=z, label=label).squeeze().cpu().detach()

    z = torch.randn(10000, nz).cuda()
    label = torch.ones(10000).long().cuda() * 2  # class 2
    data3_g = G(z=z, label=label).squeeze().cpu().detach()

    fig, ax = plt.subplots(1, 1)
    sns.kdeplot(x=data1_g.numpy()[:, 0], y=data1_g.numpy()[:, 1])
    sns.kdeplot(x=data2_g.numpy()[:, 0], y=data2_g.numpy()[:, 1])
    sns.kdeplot(x=data3_g.numpy()[:, 0], y=data3_g.numpy()[:, 1])
    fig.legend(["Class_0", "Class_1", "Class_2"])
    plt.xlim((-4, 9 + distance * 2))
    plt.ylim((-8, 8))
    plt.title('TAC')
    fig.savefig(save_path + '/twin_ac_conditional.eps')

    g_data = torch.cat([data1_g, data2_g, data3_g], dim=0).cpu().numpy()

    np.save(save_path + '/twin_ac_data', g_data)

    mean0_0, var0_0 = polynomial_mmd(data1_g.numpy(), data1.cpu().numpy())
    mean0_1, var0_1 = polynomial_mmd(data2_g.numpy(), data2.cpu().numpy())
    mean0_2, var0_2 = polynomial_mmd(data3_g.numpy(), data3.cpu().numpy())

    mean0, var0 = polynomial_mmd(g_data, r_data)

    #### AC: auxiliary classifier only (no twin/MI term)
    G = G_guassian(nz=nz, num_classes=3).cuda()

    D = D_guassian(num_classes=3).cuda()

    optg = optim.Adam(G.parameters(), lr=0.002, betas=(0.5, 0.999))
    optd = optim.Adam(D.parameters(), lr=0.002, betas=(0.5, 0.999))

    for _ in range(40):
        for i in range(1000):

            #####D step
            for _ in range(1):
                data = torch.cat([
                    data1[128 * i:128 * i + 128],
                    data2[128 * i:128 * i + 128],
                    data3[128 * i:128 * i + 128],
                ], dim=0).unsqueeze(dim=1)
                label = torch.cat([
                    torch.ones(128).cuda().long() * 0,
                    torch.ones(128).cuda().long() * 1,
                    torch.ones(128).cuda().long() * 2,
                ], dim=0)

                ###D
                d_real, c, _ = D(data)

                z = torch.randn(256, nz).cuda()
                fake_label = torch.LongTensor(256).random_(3).cuda()
                fake_data = G(z, label=fake_label)
                d_fake, _, mi = D(fake_data)

                D_loss = F.binary_cross_entropy(d_real, torch.ones(384).cuda()) \
                         + F.binary_cross_entropy(d_fake, torch.zeros(256).cuda()) \
                         + F.cross_entropy(c, label)

                optd.zero_grad()
                D_loss.backward()
                optd.step()

            #####G step
            if i % 10 == 0:
                z = torch.randn(256, nz).cuda()
                fake_label = torch.LongTensor(256).random_(3).cuda()
                fake_data = G(z, label=fake_label)
                d_fake, c, mi = D(fake_data)

                G_loss = F.binary_cross_entropy(d_fake, torch.ones(256).cuda()) \
                         + F.cross_entropy(c, fake_label)

                optg.zero_grad()
                G_loss.backward()
                optg.step()

    z = torch.randn(10000, nz).cuda()
    label = torch.zeros(10000).long().cuda()  # class 0
    data1_g = G(z=z, label=label).squeeze().cpu().detach()

    z = torch.randn(10000, nz).cuda()
    label = torch.ones(10000).long().cuda()  # class 1
    data2_g = G(z=z, label=label).squeeze().cpu().detach()

    z = torch.randn(10000, nz).cuda()
    label = torch.ones(10000).long().cuda() * 2  # class 2
    data3_g = G(z=z, label=label).squeeze().cpu().detach()

    fig, ax = plt.subplots(1, 1)
    sns.kdeplot(x=data1_g.numpy()[:, 0], y=data1_g.numpy()[:, 1])
    sns.kdeplot(x=data2_g.numpy()[:, 0], y=data2_g.numpy()[:, 1])
    sns.kdeplot(x=data3_g.numpy()[:, 0], y=data3_g.numpy()[:, 1])
    fig.legend(["Class_0", "Class_1", "Class_2"])
    plt.xlim((-4, 9 + distance * 2))
    plt.ylim((-8, 8))
    plt.title('AC')
    fig.savefig(save_path + '/ac_conditional.eps')

    g_data = torch.cat([data1_g, data2_g, data3_g], dim=0).cpu().numpy()

    np.save(save_path + '/ac_data', g_data)

    mean1_0, var1_0 = polynomial_mmd(data1_g.numpy(), data1.cpu().numpy())
    mean1_1, var1_1 = polynomial_mmd(data2_g.numpy(), data2.cpu().numpy())
    mean1_2, var1_2 = polynomial_mmd(data3_g.numpy(), data3.cpu().numpy())

    mean1, var1 = polynomial_mmd(g_data, r_data)

    #### Projection: projection discriminator (no classifier heads)

    G = G_guassian(nz=nz, num_classes=3).cuda()

    D = D_guassian(num_classes=3, AC=False).cuda()

    optg = optim.Adam(G.parameters(), lr=0.002, betas=(0.5, 0.999))
    optd = optim.Adam(D.parameters(), lr=0.002, betas=(0.5, 0.999))

    for _ in range(40):
        for i in range(1000):

            #####D step
            for _ in range(1):
                data = torch.cat([
                    data1[128 * i:128 * i + 128],
                    data2[128 * i:128 * i + 128],
                    data3[128 * i:128 * i + 128],
                ], dim=0).unsqueeze(dim=1)
                label = torch.cat([
                    torch.ones(128).cuda().long() * 0,
                    torch.ones(128).cuda().long() * 1,
                    torch.ones(128).cuda().long() * 2,
                ], dim=0)

                ###D
                d_real, c, _ = D(data, label)

                z = torch.randn(256, nz).cuda()
                fake_label = torch.LongTensor(256).random_(3).cuda()
                fake_data = G(z, label=fake_label)
                d_fake, _, mi = D(fake_data, fake_label)

                D_loss = F.binary_cross_entropy(d_real, torch.ones(384).cuda()) \
                         + F.binary_cross_entropy(d_fake, torch.zeros(256).cuda())

                optd.zero_grad()
                D_loss.backward()
                optd.step()

            #####G step
            if i % 10 == 0:
                z = torch.randn(256, nz).cuda()
                fake_label = torch.LongTensor(256).random_(3).cuda()
                fake_data = G(z, label=fake_label)
                d_fake, c, mi = D(fake_data, fake_label)

                G_loss = F.binary_cross_entropy(d_fake, torch.ones(256).cuda())

                optg.zero_grad()
                G_loss.backward()
                optg.step()

    z = torch.randn(10000, nz).cuda()
    label = torch.zeros(10000).long().cuda()  # class 0
    data1_g = G(z=z, label=label).squeeze().cpu().detach()

    z = torch.randn(10000, nz).cuda()
    label = torch.ones(10000).long().cuda()  # class 1
    data2_g = G(z=z, label=label).squeeze().cpu().detach()

    z = torch.randn(10000, nz).cuda()
    label = torch.ones(10000).long().cuda() * 2  # class 2
    data3_g = G(z=z, label=label).squeeze().cpu().detach()

    fig, ax = plt.subplots(1, 1)
    sns.kdeplot(x=data1_g.numpy()[:, 0], y=data1_g.numpy()[:, 1])
    sns.kdeplot(x=data2_g.numpy()[:, 0], y=data2_g.numpy()[:, 1])
    sns.kdeplot(x=data3_g.numpy()[:, 0], y=data3_g.numpy()[:, 1])
    fig.legend(["Class_0", "Class_1", "Class_2"])
    plt.xlim((-4, 9 + distance * 2))
    plt.ylim((-8, 8))
    plt.title('Projection')
    fig.savefig(save_path + '/projection_conditional.eps')

    g_data = torch.cat([data1_g, data2_g, data3_g], dim=0).cpu().numpy()

    np.save(save_path + '/projection_data', g_data)

    mean2_0, var2_0 = polynomial_mmd(data1_g.numpy(), data1.cpu().numpy())
    mean2_1, var2_1 = polynomial_mmd(data2_g.numpy(), data2.cpu().numpy())
    mean2_2, var2_2 = polynomial_mmd(data3_g.numpy(), data3.cpu().numpy())

    mean2, var2 = polynomial_mmd(g_data, r_data)

    result = [
        str(mean0_0) + ',' + str(var0_0),
        str(mean0_1) + ',' + str(var0_1),
        str(mean0_2) + ',' + str(var0_2),
        str(mean0) + ',' + str(var0),
        str(mean1_0) + ',' + str(var1_0),
        str(mean1_1) + ',' + str(var1_1),
        str(mean1_2) + ',' + str(var1_2),
        str(mean1) + ',' + str(var1),
        str(mean2_0) + ',' + str(var2_0),
        str(mean2_1) + ',' + str(var2_1),
        str(mean2_2) + ',' + str(var2_2),
        str(mean2) + ',' + str(var2)
    ]

    with open(save_path + '/result.txt', 'w') as f:
        for content in result:
            f.write(content + '\n')
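
G_guassian and D_guassian are not shown in these examples. A minimal sketch of what G_guassian might look like, assuming a small MLP conditional generator that embeds the class label and concatenates it with the noise vector; the layer sizes and embedding scheme are assumptions. D_guassian would analogously expose three heads (adversarial score, classifier logits c, and twin-classifier logits mi), with the projection variant (AC=False) taking the label as a second input.

import torch
import torch.nn as nn

class G_guassian(nn.Module):
    """Hypothetical conditional MLP generator: out_dim=1 for the 1D MOG, 2 for the 2D MOG."""

    def __init__(self, nz=2, num_classes=3, out_dim=1, hidden=128):
        super().__init__()
        self.embed = nn.Embedding(num_classes, nz)  # label -> nz-dim code
        self.net = nn.Sequential(
            nn.Linear(2 * nz, hidden), nn.ReLU(),
            nn.Linear(hidden, hidden), nn.ReLU(),
            nn.Linear(hidden, out_dim),
        )

    def forward(self, z, label):
        # Condition on the class by concatenating the label embedding with z.
        return self.net(torch.cat([z, self.embed(label)], dim=1))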
Example 4
def multi_results(distance, gan_loss='bce', dis_mlp=False, run_id=0, suffix='', no_graph=False):
    if not suffix and dis_mlp:
        suffix = '_mlp'
    # time.sleep(distance*3)
    nz = 2
    G = G_guassian(nz=nz, num_classes=3).cuda()
    D = D_guassian(num_classes=3, AC=True, dis_mlp=dis_mlp).cuda()

    optg = optim.Adam(G.parameters(), lr=0.002, betas=(0.5, 0.999))
    optd = optim.Adam(D.parameters(), lr=0.002, betas=(0.5, 0.999))

    distance = (distance + 2) / 2
    save_path = os.path.join('MOG', '1D', f'{distance}_{gan_loss}{suffix}_{run_id}')
    os.makedirs(save_path, exist_ok=True)

    data1 = torch.randn(128000).cuda()
    data2 = torch.randn(128000).cuda() * 2 + distance
    data3 = torch.randn(128000).cuda() * 3 + distance * 2

    df1 = pd.DataFrame()
    df2 = pd.DataFrame()

    df1['score_0'] = data1.cpu().numpy()
    df1['score_1'] = data2.cpu().numpy()
    df1['score_2'] = data3.cpu().numpy()
    r_data = torch.cat([data1, data2, data3], dim=0).cpu().numpy()
    df2['marginal'] = r_data
    np.save(save_path + '/o_data', r_data)

    if not no_graph:
        fig, ax = plt.subplots(1, 1)
        for s in df1.columns:
            df1[s].plot(kind='kde')
        for s in df2.columns:
            df2[s].plot(style='--', kind='kde')
        plt.xlim((-4, 9 + distance * 2))
        ax.legend(["Class_0", "Class_1", "Class_2", "Marginal"])
        plt.title('Original')
        fig.savefig(save_path + '/original.eps')

    train(data1, data2, data3, nz, G, D, optd, optg, AC=True, MI=True, gan_loss=gan_loss)
    print('TAC training done.')

    z = torch.randn(10000, nz).cuda()
    label = torch.zeros(10000).long().cuda()  # class 0
    data1_g = G(z=z, label=label).squeeze().cpu().detach()

    z = torch.randn(10000, nz).cuda()
    label = torch.ones(10000).long().cuda()  # class 1
    data2_g = G(z=z, label=label).squeeze().cpu().detach()

    z = torch.randn(10000, nz).cuda()
    label = torch.ones(10000).long().cuda() * 2  # class 2
    data3_g = G(z=z, label=label).squeeze().cpu().detach()

    df1 = pd.DataFrame()
    df2 = pd.DataFrame()

    df1['score_0'] = data1_g.numpy()
    df1['score_1'] = data2_g.numpy()
    df1['score_2'] = data3_g.numpy()
    g_data = torch.cat([data1_g, data2_g, data3_g], dim=0).numpy()
    np.save(save_path + '/twin_ac_data', g_data)
    df2['marginal'] = g_data

    if not no_graph:
        fig, ax = plt.subplots(1, 1)
        for s in df1.columns:
            df1[s].plot(kind='kde')
        for s in df2.columns:
            df2[s].plot(style='--', kind='kde')
        plt.xlim((-4, 9 + distance * 2))
        ax.legend(["Class_0", "Class_1", "Class_2", "Marginal"])
        plt.title('TAC')
        fig.savefig(save_path + '/twin_ac.eps')

    mean0_0, var0_0 = polynomial_mmd(np.expand_dims(data1_g.numpy(), axis=1),
                                     np.expand_dims(data1.cpu().numpy(), axis=1))
    mean0_1, var0_1 = polynomial_mmd(np.expand_dims(data2_g.numpy(), axis=1),
                                     np.expand_dims(data2.cpu().numpy(), axis=1))
    mean0_2, var0_2 = polynomial_mmd(np.expand_dims(data3_g.numpy(), axis=1),
                                     np.expand_dims(data3.cpu().numpy(), axis=1))

    mean0, var0 = polynomial_mmd(np.expand_dims(g_data, axis=1), np.expand_dims(r_data, axis=1))

    # AC: auxiliary classifier only (no twin/MI term)
    G = G_guassian(nz=nz, num_classes=3).cuda()
    D = D_guassian(num_classes=3, AC=True, dis_mlp=dis_mlp).cuda()

    optg = optim.Adam(G.parameters(), lr=0.002, betas=(0.5, 0.999))
    optd = optim.Adam(D.parameters(), lr=0.002, betas=(0.5, 0.999))

    train(data1, data2, data3, nz, G, D, optd, optg, AC=True, MI=False, gan_loss=gan_loss)
    print('AC training done.')

    z = torch.randn(10000, nz).cuda()
    label = torch.zeros(10000).long().cuda()  # class 0
    data1_g = G(z=z, label=label).squeeze().cpu().detach()

    z = torch.randn(10000, nz).cuda()
    label = torch.ones(10000).long().cuda()  # class 1
    data2_g = G(z=z, label=label).squeeze().cpu().detach()

    z = torch.randn(10000, nz).cuda()
    label = torch.ones(10000).long().cuda() * 2  # class 2
    data3_g = G(z=z, label=label).squeeze().cpu().detach()

    df1 = pd.DataFrame()
    df2 = pd.DataFrame()

    df1['score_0'] = data1_g.numpy()
    df1['score_1'] = data2_g.numpy()
    df1['score_2'] = data3_g.numpy()
    g_data = torch.cat([data1_g, data2_g, data3_g], dim=0).numpy()
    np.save(save_path + '/ac_data', g_data)
    df2['marginal'] = g_data

    if not no_graph:
        fig, ax = plt.subplots(1, 1)
        for s in df1.columns:
            df1[s].plot(kind='kde')
        for s in df2.columns:
            df2[s].plot(style='--', kind='kde')
        plt.xlim((-4, 9 + distance * 2))
        ax.legend(["Class_0", "Class_1", "Class_2", "Marginal"])
        plt.title('AC')
        fig.savefig(save_path + '/ac.eps')

    mean1_0, var1_0 = polynomial_mmd(np.expand_dims(data1_g.numpy(), axis=1),
                                     np.expand_dims(data1.cpu().numpy(), axis=1))
    mean1_1, var1_1 = polynomial_mmd(np.expand_dims(data2_g.numpy(), axis=1),
                                     np.expand_dims(data2.cpu().numpy(), axis=1))
    mean1_2, var1_2 = polynomial_mmd(np.expand_dims(data3_g.numpy(), axis=1),
                                     np.expand_dims(data3.cpu().numpy(), axis=1))

    mean1, var1 = polynomial_mmd(np.expand_dims(g_data, axis=1), np.expand_dims(r_data, axis=1))

    #### Projection: projection discriminator (no classifier heads)
    G = G_guassian(nz=nz, num_classes=3).cuda()
    D = D_guassian(num_classes=3, AC=False, dis_mlp=dis_mlp).cuda()

    optg = optim.Adam(G.parameters(), lr=0.002, betas=(0.5, 0.999))
    optd = optim.Adam(D.parameters(), lr=0.002, betas=(0.5, 0.999))

    train(data1, data2, data3, nz, G, D, optd, optg, AC=False, MI=False, gan_loss=gan_loss)
    print('Projection training done.')

    z = torch.randn(10000, nz).cuda()
    label = torch.zeros(10000).long().cuda()  # class 0
    data1_g = G(z=z, label=label).squeeze().cpu().detach()

    z = torch.randn(10000, nz).cuda()
    label = torch.ones(10000).long().cuda()  # class 1
    data2_g = G(z=z, label=label).squeeze().cpu().detach()

    z = torch.randn(10000, nz).cuda()
    label = torch.ones(10000).long().cuda() * 2  # class 2
    data3_g = G(z=z, label=label).squeeze().cpu().detach()

    df1 = pd.DataFrame()
    df2 = pd.DataFrame()

    df1['score_0'] = data1_g.numpy()
    df1['score_1'] = data2_g.numpy()
    df1['score_2'] = data3_g.numpy()
    g_data = torch.cat([data1_g, data2_g, data3_g], dim=0).numpy()
    np.save(save_path + '/projection_data', g_data)
    df2['marginal'] = g_data

    if not no_graph:
        fig, ax = plt.subplots(1, 1)
        for s in df1.columns:
            df1[s].plot(kind='kde')
        for s in df2.columns:
            df2[s].plot(style='--', kind='kde')
        plt.xlim((-4, 9 + distance * 2))
        ax.legend(["Class_0", "Class_1", "Class_2", "Marginal"])
        plt.title('Projection')
        fig.savefig(save_path + '/projection.eps')

    mean2_0, var2_0 = polynomial_mmd(np.expand_dims(data1_g.numpy(), axis=1),
                                     np.expand_dims(data1.cpu().numpy(), axis=1))
    mean2_1, var2_1 = polynomial_mmd(np.expand_dims(data2_g.numpy(), axis=1),
                                     np.expand_dims(data2.cpu().numpy(), axis=1))
    mean2_2, var2_2 = polynomial_mmd(np.expand_dims(data3_g.numpy(), axis=1),
                                     np.expand_dims(data3.cpu().numpy(), axis=1))

    mean2, var2 = polynomial_mmd(np.expand_dims(g_data, axis=1), np.expand_dims(r_data, axis=1))

    result = [str(mean0_0) + ',' + str(var0_0),
              str(mean0_1) + ',' + str(var0_1),
              str(mean0_2) + ',' + str(var0_2),
              str(mean0) + ',' + str(var0),
              str(mean1_0) + ',' + str(var1_0),
              str(mean1_1) + ',' + str(var1_1),
              str(mean1_2) + ',' + str(var1_2),
              str(mean1) + ',' + str(var1),
              str(mean2_0) + ',' + str(var2_0),
              str(mean2_1) + ',' + str(var2_1),
              str(mean2_2) + ',' + str(var2_2),
              str(mean2) + ',' + str(var2)]

    with open(save_path + f'/result_{gan_loss}.txt', 'w') as f:
        for content in result:
            f.write(content + '\n')
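
Example 4 delegates optimization to a train(...) helper that is not shown. Below is a plausible reconstruction, stitched together from the inline loops in Examples 2 and 3: AC adds the auxiliary-classifier terms, MI adds the twin-classifier (TAC) term on the generated labels, and only the gan_loss='bce' branch is sketched; the epoch count and the NotImplementedError guard are assumptions.

import torch
import torch.nn.functional as F

def train(data1, data2, data3, nz, G, D, optd, optg,
          AC=True, MI=False, gan_loss='bce', epochs=20):
    if gan_loss != 'bce':
        raise NotImplementedError('only the bce branch is sketched here')
    for _ in range(epochs):
        for i in range(1000):
            # --- D step ---
            data = torch.cat([data1[128 * i:128 * i + 128],
                              data2[128 * i:128 * i + 128],
                              data3[128 * i:128 * i + 128]], dim=0).unsqueeze(dim=1)
            label = torch.cat([torch.full((128,), k, dtype=torch.long).cuda()
                               for k in range(3)], dim=0)
            if AC:
                d_real, c, _ = D(data)
            else:
                # Projection discriminator conditions on the label directly.
                d_real, c, _ = D(data, label)
            z = torch.randn(256, nz).cuda()
            fake_label = torch.LongTensor(256).random_(3).cuda()
            fake_data = G(z, label=fake_label)
            d_fake, _, mi = D(fake_data) if AC else D(fake_data, fake_label)
            D_loss = F.binary_cross_entropy(d_real, torch.ones(384).cuda()) \
                     + F.binary_cross_entropy(d_fake, torch.zeros(256).cuda())
            if AC:
                D_loss = D_loss + F.cross_entropy(c, label)
            if MI:
                # TAC: D also learns the label posterior of the fakes.
                D_loss = D_loss + F.cross_entropy(mi, fake_label)
            optd.zero_grad()
            D_loss.backward()
            optd.step()

            # --- G step (every 10 D steps) ---
            if i % 10 == 0:
                z = torch.randn(256, nz).cuda()
                fake_label = torch.LongTensor(256).random_(3).cuda()
                fake_data = G(z, label=fake_label)
                d_fake, c, mi = D(fake_data) if AC else D(fake_data, fake_label)
                G_loss = F.binary_cross_entropy(d_fake, torch.ones(256).cuda())
                if AC:
                    G_loss = G_loss + F.cross_entropy(c, fake_label)
                if MI:
                    # TAC subtracts the twin-classifier term.
                    G_loss = G_loss - F.cross_entropy(mi, fake_label)
                optg.zero_grad()
                G_loss.backward()
                optg.step()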
Example 5
df2['marginal'] = g_data

##### Plot the generated conditional distributions
fig, ax = plt.subplots(1, 1)
for s in df1.columns:
    df1[s].plot(kind='kde')

for s in df2.columns:
    df2[s].plot(style='--', kind='kde')

plt.xlim((-4, 9 + distance * 2))
ax.legend(["Class_0", "Class_1", "Class_2", "Marginal"])
plt.title(name)
fig.savefig(save_path + '/' + name + '.eps')

mean0_0, var0_0 = polynomial_mmd(np.expand_dims(data1_g.numpy(), axis=1),
                                 np.expand_dims(data1.cpu().numpy(), axis=1))
mean0_1, var0_1 = polynomial_mmd(np.expand_dims(data2_g.numpy(), axis=1),
                                 np.expand_dims(data2.cpu().numpy(), axis=1))
mean0_2, var0_2 = polynomial_mmd(np.expand_dims(data3_g.numpy(), axis=1),
                                 np.expand_dims(data3.cpu().numpy(), axis=1))
mean0, var0 = polynomial_mmd(np.expand_dims(g_data, axis=1),
                             np.expand_dims(r_data, axis=1))
print('MMD of class 0:', mean0_0)
print('MMD of class 1:', mean0_1)
print('MMD of class 2:', mean0_2)
print('MMD of marginal:', mean0)

with open(args.name, 'w') as f:
    f.write(
        f'AC:\nclass 0: {mean0_0}\nclass 1: {mean0_1}\nclass 2: {mean0_2}\nmarginal: {mean0}\n'
    )
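
Example 5 is a fragment: it assumes that name, save_path, distance, the generated/real tensors, and an argparse namespace args are already in scope. A hypothetical parser consistent with the fragment's use of args.name (the default filename is an illustrative assumption):

import argparse

parser = argparse.ArgumentParser()
parser.add_argument('--name', default='ac_results.txt',
                    help='file the final MMD summary is written to')
args = parser.parse_args()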