Example #1
def test4(times=6, std=1):
    print('-' * 10)
    print('test 4')
    result = []
    data_size = 1000000
    train_set, val_set, test_set = load.get_CIFAR(False, None)
    for k in range(times):
        print('train model')
        use_cuda = torch.cuda.is_available()
        net = CIFAR_Net(str(k))
        cost = torch.nn.CrossEntropyLoss()
        net.train()
        n_epochs = 100
        optimizer = torch.optim.Adam(net.parameters(), lr=0.0005, weight_decay=0.001)
        start = time.time()
        if use_cuda:
            net.cuda()
            cost.cuda()
            # net = torch.nn.DataParallel(net, device_ids=[2])
            # cost = torch.nn.DataParallel(cost, device_ids=[2])
            cudnn.benchmark = True
        for i in range(n_epochs):
            train(net=net,
                  dataloader=[train_set, val_set],
                  cost=cost,
                  optimizer=optimizer,
                  epoch=i,
                  n_epochs=n_epochs,
                  use_cuda=use_cuda)
        end = time.time()
        print('training finished, took {}s'.format(end - start))

        temp = test_random_N(net=net, data_size=data_size, batch_size=1000, channel=3, dim=32, use_cuda=use_cuda,
                             mean=0,
                             std=std)
        result.append(temp)

    with open("random_test_4_{}.txt".format(int(std)), "w") as f:
        for i, item in enumerate(result):
            f.write("-" * 10)
            f.write("\n number:{} \n".format(i))
            for temp in item.keys():
                f.write("label is {}, and {}% for all data \n".format(CIFAR[temp], 100 * item[temp] / data_size))

    plt.figure()
    plt.xlabel('label')
    plt.ylabel('ratio')
    plt.title('N(0,{}) {} times comparison'.format(std, times))
    x = np.linspace(0, 9, 10).astype(int)
    plt.xticks(x, np.array(CIFAR), rotation=45)
    plt.yticks(np.linspace(0, 100, 20))
    for j, item in enumerate(result):
        y = np.zeros(10)
        for i in item.keys():
            y[i] = 100 * item[i] / data_size
        plt.plot(x, y)

    # note: int(std) in the filename only distinguishes the two settings used here, std = 1 and std = 0.001
    plt.savefig(pic_path + 'test4_{}.png'.format(int(std)))
    plt.close()
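test4 relies on test_random_N, which is not part of this listing. A minimal sketch of what it presumably does, judging from the call signature and from how its return value is consumed (the helper name test_random_N_sketch and the dict return format are assumptions, not the original implementation): push batches of N(mean, std) noise images through the trained net and count how often each class is predicted.

import torch

def test_random_N_sketch(net, data_size, batch_size, channel, dim, use_cuda, mean=0.0, std=1.0):
    # count how often each class is predicted for pure Gaussian-noise inputs
    counts = {}
    net.eval()
    with torch.no_grad():
        for _ in range(data_size // batch_size):
            x = torch.randn(batch_size, channel, dim, dim) * std + mean
            if use_cuda:
                x = x.cuda()
            for label in net(x).argmax(dim=1).cpu().numpy():
                counts[int(label)] = counts.get(int(label), 0) + 1
    return counts  # dict: class index -> number of noise samples assigned to it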
Example #2
def C_CIFAR(times=3):
    use_cuda = torch.cuda.is_available()

    epochs = net_epochs['CI']
    for j, key in enumerate(net_type):
        temps = []
        for i in range(times):
            # CrossEntropyLoss applies softmax internally, i.e. this is the softmax loss
            cost = torch.nn.CrossEntropyLoss()
            # temp = []
            model = CIFAR_Net(name='CIFAR_net')

            # start = time.time()
            optimizer = torch.optim.Adam(model.parameters(),
                                         lr=0.0005,
                                         weight_decay=0.001)
            if key == 'M0':
                train_set, val_set, test_set = load.get_CIFAR(random=False,
                                                              p=None)
            if key == 'M1':
                p = M1(32)
                # ps.append(p)
                train_set, val_set, test_set = load.get_CIFAR(random=True, p=p)
            if key == 'M2':
                p = M2(32)
                # ps.append(p)
                train_set, val_set, test_set = load.get_CIFAR(random=True, p=p)

            train_time, train_loss, train_acc, val_loss, val_acc, norm, angle, weight = train_model(
                net=model,
                cost=cost,
                optimizer=optimizer,
                n_epochs=epochs,
                train_set=train_set,
                val_set=val_set,
                use_cuda=use_cuda)
            temps.append(weight)
        # assumes times == 3: compare the three weight trajectories pairwise
        temp1 = temps[0]
        temp2 = temps[1]
        temp3 = temps[2]
        l = len(temp1)  # number of per-epoch weight snapshots
        n_01 = []
        n_02 = []
        n_12 = []
        a_01 = []
        a_02 = []
        a_12 = []
        for i in range(l):
            c_01 = temp2[i] - temp1[i]
            c_02 = temp3[i] - temp1[i]
            c_12 = temp3[i] - temp2[i]
            n_01.append(np.linalg.norm(c_01, ord=2) / len(c_01))
            n_02.append(np.linalg.norm(c_02, ord=2) / len(c_02))
            n_12.append(np.linalg.norm(c_12, ord=2) / len(c_12))
            l0 = np.linalg.norm(temp1[i], ord=2)
            l1 = np.linalg.norm(temp2[i], ord=2)
            l2 = np.linalg.norm(temp3[i], ord=2)
            cos01 = np.clip(temp1[i].dot(temp2[i]) / (l0 * l1), -1.0, 1.0)
            cos02 = np.clip(temp1[i].dot(temp3[i]) / (l0 * l2), -1.0, 1.0)
            cos12 = np.clip(temp2[i].dot(temp3[i]) / (l1 * l2), -1.0, 1.0)
            a_01.append(np.arccos(cos01))
            a_02.append(np.arccos(cos02))
            a_12.append(np.arccos(cos12))
        x = np.linspace(0, epochs, epochs)
        plt.figure(1)
        plt.xlabel('epochs')
        plt.ylabel('norm value')
        plt.title('comparison of different weights')
        plt.plot(x, np.array(n_01), label='0 vs 1')
        plt.plot(x, np.array(n_02), label='0 vs 2')
        plt.plot(x, np.array(n_12), label='1 vs 2')
        plt.legend(loc='upper right')
        plt.savefig(pic_path + 'CIFAR_{}_3_norm.png'.format(key))
        plt.close()

        plt.figure(2)
        plt.xlabel('epochs')
        plt.ylabel('angle value')
        plt.title('comparison of different weights')
        plt.plot(x, np.array(a_01), label='0 vs 1')
        plt.plot(x, np.array(a_02), label='0 vs 2')
        plt.plot(x, np.array(a_12), label='1 vs 2')
        plt.legend(loc='upper right')
        plt.savefig(pic_path + 'CIFAR_{}_3_angle.png'.format(key))
        plt.close()
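The per-epoch comparison inside C_CIFAR reduces to a length-normalised L2 distance and the angle between two flattened weight snapshots. A standalone sketch of that computation (weight_gap is a hypothetical helper, not part of the original code):

import numpy as np

def weight_gap(w_a, w_b):
    # w_a, w_b: flattened weight vectors taken at the same epoch from two runs
    diff_norm = np.linalg.norm(w_b - w_a, ord=2) / len(w_a)  # length-normalised L2 gap
    cos = w_a.dot(w_b) / (np.linalg.norm(w_a, ord=2) * np.linalg.norm(w_b, ord=2))
    angle = np.arccos(np.clip(cos, -1.0, 1.0))  # radians; clip guards against rounding error
    return diff_norm, angle

For example, weight_gap(temp1[i], temp2[i]) reproduces the n_01[i] and a_01[i] values computed in the loop above.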
Example #3
def CIFAR():
    use_cuda = torch.cuda.is_available()
    # statistic = {}
    # for key in net_type:
    #     statistic[key] = []  # time,train time, test time, test loss, test accuracy
    # ps = []
    temps = []
    # plt.figure(1)
    # plt.xlabel('epochs')
    # plt.ylabel('loss value')
    # plt.title('training loss vs validation loss')
    # plt.figure(2)
    # plt.xlabel('epochs')
    # plt.ylabel('accuracy probability')
    # plt.title('training accuracy vs validation accuracy')
    epochs = net_epochs['CIFAR']
    for j, key in enumerate(net_type):
        # CrossEntropyLoss applies softmax internally, i.e. this is the softmax loss
        cost = torch.nn.CrossEntropyLoss()
        # temp = []
        model = CIFAR_Net(name='CIFAR_net')

        # start = time.time()
        optimizer = torch.optim.Adam(model.parameters(),
                                     lr=0.0005,
                                     weight_decay=0.001)
        if key == 'M0':
            train_set, val_set, test_set = load.get_CIFAR(random=False, p=None)
        if key == 'M1':
            p = M1(32)
            # ps.append(p)
            train_set, val_set, test_set = load.get_CIFAR(random=True, p=p)
        if key == 'M2':
            p = M2(32)
            # ps.append(p)
            train_set, val_set, test_set = load.get_CIFAR(random=True, p=p)

        # train_model(net=model,
        #             cost=cost,
        #             optimizer=optimizer,
        #             n_epochs=epochs,
        #             train_set=train_set,
        #             val_set=val_set,
        #             use_cuda=use_cuda)
        plt.figure(3)
        plt.xlabel('epochs')
        plt.ylabel('norm value')
        plt.title('difference value of norm {}'.format(key))
        plt.figure(4)
        plt.xlabel('epochs')
        plt.ylabel('angle value')
        plt.title('difference value of angle {}'.format(key))
        train_time, train_loss, train_acc, val_loss, val_acc, norm, angle, weight = train_model(
            net=model,
            cost=cost,
            optimizer=optimizer,
            n_epochs=epochs,
            train_set=train_set,
            val_set=val_set,
            use_cuda=use_cuda)
        temps.append(weight)
        # x = np.linspace(0, epochs, epochs)
        # plt.figure(1)
        # plt.plot(x, train_loss, 'g' + line[j], label='M' + str(j) + ' training loss')
        # plt.plot(x, val_loss, 'b' + line[j], label='M' + str(j) + ' validation loss')
        # plt.legend(loc='upper right')
        # plt.figure(2)
        # plt.plot(x, train_acc, 'g' + line[j], label='M' + str(j) + ' training accuracy')
        # plt.plot(x, val_acc, 'b' + line[j], label='M' + str(j) + ' validation accuracy')
        # plt.legend(loc='lower right')
        # plt.figure(3)
        # plt.plot(x, norm)
        # plt.savefig(pic_path + key + '_CIFAR_norm.png')
        # plt.close()
        # plt.figure(4)
        # plt.plot(x, angle)
        # plt.savefig(pic_path + key + '_CIFAR_angle.png')
        # plt.close()

        # end = time.time()
        # temp.append(start - end)
        # temp.append(train_time)
        # test_time, loss, acc = test_model(net=model,
        #                                   cost=cost,
        #                                   test_set=test_set,
        #                                   use_cuda=use_cuda)
        # temp.append(test_time)
        # temp.append(loss)
        # temp.append(acc)
        # temp.append(train_loss[-1])
        # temp.append(train_acc[-1])
        # temp.append(val_loss[-1])
        # temp.append(val_acc[-1])
        # statistic[key].append(temp)
    # plt.figure(1)
    # plt.savefig(pic_path + 'CIFAR_loss.png')
    # plt.close()
    # plt.figure(2)
    # plt.savefig(pic_path + 'CIFAR_acc.png')
    # plt.close()

    # angle (in radians) between the first- and last-epoch weight vectors of each run
    for temp in temps[:3]:
        l0 = np.linalg.norm(temp[0], ord=2)
        l_end = np.linalg.norm(temp[-1], ord=2)
        cos = np.clip(temp[0].dot(temp[-1]) / (l0 * l_end), -1.0, 1.0)
        print(np.arccos(cos))
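train_model is assumed to return one flattened weight vector per epoch (the weight list used above). A plausible way to take such a snapshot (snapshot_weights is a hypothetical helper, an assumption about what train_model records internally):

import torch

def snapshot_weights(net):
    # concatenate all parameters into a single 1-D numpy vector; call once per epoch
    with torch.no_grad():
        return torch.cat([p.detach().reshape(-1) for p in net.parameters()]).cpu().numpy()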
Example #4
def CIFAR(times=3, retrain=False):
    use_cuda = torch.cuda.is_available()
    reuse = False
    statistic = {}
    for key in net_type.keys():
        statistic[key] = [
        ]  # time,train time, test time, test loss, test accuracy
    if not retrain:
        if os.path.exists('CIFAR.csv'):
            ps = np.loadtxt('CIFAR.csv', delimiter=',').astype(int)
            reuse = True
        else:
            ps = []
    else:
        ps = []
    for i in range(times):
        plt.figure(1)
        plt.xlabel('epochs')
        plt.ylabel('loss value')
        plt.title('training loss vs validation loss ' + str(i))
        plt.figure(2)
        plt.xlabel('epochs')
        plt.ylabel('accuracy probability')
        plt.title('training accuracy vs validation accuracy ' + str(i))
        for j, key in enumerate(net_type.keys()):
            # CrossEntropyLoss applies softmax internally, i.e. this is the softmax loss
            cost = torch.nn.CrossEntropyLoss()
            temp = []
            model = CIFAR_Net(name='CIFAR_net')
            epochs = net_epochs['CIFAR']
            start = time.time()
            optimizer = torch.optim.Adam(model.parameters(),
                                         lr=0.0005,
                                         weight_decay=0.001)
            if key == 'M0':

                train_set, val_set, test_set = load.get_CIFAR(random=False,
                                                              p=None)
            if key == 'M1':
                if reuse:
                    p = ps[i * 2 + j - 1]
                else:
                    p = M1(32)
                    ps.append(p)
                train_set, val_set, test_set = load.get_CIFAR(random=True, p=p)
            if key == 'M2':

                if reuse:
                    p = ps[i * 2 + j - 1]
                else:
                    p = M2(32)
                    ps.append(p)
                train_set, val_set, test_set = load.get_CIFAR(random=True, p=p)
            train_time, train_loss, train_acc, val_loss, val_acc = train_model(
                net=model,
                cost=cost,
                optimizer=optimizer,
                n_epochs=epochs,
                train_set=train_set,
                val_set=val_set,
                use_cuda=use_cuda,
                type=key,
                index=i,
                retrain=retrain)

            x = np.linspace(0, epochs, epochs)
            plt.figure(1)
            plt.plot(x,
                     train_loss,
                     'g' + line[j],
                     label='M' + str(j) + ' training loss')
            plt.plot(x,
                     val_loss,
                     'b' + line[j],
                     label='M' + str(j) + ' validation loss')
            plt.legend(loc='upper right')
            plt.figure(2)
            plt.plot(x,
                     train_acc,
                     'g' + line[j],
                     label='M' + str(j) + ' training accuracy')
            plt.plot(x,
                     val_acc,
                     'b' + line[j],
                     label='M' + str(j) + ' validation accuracy')
            plt.legend(loc='lower right')

            end = time.time()
            temp.append(end - start)  # total wall-clock time for this run
            temp.append(train_time)
            test_time, loss, acc = test_model(net=model,
                                              cost=cost,
                                              test_set=test_set,
                                              use_cuda=use_cuda)
            temp.append(test_time)
            temp.append(loss)
            temp.append(acc)
            temp.append(train_loss[-1])
            temp.append(train_acc[-1])
            temp.append(val_loss[-1])
            temp.append(val_acc[-1])
            statistic[key].append(temp)

        plt.figure(1)
        plt.savefig(pic_root + 'CIFAR_' + str(i) + '_loss.png')
        plt.close()
        plt.figure(2)
        plt.savefig(pic_root + 'CIFAR_' + str(i) + '_acc.png')
        plt.close()
    if not reuse:
        ps = np.array(ps)
        np.savetxt('CIFAR.csv', ps, delimiter=',')
    with open("CIFAR.txt", "w") as f:
        for key in statistic.keys():
            f.write('type is {} \n'.format(key))
            for item in statistic[key]:
                f.write(
                    'this process spends totally {}s, train spends {}s, test spends {}s, '
                    'test loss is {}, test accuracy is {}; train loss is {}, train accuracy is {}, '
                    'validation loss is {}, validation accuracy is {}\n'.format(
                        item[0], item[1], item[2], item[3], item[4],
                        item[5], item[6], item[7], item[8]))
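The retrain/reuse branch stores the pixel permutations row-wise in CIFAR.csv. A small round-trip sketch under the assumption that M1(32)/M2(32) return 1-D integer index arrays of equal length (np.random.permutation is used here purely as a stand-in for those helpers):

import numpy as np

ps = [np.random.permutation(32 * 32) for _ in range(4)]       # stand-in for M1(32)/M2(32) outputs
np.savetxt('CIFAR.csv', np.array(ps), delimiter=',')          # one permutation per row, as above
reloaded = np.loadtxt('CIFAR.csv', delimiter=',').astype(int)
assert all((row == p).all() for row, p in zip(reloaded, ps))  # integers survive the float round-trip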
Example #5
def plot_random_CIFAR(mean=0, std=1):
    print('-' * 10)
    print('plot_random_CIFAR')
    print('train model')
    use_cuda = torch.cuda.is_available()
    net = CIFAR_Net('CIFAR_Net')
    cost = torch.nn.CrossEntropyLoss()
    net.train()
    n_epochs = 100
    optimizer = torch.optim.Adam(net.parameters(), lr=0.0005, weight_decay=0.001)
    start = time.time()
    if use_cuda:
        net.cuda()
        cost.cuda()
        # net = torch.nn.DataParallel(net, device_ids=[2])
        # cost = torch.nn.DataParallel(cost, device_ids=[2])
        cudnn.benchmark = True
    train_set, val_set, test_set = load.get_CIFAR(False, None)
    for i in range(n_epochs):
        train(net=net,
              dataloader=[train_set, val_set],
              cost=cost,
              optimizer=optimizer,
              epoch=i,
              n_epochs=n_epochs,
              use_cuda=use_cuda)
    end = time.time()
    print('training finished, took {}s'.format(end - start))

    data_size = 1000000
    counts_N = test_random_N(net=net, data_size=data_size, batch_size=1000,
                             channel=3, dim=32, use_cuda=use_cuda,
                             mean=mean, std=std)
    counts_U = test_random_U(net=net, data_size=data_size, batch_size=1000,
                             channel=3, dim=32, use_cuda=use_cuda)

    plt.figure(1)
    plt.xlabel('label')
    plt.ylabel('ratio')
    plt.title('U[-1,1] random input')
    x = np.linspace(0, 9, 10).astype(int)
    y = np.zeros(10)
    plt.xticks(x, np.array(CIFAR), rotation=45)
    plt.yticks(np.linspace(0, 100, 20))
    for i in counts_U.keys():  # figure 1 is the U[-1,1] plot, so use the uniform-noise counts
        y[i] = 100 * counts_U[i] / data_size
    plt.bar(x, y, width=0.35, edgecolor='white', align='center')

    for a, b in zip(x, y):
        plt.text(a + 0.3, b + 0.05, '%.2f' % b, ha='center', va='bottom')
    plt.savefig(pic_path + 'CIFAR_U.png')
    plt.close()
    plt.figure(2)
    plt.xlabel('label')
    plt.ylabel('ratio')
    plt.title('N({},{}) random input'.format(mean, std))
    plt.xticks(x, np.array(CIFAR), rotation=45)
    plt.yticks(np.linspace(0, 100, 20))
    y = np.zeros(10)
    for i in counts_N.keys():  # figure 2 is the N(mean,std) plot, so use the Gaussian counts
        y[i] = 100 * counts_N[i] / data_size
    plt.bar(x, y, width=0.35, edgecolor='white', align='center')

    for a, b in zip(x, y):
        plt.text(a + 0.3, b + 0.05, '%.2f' % b, ha='center', va='bottom')
    plt.savefig(pic_path + 'CIFAR_N.png')
    plt.close()
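test_random_U is the uniform-noise counterpart of the Gaussian helper sketched after Example #1; a minimal sketch of its presumed behaviour (the helper name test_random_U_sketch and the dict return format are assumptions):

import torch

def test_random_U_sketch(net, data_size, batch_size, channel, dim, use_cuda):
    # label counts for inputs drawn uniformly from [-1, 1]
    counts = {}
    net.eval()
    with torch.no_grad():
        for _ in range(data_size // batch_size):
            x = torch.rand(batch_size, channel, dim, dim) * 2 - 1  # U[-1, 1] noise images
            if use_cuda:
                x = x.cuda()
            for label in net(x).argmax(dim=1).cpu().numpy():
                counts[int(label)] = counts.get(int(label), 0) + 1
    return counts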