Example #1
    # NOTE: the snippet is truncated above; the head of the Compose is
    # reconstructed here (transforms.ToTensor() is an assumption) so that
    # test_transf, used below, is defined.
    test_transf = transforms.Compose([
        transforms.ToTensor(),
        transforms.Normalize((0.5, 0.5, 0.5), (0.5, 0.5, 0.5))
    ])
    valid_set = Subset(
        datasets.CIFAR10(root='./cifardata',
                         train=True,
                         transform=test_transf,
                         download=True), idx[:10000])
    valid_loader = DataLoader(valid_set, batch_size=batch_size, shuffle=True)
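    # For context, a minimal sketch of the complementary training split,
    # assuming idx is a shuffled permutation of the 50,000 CIFAR-10 training
    # indices and train_transf is the training-time transform; both names are
    # assumptions, only idx[:10000] appears above.
    train_set = Subset(
        datasets.CIFAR10(root='./cifardata',
                         train=True,
                         transform=train_transf,
                         download=True), idx[10000:])
    train_loader = DataLoader(train_set, batch_size=batch_size, shuffle=True)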

    m_t_losses = []
    m_t_accs = []
    m_v_losses = []
    m_v_accs = []

    for ii in range(repeats):
        model = SiCNN_3(f_in, size, ratio, nratio, srange)
        model.to(device)

        train_loss = []
        train_acc = []
        valid_loss = []
        valid_acc = []
        for epoch in range(1, nb_epochs + 1):
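            # one optimization pass, then re-evaluate on both loaders so the
            # logged metrics reflect the end-of-epoch weights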
            train_l, train_a = train(model, train_loader, learning_rate,
                                     criterion, epoch, batch_log, device)
            train_l, train_a = test(model, train_loader, criterion, epoch,
                                    batch_log, device)
            valid_l, valid_a = test(model, valid_loader, criterion, epoch,
                                    batch_log, device)
            train_loss.append(train_l)
            train_acc.append(train_a)
Example #2
    test_set = datasets.MNIST(root=root,
                              train=False,
                              transform=uniform,
                              download=True)
    test_loader = DataLoader(dataset=test_set,
                             batch_size=batch_size,
                             shuffle=False,
                             num_workers=1,
                             pin_memory=True)
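    # pin_memory=True keeps batches in page-locked RAM, which speeds up
    # host-to-GPU copies when training on CUDA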

    s_test_losses = []
    s_test_accs = []

    for ii in range(repeats):
        model = SiCNN_3(f_in, size, ratio, nratio, srange, padding)
        model.to(device)

        for epoch in range(1, nb_epochs + 1):
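            # as in Example #1: train, then re-score the train set with the
            # updated weights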
            train_l, train_a = train(model, train_loader, learning_rate,
                                     criterion, epoch, batch_log, device)
            train_l, train_a = test(model, train_loader, criterion, epoch,
                                    batch_log, device)

        test_l, test_a = test(model, test_loader, criterion, epoch, batch_log,
                              device)

        s_test_losses.append(test_l)
        s_test_accs.append(test_a)

        pickle.dump(model, log)
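Pickling the whole model object, as above, ties the file to the exact SiCNN_3 class definition. A more portable sketch saves only the parameters (the filename here is hypothetical):

import torch

torch.save(model.state_dict(), "sicnn_run.pt")      # parameters only
restored = SiCNN_3(f_in, size, ratio, nratio, srange, padding)
restored.load_state_dict(torch.load("sicnn_run.pt"))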
Example #3

log = open("cifar_gaussian_log_results.pickle", "wb")

scales = [0.40, 0.52, 0.64, 0.76, 0.88, 1.0, 1.12, 1.24, 1.36, 1.48, 1.60]

root = './cifardata'
if not os.path.exists(root):
    os.mkdir(root)

criterion = nn.CrossEntropyLoss()

models = [
    #kanazawa(f_in, ratio, nratio, srange=0),
    #kanazawa(f_in, ratio, nratio, srange),
    SiCNN_3(f_in, size, ratio, nratio, srange=0),
    SiCNN_3(f_in, size, ratio, nratio, srange),
    SiCNN_3(f_in, filter_size(size, ratio, nratio), 1 / ratio, nratio, srange),
    SiCNN_3big(f_in, size, ratio, nratio, srange=0),
    SiCNN_3(f_in, size, ratio=2**(1 / 3), nratio=6, srange=0),
    SiCNN_3(f_in, size, ratio=2**(1 / 3), nratio=6, srange=srange),
    SiCNN_3(f_in,
            filter_size(size, 2**(1 / 3), 6),
            ratio=2**(-1 / 3),
            nratio=6,
            srange=srange)
]
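The scales list above spans rescaling factors from 0.40 to 1.60. A minimal sketch of how such a factor could be turned into a test-time transform; the helper below is an assumption, not part of the snippet:

from torchvision import transforms

def rescale_transform(s, out_size=32):
    # resize by factor s, then center-crop (zero-padding when smaller)
    # back to out_size, so every scale yields same-sized inputs
    return transforms.Compose([
        transforms.Resize(max(1, round(out_size * s))),
        transforms.CenterCrop(out_size),
        transforms.ToTensor(),
        transforms.Normalize((0.5, 0.5, 0.5), (0.5, 0.5, 0.5)),
    ])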

for m in range(len(models)):
    locals()['test_losses_{0}'.format(m)] = []
    locals()['test_accs_{0}'.format(m)] = []
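Creating variables through locals() works at module scope but is fragile: it silently fails inside functions and defeats static analysis. An equivalent sketch with a plain dict:

results = {m: {"test_losses": [], "test_accs": []} for m in range(len(models))}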
Example #4
# NOTE: the snippet is truncated above; the DataLoader head is reconstructed
# from the orphaned keyword arguments (train_set / train_loader names assumed).
train_loader = DataLoader(dataset=train_set,
                          batch_size=batch_size,
                          shuffle=True,
                          num_workers=1,
                          pin_memory=True)

test_set = datasets.MNIST(root=root,
                          train=False,
                          transform=uniform,
                          download=True)
test_loader = DataLoader(dataset=test_set,
                         batch_size=batch_size,
                         shuffle=False)

criterion = nn.CrossEntropyLoss()

models = [
    SiCNN_3(f_in=1, size=3, ratio=2**(2 / 3), nratio=3, srange=2),
    SiCNN_3(1, filter_size(3, 2**(2 / 3), 3), 2**(-2 / 3), 3, 2),
    SiCNN_3(1, 5, 2**(2 / 3), 3, 2),
    SiCNN_3(1, filter_size(5, 2**(2 / 3), 3), 2**(-2 / 3), 3, 2, padding=1),
    SiCNN_3(1, 5, 2**(1 / 3), 6, 2),
    SiCNN_3(1, filter_size(5, 2**(1 / 3), 6), 2**(-1 / 3), 6, 2, padding=1)
]
pickle.dump(len(models), log)
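Dumping len(models) first lets a reader of the pickle stream know how many result records follow. A minimal readback sketch (the filename is assumed):

import pickle

with open("mnist_log_results.pickle", "rb") as f:
    n_models = pickle.load(f)
    records = [pickle.load(f) for _ in range(n_models)]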

for ii, model in enumerate(models):
    print("model {}".format(ii))

    model.to(device)

    train_loss = []
    train_acc = []
Example #5
dataiter = iter(test_loader)
images, labels = next(dataiter)  # the .next() method was removed; use the builtin next()

print(' '.join('%5s' % classes[labels[j]] for j in range(4)))
img = torchvision.utils.make_grid(images)
img = img / 2 + 0.5     # unnormalize
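# Normalize mapped x to (x - 0.5) / 0.5, so 0.5 * img + 0.5 inverts it exactly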
npimg = img.numpy()
plt.imshow(np.transpose(npimg, (1, 2, 0)))
plt.show()

"""

criterion = nn.CrossEntropyLoss()

models = [
    SiCNN_3(f_in, size, ratio, nratio, srange, padding, nb_classes),
    Model(f_in, size, ratio, nratio, srange, padding, nb_classes)
]

pickle.dump(len(models), log)

for model in models: 
    model.to(device)

    train_loss = []
    train_acc = []
    valid_loss = []
    valid_acc = []

    for epoch in range(1, nb_epochs + 1):
        train_l, train_a = train(model, train_loader, learning_rate,
                                 criterion, epoch, batch_log, device)