Example #1
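# imports assumed by these snippets (following SpykeTorch's example scripts; not shown in the source):
# import os, torch, torchvision
# import SpykeTorch.utils as utils
# import SpykeTorch.snn as snn
# import SpykeTorch.functional as sf
# from torchvision.datasets import MNIST, ImageFolder
# from torch.utils.data import DataLoader
# from sklearn import svm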
if __name__ == "__main__":

    # kernels = [ utils.DoGKernel(3,1,2), utils.DoGKernel(3,2,1),
    #             utils.OnCenter(3), utils.OffCenter(3)]

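    # an on-center/off-center DoG pair: DoGKernel(window_size, sigma1, sigma2)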
    kernels = [utils.DoGKernel(3,1,2), utils.DoGKernel(3,2,1)]


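    # combine the kernels into a single filtering stage (input padding 6, firing threshold 50)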
    filter = utils.Filter(kernels, padding=6, thresholds=50)

    transform = InputTransform(filter)

    data_root = 'data/'

    MNIST_train = utils.CacheDataset(MNIST(root=data_root, train=True, download=True, transform=transform)) # 60000 x 30 x 30
    MNIST_test = utils.CacheDataset(MNIST(root=data_root, train=False, download=True, transform=transform)) # 10000 x 30 x 30

    MNIST_loader = DataLoader(MNIST_train, batch_size=1000, shuffle=True)
    MNIST_test_loader = DataLoader(MNIST_test, batch_size=1000, shuffle=False)


    net = CTNN()
    clf = svm.SVC()

    net = train(net, MNIST_loader)
    torch.save(net.state_dict(), "./MNISTcheckpoint.pt")
    net.load_state_dict(torch.load("./MNISTcheckpoint.pt"))
    train_outputs, train_y = inference(net, MNIST_loader)
    test_outputs, test_y = inference(net, MNIST_test_loader)
    train_outputs, test_outputs = preprocess(train_outputs, test_outputs)
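
    # likely continuation (the snippet ends here): fit the SVM readout on the
    # extracted spike features and score it on the test set
    clf.fit(train_outputs, train_y)
    print("test accuracy:", clf.score(test_outputs, test_y))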
Example #2
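# a multi-scale DoG filter bank: on-/off-center pairs at window sizes 3, 7, and 13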
kernels = [
    utils.DoGKernel(3, 3 / 9, 6 / 9),
    utils.DoGKernel(3, 6 / 9, 3 / 9),
    utils.DoGKernel(7, 7 / 9, 14 / 9),
    utils.DoGKernel(7, 14 / 9, 7 / 9),
    utils.DoGKernel(13, 13 / 9, 26 / 9),
    utils.DoGKernel(13, 26 / 9, 13 / 9)
]
filter = utils.Filter(kernels, padding=6, thresholds=50)
s1c1 = S1C1Transform(filter)

data_root = "data"
MNIST_train = utils.CacheDataset(
    torchvision.datasets.MNIST(root=data_root,
                               train=True,
                               download=True,
                               transform=s1c1))
MNIST_test = utils.CacheDataset(
    torchvision.datasets.MNIST(root=data_root,
                               train=False,
                               download=True,
                               transform=s1c1))
MNIST_loader = DataLoader(MNIST_train, batch_size=1000, shuffle=False)
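# evaluate the whole test set in a single batch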
MNIST_testLoader = DataLoader(MNIST_test,
                              batch_size=len(MNIST_test),
                              shuffle=False)

mozafari = MozafariMNIST2018()
if use_cuda:
    mozafari.cuda()
Example #3
kernels = [  # list opener truncated in the source; four oblique Gabor orientations
    utils.GaborKernel(5, 45 + 22.5),
    utils.GaborKernel(5, 90 + 22.5),
    utils.GaborKernel(5, 135 + 22.5),
    utils.GaborKernel(5, 180 + 22.5)
]

filter = utils.Filter(kernels, use_abs=True)
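# lateral inhibition strengths per radius; nearer neighbors are inhibited more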
lateral_inhibition = utils.LateralIntencityInhibition(
    [0.15, 0.12, 0.1, 0.07, 0.05])

task = "Caltech"
use_cuda = True

if task == "Caltech":
    s1c1 = S1C1Transform(filter, 7, 6, lateral_inhibition)
    trainsetfolder = utils.CacheDataset(ImageFolder("facemotortrain", s1c1))
    testsetfolder = utils.CacheDataset(ImageFolder("facemotortest", s1c1))
    mozafari = Mozafari2018(4, 10, 2, (17, 17), 42, (0.005, -0.0025),
                            (-0.005, 0.0005), 0.5)
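    # argument layout appears to be: input channels, features per class, classes,
    # kernel size, firing threshold, (LTP, LTD) rates, anti-STDP rates, dropout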
    trainset = DataLoader(trainsetfolder,
                          batch_size=len(trainsetfolder),
                          shuffle=True)
    testset = DataLoader(testsetfolder,
                         batch_size=len(testsetfolder),
                         shuffle=True)
    max_epoch = 400
elif task == "ETH":
    s1c1 = S1C1Transform(filter, 5, 4, lateral_inhibition)
    mozafari = Mozafari2018(4, 10, 8, (31, 31), 160, (0.01, -0.0035),
                            (-0.01, 0.0006), 0.4)
Example #4
dataset = ImageFolder("D:\PyCharm 2020.3.5\pythonProject\dataset\eth",
                      transform)  # adding transform to the dataset

# splitting training and testing sets
indices = list(range(len(dataset)))
random.shuffle(indices)
split_point = int(0.75 * len(indices))
train_indices = indices[:split_point]
test_indices = indices[split_point:]
print("Size of the training set:", len(train_indices))
print("Size of the  testing set:", len(test_indices))
from torch.utils.data import DataLoader, SubsetRandomSampler

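# CacheDataset memoizes transformed samples, so the input encoding runs only once per image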
dataset = utils.CacheDataset(dataset)
train_loader = DataLoader(dataset, sampler=SubsetRandomSampler(train_indices))
test_loader = DataLoader(dataset, sampler=SubsetRandomSampler(test_indices))

import SpykeTorch.snn as snn

pool = snn.Pooling(kernel_size=3, stride=2)  # pool the spike wave before convolution
conv = snn.Convolution(in_channels=4, out_channels=20, kernel_size=30)  # 20 feature maps over the 4 input channels
stdp = snn.STDP(conv_layer=conv, learning_rate=(0.05, -0.015))  # (LTP, LTD) rates
for iteration in range(300):
    print('\rIteration:', iteration, end="")
    for data, _ in train_loader:
        for x in data:
            x = pool(x)
            p = conv(x)
            o, p = sf.fire(p, 20, return_thresholded_potentials=True)
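            # likely continuation (cut off in the source): pick winner neurons and
            # apply the STDP update, as in the SpykeTorch tutorial
            winners = sf.get_k_winners(p, kwta=1, inhibition_radius=0, spikes=o)
            stdp(x, p, o, winners)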
Example #5

        # tail of the input transform's __call__ (the snippet starts mid-method):
        # binarize the latency-coded spike wave into 0/1 byte spikes
        temporal_image = self.temporal_transform(image)
        return temporal_image.sign().byte()


kernels = [
    utils.DoGKernel(7, 1, 2),
    utils.DoGKernel(7, 2, 1),
]
filter = utils.Filter(kernels, padding=3, thresholds=50, type="gray")
s1 = S1Transform(filter)

data_root = "data"
# MNIST_train = utils.CacheDataset(torchvision.datasets.MNIST(root=data_root, train=True, download=True, transform=s1))
# print(type(MNIST_train))

MNIST_test = utils.CacheDataset(ImageFolder(root="demo", transform=s1))
# MNIST_test = utils.CacheDataset(torchvision.datasets.MNIST(root=data_root, train=False, download=True, transform=s1))
# MNIST_loader = DataLoader(MNIST_train, batch_size=len(MNIST_train), shuffle=False)
MNIST_testLoader = DataLoader(MNIST_test,
                              batch_size=len(MNIST_test),
                              shuffle=False)

kheradpisheh = KheradpishehMNIST()
if use_cuda:
    kheradpisheh.cuda()

# Training The First Layer
print("load weights for the first layer")
if os.path.isfile("weights/saved_l1.net"):
    if use_cuda:
        kheradpisheh.load_state_dict(torch.load("weights/saved_l1.net"))
Example #6
def preprocess(x, xtest):  # assumed name/signature; the snippet starts mid-function
    xtest = sample_zero_mean(xtest)
    xtest = gcn(xtest)
    return x, xtest

if __name__ == "__main__":

    kernels = [utils.DoGKernel(3,1,2), utils.DoGKernel(3,2,1)]

    filter = utils.Filter(kernels, padding=6, thresholds=50)

    transform = InputTransform(filter)

    data_root = 'data/'
    cifar_data_root = 'cifar/'

    MNIST_train = utils.CacheDataset(MNIST(root=data_root, train=True, download=True, transform=transform)) # 60000 x 28 x 28
    MNIST_test = utils.CacheDataset(MNIST(root=data_root, train=False, download=True, transform=transform)) # 10000 x 28 x 28

    MNIST_loader = DataLoader(MNIST_train, batch_size=1000, shuffle=True)
    MNIST_test_loader = DataLoader(MNIST_test, batch_size=1000, shuffle=False)

    # CIFAR presumably aliases torchvision.datasets.CIFAR10: 50000 train / 10000 test 32x32 RGB images
    CIFAR_train = utils.CacheDataset(CIFAR(root=cifar_data_root, train=True, download=True, transform=transform)) # 50000 x 32 x 32
    CIFAR_test = utils.CacheDataset(CIFAR(root=cifar_data_root, train=False, download=True, transform=transform)) # 10000 x 32 x 32

    CIFAR_loader = DataLoader(CIFAR_train, batch_size=1000, shuffle=True)
    CIFAR_test_loader = DataLoader(CIFAR_test, batch_size=1000, shuffle=False)
    

    net = CTNN()
    clf = svm.SVC(verbose=True) # (60000 x 100)
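
    # likely continuation (truncated in the source), mirroring Example #1:
    net = train(net, MNIST_loader)
    train_outputs, train_y = inference(net, MNIST_loader)
    test_outputs, test_y = inference(net, MNIST_test_loader)
    train_outputs, test_outputs = preprocess(train_outputs, test_outputs)
    clf.fit(train_outputs, train_y)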