from torch.autograd import Variable
from torch.optim import Adam
from torch.utils.data import DataLoader
# PoissonEncoder, MnistDataset, collate_fn, the model and F.Spikeloss come from the project's own modules.

model.train()
PeDurx = 50                                   # length of the Poisson-encoding window (time steps)
batch_size = 50
pe = PoissonEncoder(duration=PeDurx)
optimizer = Adam(model.parameters(), lr=0.003)

train_loader = DataLoader(dataset=MnistDataset(training=True, flatten=False),
                          collate_fn=collate_fn, shuffle=True, batch_size=batch_size)
test_loader = DataLoader(dataset=MnistDataset(training=False, flatten=False),
                         collate_fn=collate_fn, shuffle=False, batch_size=batch_size)

for epoch in range(15):
    for i, (data, target) in enumerate(train_loader):
        target = Variable(target)
        for t, q in enumerate(pe.Encoding(data)):
            spikes = Variable(q, requires_grad=True)   # spike frame at time step t (renamed so the batch is not overwritten)
            output, time = model(spikes, t)
            optimizer.zero_grad()                      # clear old gradients before the backward pass
            loss = F.Spikeloss(output, target, time_step=time)
            loss.backward()                            # compute gradients
            optimizer.step()                           # update weights
            print("Epoch: " + str(epoch) + " Time: " + str(t) + " loss: " + str(loss.data))
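MnistDataset and collate_fn above are project-specific and not shown here. Purely as an illustration, a collate function compatible with this DataLoader call, assuming the dataset yields (image, label) pairs with integer labels, could look like the following hypothetical stand-in:

import torch

def collate_fn(batch):
    # Hypothetical stand-in, not the project's implementation:
    # stack the images into one float tensor and the labels into a long tensor.
    images, labels = zip(*batch)
    images = torch.stack([torch.as_tensor(img, dtype=torch.float32) for img in images])
    labels = torch.as_tensor(labels, dtype=torch.long)
    return images, labels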
from torch.optim import SGD

model = SNN()
model.train()
PeDurx = 50
batch_size = 64
Pencoder = PoissonEncoder(duration=PeDurx)
optimizer = SGD(model.parameters(), lr=0.01)

train_loader = DataLoader(dataset=MnistDataset(training=True, flatten=True),
                          collate_fn=collate_fn, shuffle=True, batch_size=batch_size)
test_loader = DataLoader(dataset=MnistDataset(training=False, flatten=True),
                         collate_fn=collate_fn, shuffle=False, batch_size=batch_size)

for epoch in range(15):                                     # epoch counter no longer shadows the batch index i
    for i, (image, label) in enumerate(train_loader):
        label = Variable(label)
        for t, j in enumerate(Pencoder.Encoding(image), start=1):   # time steps run from 1 to PeDurx
            spikes = Variable(j, requires_grad=True)        # spike frame at time step t (renamed so the batch is not overwritten)
            output = model(spikes, t)
            optimizer.zero_grad()
            loss = F.Spikeloss(output, label, PeDurx)
            print(loss.data)
            loss.backward()                                 # compute gradients
            optimizer.step()                                # update weights
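PoissonEncoder is also part of the project's own code. The idea behind it is standard rate coding: at every time step each pixel fires with a probability proportional to its intensity. A minimal sketch, assuming pixel values normalised to [0, 1] and using a Bernoulli draw per step as an approximation of a Poisson process, could be:

import torch

class PoissonEncoder:
    def __init__(self, duration):
        self.duration = duration                 # number of spike frames to generate

    def Encoding(self, image):
        # image: float tensor with intensities in [0, 1]
        for _ in range(self.duration):
            yield torch.bernoulli(image)         # one binary spike frame per time step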
import os
import seaborn
from matplotlib import pyplot as plt

PeDurx = 45
batch_size = 64
optimizer = SGD(model.parameters(), lr=0.003)
encoder = PoissonEncoder(duration=PeDurx)

train_loader = DataLoader(dataset=MnistDataset(training=True, flatten=False),
                          collate_fn=collate_fn, shuffle=True, batch_size=batch_size)
test_loader = DataLoader(dataset=MnistDataset(training=False, flatten=False),
                         collate_fn=collate_fn, shuffle=False, batch_size=batch_size)

for epoch in range(15):
    for i, (data, target) in enumerate(train_loader):
        target = Variable(target)
        for j, q in enumerate(encoder.Encoding(data)):
            pdata = Variable(q, requires_grad=True)
            output, v_current = model(pdata, j + 1)
            # save a heatmap of each layer's membrane potential at this time step
            for k, v in enumerate(v_current):
                os.makedirs(f"image/{k + 1}", exist_ok=True)
                if k < 4:
                    # 4-D membrane map: plot the first channel of the first sample
                    seaborn.heatmap(v[0][0], cbar=False).figure.savefig(f"image/{k + 1}/{j}.png")
                else:
                    # 2-D membrane map: plot it directly
                    seaborn.heatmap(v, cbar=False).figure.savefig(f"image/{k + 1}/{j}.png")
                plt.clf()                                   # clear the figure so successive heatmaps do not pile up
            optimizer.zero_grad()
            loss = F.Spikeloss(output, target, time_step=PeDurx)
            loss.backward()                                 # compute gradients
            optimizer.step()                                # update weights
        print("Epoch: " + str(epoch) + " Time: " + str(i) + " loss: " + str(loss.data))
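All three variants build a test_loader but never use it. A hedged sketch of an evaluation pass, assuming the model keeps the (output, v_current) signature of the last snippet, that output has shape (batch, classes), and that the class accumulating the most spikes over the encoding window is taken as the prediction (any per-batch membrane reset the model needs is omitted):

import torch

model.eval()
correct, total = 0, 0
with torch.no_grad():
    for data, target in test_loader:
        counts = 0
        for t, q in enumerate(encoder.Encoding(data)):
            output, _ = model(q, t + 1)          # same call signature as the training loop above
            counts = counts + output             # accumulate spikes over the encoding window
        pred = counts.argmax(dim=1)              # most active output neuron wins
        correct += (pred == target).sum().item()
        total += target.size(0)
print("Test accuracy: " + str(correct / total))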