Example #1
def __init__(self,
             netParams,
             device,
             optimizer,
             testSet,
             classification=False):
    self.netParams = netParams
    # DataLoader over the held-out test set
    self.testLoader = DataLoader(dataset=testSet,
                                 batch_size=8,
                                 shuffle=False,
                                 num_workers=4)
    self.device = device
    self.optimizer = optimizer
    # SLAYER spike loss, wrapped by the error criterion configured in netParams
    error = snn.loss(self.netParams).to(self.device)
    self.criterion = Criterion(error,
                               netParams['training']['error']['type'])
    self.theta = netParams['neuron']['theta']
    self.classification = classification
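
For context, a minimal sketch of how a helper with this constructor could be wired up. The class name Tester is hypothetical; snn.params assumes the usual slayerSNN (slayerPytorch) setup, and Network and the testing set are the project pieces shown in Example #3.

import torch
import slayerSNN as snn

netParams = snn.params('network.yaml')   # network/simulation configuration (file name is an assumption)
device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
net = Network(netParams).to(device)      # user-defined SLAYER network (see Example #3)
optimizer = torch.optim.Adam(net.parameters(), lr=0.01, amsgrad=True)

# testingSet: an nmnistDataset instance, built as in Example #3
tester = Tester(netParams, device, optimizer, testingSet, classification=True)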
Example #2
def __init__(self,
             netParams,
             device,
             optimizer,
             trainingSet,
             classification=False,
             collect_weights=False):
    self.netParams = netParams
    # DataLoader over the training set
    self.trainLoader = DataLoader(dataset=trainingSet,
                                  batch_size=8,
                                  shuffle=False,
                                  num_workers=4)
    self.device = device
    self.optimizer = optimizer
    # SLAYER spike loss, wrapped by the error criterion configured in netParams
    error = snn.loss(self.netParams).to(self.device)
    self.criterion = Criterion(error,
                               netParams['training']['error']['type'])
    self.classification = classification
    # Optionally attach a collector for recording weights during training
    self.weightCollector = WeightCollector() if collect_weights else None
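
Example #2 is the training-side counterpart of Example #1: it wraps the training set in its DataLoader instead of the test set, omits the theta field, and, when collect_weights=True, attaches a WeightCollector to the instance.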
Example #3
            # Unpack each archive member only if it is not already on disk
            if not os.path.exists(os.path.join(path, member)):
                zip_file.extract(member, path)
            else:
                print('extraction path already exists',
                      os.path.join(path, member))


# extract_dataset()  # one-time dataset extraction (see the fragment above)

device = torch.device("cuda" if USE_CUDA else "cpu")

# Create network instance.
net = Network(netParams).to(device)

# Create snn loss instance.
error = snn.loss(netParams).to(device)

# Define optimizer module.
optimizer = torch.optim.Adam(net.parameters(), lr=0.01, amsgrad=True)

# Dataset and dataLoader instances.
trainingSet = nmnistDataset(datasetPath=netParams['training']['path']['in'],
                            sampleFile=netParams['training']['path']['train'],
                            samplingTime=netParams['simulation']['Ts'],
                            sampleLength=netParams['simulation']['tSample'])
trainLoader = DataLoader(dataset=trainingSet,
                         batch_size=8,
                         shuffle=False,
                         num_workers=4)
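
# Minimal training-loop sketch (an assumption, not part of this snippet): it presumes
# the dataset yields (input, target, label) triples and that error.numSpikes is the
# intended loss, as in the standard slayerPytorch NMNIST example.
for epoch in range(10):
    for inSpikes, target, label in trainLoader:
        output = net.forward(inSpikes.to(device))           # propagate input spike tensors
        loss = error.numSpikes(output, target.to(device))   # spike-count loss
        optimizer.zero_grad()
        loss.backward()
        optimizer.step()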

testingSet = nmnistDataset(datasetPath=netParams['training']['path']['in'],