def __init__(self, netParams):
    # constructor of a torch.nn.Module subclass named Network
    super(Network, self).__init__()
    # initialize slayer from the neuron and simulation sections of the config
    slayer = spikeLayer(netParams['neuron'], netParams['simulation'])
    self.slayer = slayer
    # define network functions: flattened 34x34x2 input -> 512 hidden -> 10 output
    self.fc1 = slayer.dense((34 * 34 * 2), 512)
    self.fc2 = slayer.dense(512, 10)
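
The snippet shows only the constructor; a minimal forward() sketch for this two-layer network, following the usual SLAYER pattern where psp() builds the post-synaptic potential and spike() generates the output spikes (tensor shapes assume SLAYER's (batch, channels, height, width, time) layout with a flattened 34x34x2 input):

def forward(self, spikeInput):
    spikeLayer1 = self.slayer.spike(self.slayer.psp(self.fc1(spikeInput)))   # (batch, 512, 1, 1, T)
    spikeLayer2 = self.slayer.spike(self.slayer.psp(self.fc2(spikeLayer1)))  # (batch, 10, 1, 1, T)
    return spikeLayer2
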
Example 2
def __init__(self, netParams):
    super(Network, self).__init__()
    # initialize slayer
    slayer = spikeLayer(netParams['neuron'], netParams['simulation'])
    self.slayer = slayer
    # define network functions; Nin, Nhid, Nout are layer sizes taken from the
    # network configuration (see Example 6 for how they can be read from the YAML file)
    self.fc1 = slayer.dense(Nin, Nhid)
    self.fc2 = slayer.dense(Nhid, Nout)
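
A hedged usage sketch for this variant (the config path is illustrative; netParams is assumed to be a dict-like object with 'neuron' and 'simulation' sections, loaded as in Example 6, and Nin, Nhid, Nout must be defined in the enclosing scope):

netParams = SlayerParams('network.yaml')  # hypothetical config path
device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
net = Network(netParams).to(device)       # move the spiking layers to the target device
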
Example 3

def __init__(self, netParams):
    super(Network, self).__init__()
    # initialize slayer
    slayer = spikeLayer(netParams['neuron'], netParams['simulation'])
    self.slayer = slayer
    # define network functions
    self.conv1 = slayer.conv(2, 16, 5, padding=2, weightScale=10)   # 2 -> 16 channels, 5x5 kernel
    self.conv2 = slayer.conv(16, 32, 3, padding=1, weightScale=50)  # 16 -> 32 channels, 3x3 kernel
    self.pool1 = slayer.pool(4)                                     # 4x4 spatial pooling
    self.pool2 = slayer.pool(2)
    self.pool3 = slayer.pool(2)
    self.fc1 = slayer.dense((8 * 8 * 32), 512)
    self.fc2 = slayer.dense(512, 11)
    self.drop = slayer.dropout(0.1)
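
Only the constructor is shown here as well. A forward() sketch for this conv/pool stack, assuming a 2-channel 128x128 event-camera input so that the final feature map is 8x8x32 (the placement of self.drop before the dense layers is an assumption):

def forward(self, spikeInput):
    spike = self.slayer.spike(self.slayer.psp(self.pool1(spikeInput)))  # 2 x 32 x 32
    spike = self.slayer.spike(self.slayer.psp(self.conv1(spike)))       # 16 x 32 x 32
    spike = self.slayer.spike(self.slayer.psp(self.pool2(spike)))       # 16 x 16 x 16
    spike = self.slayer.spike(self.slayer.psp(self.conv2(spike)))       # 32 x 16 x 16
    spike = self.slayer.spike(self.slayer.psp(self.pool3(spike)))       # 32 x 8 x 8
    spike = spike.reshape((spike.shape[0], -1, 1, 1, spike.shape[-1]))  # flatten to (batch, 2048, 1, 1, T)
    spike = self.drop(spike)                                            # dropout placement assumed
    spike = self.slayer.spike(self.slayer.psp(self.fc1(spike)))         # 512
    spike = self.slayer.spike(self.slayer.psp(self.fc2(spike)))         # 11 output classes
    return spike
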
Example 4
def __init__(self,
             params,
             input_size,
             hidden_size,
             output_size,
             quantize=True):
    super(SlayerLoihiMLP, self).__init__()
    self.hidden_size = hidden_size
    self.output_size = output_size
    # quantize is forwarded to slayer.dense, presumably to keep weights at Loihi-compatible precision
    self.quantize = quantize
    self.slayer = spikeLayer(params["neuron"], params["simulation"])
    self.fc1 = self.slayer.dense(input_size,
                                 hidden_size,
                                 quantize=self.quantize)
    self.fc2 = self.slayer.dense(hidden_size,
                                 output_size,
                                 quantize=self.quantize)
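
The corresponding forward() is not shown; a sketch assuming the Loihi variant of slayerSNN's spikeLayer, which exposes spikeLoihi() for the neuron dynamics and delayShift() for the hardware's one-time-step spike delay (a non-Loihi spikeLayer would use psp()/spike() as in the earlier examples):

def forward(self, spike_input):
    spike_1 = self.slayer.spikeLoihi(self.fc1(spike_input))
    spike_1 = self.slayer.delayShift(spike_1, 1)          # mimic Loihi's one-step axonal delay
    spike_out = self.slayer.spikeLoihi(self.fc2(spike_1))
    spike_out = self.slayer.delayShift(spike_out, 1)
    return spike_out
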
Example 5
def __init__(
    self,
    params,
    tact_input_size,
    vis_input_size,
    tact_output_size,
    vis_output_size,
    output_size,
):
    super(SlayerLoihiMM, self).__init__()
    self.tact_output_size = tact_output_size
    self.vis_output_size = vis_output_size
    self.output_size = output_size
    self.slayer = spikeLayer(params["neuron"], params["simulation"])

    # separate tactile and visual branches, fused by a dense layer over their concatenated outputs
    self.tact_fc = self.slayer.dense(tact_input_size, tact_output_size)
    self.vis_fc = self.slayer.dense(vis_input_size, vis_output_size)
    self.combi = self.slayer.dense(tact_output_size + vis_output_size,
                                   output_size)
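
A sketch of how the two branches might be fused in forward(), assuming spike tensors of shape (batch, C, 1, 1, T) so the tactile and visual outputs can be concatenated along the channel dimension (requires `import torch`; the spikeLoihi()/delayShift() calls follow the same assumption as Example 4's sketch):

def forward(self, tact_spikes, vis_spikes):
    tact = self.slayer.delayShift(self.slayer.spikeLoihi(self.tact_fc(tact_spikes)), 1)
    vis = self.slayer.delayShift(self.slayer.spikeLoihi(self.vis_fc(vis_spikes)), 1)
    fused = torch.cat([tact, vis], dim=1)   # (batch, tact_output_size + vis_output_size, 1, 1, T)
    out = self.slayer.delayShift(self.slayer.spikeLoihi(self.combi(fused)), 1)
    return out
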
Example 6
import numpy as np
import torch

# spikeLayer and SlayerParams are assumed to be imported from the SLAYER-PyTorch (slayerSNN)
# package; the exact import path depends on the repository layout.
verbose = True  # the fragment assumes a verbose flag; set False to silence the printouts
device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')

net_params = SlayerParams('test_files/Loihi/network.yaml')

if verbose:
    print('Neuron Type:', 'LOIHI')
    print('vThMant :', 80)
    print('vDecay  :', 128)
    print('iDecay  :', 1024)
    print('refDelay:', 1)

# Network structure
Ns = int(net_params['simulation']['tSample'] / net_params['simulation']['Ts'])  # number of simulation time steps
Nin = int(net_params['layer'][0]['dim'])   # input layer size
Nhid = int(net_params['layer'][1]['dim'])  # hidden layer size
Nout = int(net_params['layer'][2]['dim'])  # output layer size

slayer = spikeLayer(net_params['neuron'], net_params['simulation']).to(device)

if verbose:
    print('Neuron Threshold =', slayer.neuron['theta'])

# define network functions
fc1 = slayer.dense(Nin, Nhid).to(device)
fc2 = slayer.dense(Nhid, Nout).to(device)

# load input spikes (AER list of [time, neuronID] rows)
spikeAER = np.loadtxt('test_files/Loihi/snnData/spikeIn.txt')
spikeAER[:, 0] /= net_params['simulation']['Ts']  # convert spike times to time-step units
spikeAER[:, 1] -= 1                               # convert neuron IDs to zero-based indices
# spikeAER = np.fliplr(np.loadtxt('loihiInputSpikes.txt'))
spikeAER[:, 0] += 49  # 49 not 50 because the input spikes seem to be transmitted without delay
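
The fragment stops before the events are rasterized and fed to the network; a hedged continuation sketch (the names spikeIn, spikeHid, spikeOut are illustrative, and the 1/Ts spike magnitude follows the usual SLAYER convention):

# rasterize AER events [time-step, neuronID] into a dense (1, Nin, 1, 1, Ns) tensor
spikeIn = torch.zeros((1, Nin, 1, 1, Ns), device=device)
for t, n in np.rint(spikeAER).astype(int):
    if 0 <= t < Ns:
        spikeIn[0, n, 0, 0, t] = 1 / net_params['simulation']['Ts']

# run the two dense layers over all time steps at once
# (a Loihi spikeLayer variant would use spikeLoihi()/delayShift() instead of psp()/spike())
spikeHid = slayer.spike(slayer.psp(fc1(spikeIn)))
spikeOut = slayer.spike(slayer.psp(fc2(spikeHid)))
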