def setUp(self):
    sim.setup(num_processes=2, rank=1, min_delay=0.123)
    self.p1 = sim.Population(9, sim.IF_cond_exp(),
                             structure=space.Grid2D(aspect_ratio=1.0, dx=1.0, dy=1.0))
    self.p2 = sim.Population(9, sim.HH_cond_exp(),
                             structure=space.Grid2D(aspect_ratio=1.0, dx=1.0, dy=1.0))
    assert_array_equal(self.p2._mask_local,
                       numpy.array([1, 0, 1, 0, 1, 0, 1, 0, 1], dtype=bool))
def setup(self):
    self.grid1 = space.Grid2D()
    self.grid2 = space.Grid2D(aspect_ratio=3.0, dx=11.1, dy=9.9,
                              x0=123, y0=456, z=789)
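# Hedged usage sketch (not from the test file above): a Grid2D structure can
# be inspected on its own via generate_positions(), which returns a (3, n)
# array of x, y, z coordinates laid out on a regular grid in the x-y plane.
from pyNN import space

grid = space.Grid2D(aspect_ratio=3.0, dx=11.1, dy=9.9, x0=123, y0=456, z=789)
positions = grid.generate_positions(12)  # 12 neurons -> a 6 x 2 grid
print(positions.shape)                   # (3, 12)
print(positions[2])                      # every z coordinate equals 789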
def test_set_structure(self):
    p = sim.Population(11, sim.IF_cond_exp(), structure=space.Grid2D())
    pv = p[2, 5, 7, 8]
    new_struct = space.Line()

    def set_struct(struct):
        pv.structure = struct
    self.assertRaises(AttributeError, set_struct, new_struct)
def test_set_structure(self, sim=sim):
    p = sim.Population(11, sim.IF_cond_exp())
    p.positions = numpy.arange(33).reshape(3, 11)
    new_struct = space.Grid2D()
    p.structure = new_struct
    self.assertEqual(p.structure, new_struct)
    self.assertEqual(p._positions, None)
def create_output_layer(input_layer, weights_tuple, delta, layer_name, refrac):
    """
    Builds a layer which connects to the input_layer according to the given
    parameters.

    Parameters:
        `input_layer`: The input layer

        `weights_tuple`: A tuple of the form (weights, weights_shape)

        `delta`: The vertical and horizontal offset of the output layer's squares

        `layer_name`: The label given to the output layer

        `refrac`: The refractory period of the output layer neurons

    Returns:
        An output layer which is connected to the given input layer according
        to the given parameters
    """
    # print('Number of output neurons {} for size {}x{}'.format(
    #     total_output_neurons, t_n, t_m))
    n, m = how_many_squares_in_shape(input_layer.shape, weights_tuple[1], delta)
    total_output_neurons = n * m
    print('Layer:', layer_name)
    print('Output layer has shape', n, m)
    output_layer = Layer(sim.Population(total_output_neurons,
                                        sim.IF_curr_exp(tau_refrac=refrac),
                                        structure=space.Grid2D(aspect_ratio=m / n),
                                        label=layer_name),
                         (n, m))
    connect_layer_to_layer(input_layer, output_layer, weights_tuple[1], delta,
                           weights_tuple[0])
    return output_layer
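# For context only: how_many_squares_in_shape() is defined elsewhere in the
# project. A plausible sketch (an assumption, not the original implementation)
# counts how many delta-strided windows of the given weight shape fit into the
# input layer, which matches how n and m are used above.
def how_many_squares_in_shape_sketch(input_shape, window_shape, delta):
    """Hypothetical helper: number of window positions per dimension."""
    n = (input_shape[0] - window_shape[0]) // delta + 1
    m = (input_shape[1] - window_shape[1]) // delta + 1
    return n, m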
## Load the spikes
spikes_on, spikes_off = load_lgn_spikes(contrast, N_lgn_layers)


# Spike functions
def spike_times(simulator, layer, spikes_file):
    return [simulator.Sequence(x) for x in spikes_file[layer]]


# Spatial structure of the LGN cells
# On cells
x0, y0, dx, dy = return_lgn_starting_coordinates(positions_on, Nside_lgn)
lgn_structure_on = space.Grid2D(aspect_ratio=1, x0=x0, y0=y0, dx=dx, dy=dy, z=0)

# Off cells
x0, y0, dx, dy = return_lgn_starting_coordinates(positions_off, Nside_lgn)
lgn_structure_off = space.Grid2D(aspect_ratio=1, x0=x0, y0=y0, dx=dx, dy=dy, z=0)

# Cell models for the LGN spikes (SpikeSourceArray)
lgn_spikes_on_models = []
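# Continuation sketch (an assumption, not the original script): build one
# SpikeSourceArray cell model per on-centre LGN layer from the loaded spikes
# and place each resulting population on the on-centre grid defined above.
# The simulator module name, N_lgn and N_lgn_layers are assumed from the
# surrounding script.
for layer in range(N_lgn_layers):
    lgn_spikes_on_models.append(
        simulator.SpikeSourceArray(
            spike_times=spike_times(simulator, layer, spikes_on)))

lgn_on_populations = [
    simulator.Population(N_lgn, model, structure=lgn_structure_on,
                         label='LGN_on_{}'.format(i))
    for i, model in enumerate(lgn_spikes_on_models)]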
}

synaptic_parameters = {
    'excitatory': {
        'timing_dependence': {'tau_plus': 20.0, 'tau_minus': 20.0},
        'weight_dependence': {'w_min': 0, 'w_max': 0.04,
                              'A_plus': 0.01, 'A_minus': 0.012},
        'weight': 0.01,
        'delay': '0.1+0.001*d'},
    'inhibitory': {'weight': 0.05, 'delay': '0.1+0.001*d'},
    'input': {'weight': 0.01, 'delay': 0.1},
}

sim.setup()

all_cells = sim.Population(n_exc + n_inh, sim.IF_cond_exp(**cell_parameters),
                           structure=space.Grid2D(**grid_parameters),
                           label="All Cells")
exc_cells = all_cells[:n_exc]
exc_cells.label = "Excitatory cells"
inh_cells = all_cells[n_exc:]
inh_cells.label = "Inhibitory cells"
ext_stim = sim.Population(n_stim, sim.SpikeSourcePoisson(**stimulation_parameters),
                          label="External Poisson stimulation")

stdp_mechanism = sim.STDPMechanism(
    timing_dependence=sim.SpikePairRule(**synaptic_parameters['excitatory']['timing_dependence']),
    weight_dependence=sim.AdditiveWeightDependence(**synaptic_parameters['excitatory']['weight_dependence']),
    weight=synaptic_parameters['excitatory']['weight'],
    delay=synaptic_parameters['excitatory']['delay'])

gaussian_connectivity = sim.DistanceDependentProbabilityConnector(
    **connectivity_parameters['gaussian'])
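# Hedged sketch of the wiring step that would typically follow (an assumption,
# not the original script): project the excitatory cells onto the whole grid
# with the distance-dependent connector and the STDP mechanism defined above,
# and the inhibitory cells with static synapses from the parameter dictionary.
excitatory_connections = sim.Projection(
    exc_cells, all_cells, gaussian_connectivity,
    synapse_type=stdp_mechanism,
    receptor_type='excitatory')
inhibitory_connections = sim.Projection(
    inh_cells, all_cells, gaussian_connectivity,
    synapse_type=sim.StaticSynapse(**synaptic_parameters['inhibitory']),
    receptor_type='inhibitory')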
def create_S2_layers(C1_layers: Dict[float, Sequence[Layer]], feature_size,
                     s2_prototype_cells, refrac_s2=.1, stdp=True,
                     inhibition=True) -> Dict[float, List[Layer]]:
    """
    Creates all prototype S2 layers for all sizes.

    Parameters:
        `C1_layers`: A dictionary containing, for each size, a list of C1
                     layers, one per feature

        `feature_size`: The side length f_s of the quadratic S2 receptive field

        `s2_prototype_cells`: The number of S2 prototype layers to create per size

        `refrac_s2`: The refractory period of the S2 neurons

        `stdp`: Whether the C1 -> S2 connections are plastic (STDP)

        `inhibition`: Whether to create inhibitory connections between S2 cells

    Returns:
        A dictionary containing, for each size, a list of different S2 layers,
        one per prototype.
    """
    f_s = feature_size
    initial_weight = 25 / (f_s * f_s)
    weight_rng = rnd.RandomDistribution('normal', mu=initial_weight,
                                        sigma=initial_weight / 20)
    i_offset_rng = rnd.RandomDistribution('normal', mu=.5, sigma=.45)
    weights = list(map(lambda x: weight_rng.next() * 1000,
                       range(4 * f_s * f_s)))
    S2_layers = {}
    i_offsets = list(map(lambda x: i_offset_rng.next(),
                         range(s2_prototype_cells)))
    ndicts = list(map(lambda x: {}, range(s2_prototype_cells)))
    ondicts = list(map(lambda x: {}, range(s2_prototype_cells)))
    omdicts = list(map(lambda x: {}, range(s2_prototype_cells)))
    for size, layers in C1_layers.items():
        n, m = how_many_squares_in_shape(layers[0].shape, (f_s, f_s), f_s)
        if stdp:
            l_i_offsets = [list(map(lambda x: rnd.RandomDistribution(
                                        'normal', mu=i_offsets[i],
                                        sigma=.25).next(),
                                    range(n * m)))
                           for i in range(s2_prototype_cells)]
        else:
            l_i_offsets = np.zeros((s2_prototype_cells, n * m))
        print('S2 Shape', n, m)
        layer_list = list(map(
            lambda i: Layer(sim.Population(n * m,
                                           sim.IF_curr_exp(i_offset=l_i_offsets[i],
                                                           tau_refrac=refrac_s2),
                                           structure=space.Grid2D(aspect_ratio=m / n),
                                           label=str(i)),
                            (n, m)),
            range(s2_prototype_cells)))
        for S2_layer in layer_list:
            for C1_layer in layers:
                S2_layer.projections[C1_layer.population.label] =\
                    connect_layer_to_layer(C1_layer, S2_layer, (f_s, f_s), f_s,
                                           [[w] for w in weights[:f_s * f_s]],
                                           stdp=stdp,
                                           initial_weight=initial_weight,
                                           ndicts=ndicts, ondicts=ondicts,
                                           omdicts=omdicts)
        S2_layers[size] = layer_list

    # Set the labels of the shared connections
    if stdp:
        t = time.clock()
        print('Set shared labels')
        for s2_label_dicts in [ndicts, ondicts, omdicts]:
            for i in range(s2_prototype_cells):
                w_iter = iter(weights)
                for label, (source, target) in s2_label_dicts[i].items():
                    conns = nest.GetConnections(source=source, target=target)
                    nest.SetStatus(conns, {'label': label,
                                           'weight': next(w_iter)})
        print('Setting labels took', time.clock() - t)

    if inhibition:
        # Create inhibitory connections between the S2 cells
        # First between the neurons of the same layer...
        inh_weight = -10
        inh_delay = .1
        print('Create S2 self inhibitory connections')
        for layer_list in S2_layers.values():
            for layer in layer_list:
                sim.Projection(layer.population, layer.population,
                               sim.AllToAllConnector(allow_self_connections=False),
                               sim.StaticSynapse(weight=inh_weight,
                                                 delay=inh_delay))

        # ...and between the layers
        print('Create S2 cross-scale inhibitory connections')
        for i in range(s2_prototype_cells):
            for layer_list1 in S2_layers.values():
                for layer_list2 in S2_layers.values():
                    if layer_list1[i] != layer_list2[i]:
                        sim.Projection(layer_list1[i].population,
                                       layer_list2[i].population,
                                       sim.AllToAllConnector(),
                                       sim.StaticSynapse(weight=inh_weight,
                                                         delay=inh_delay))

        if stdp:
            # Create the inhibition between different prototype layers
            print('Create S2 cross-prototype inhibitory connections')
            for layer_list in S2_layers.values():
                for layer1 in layer_list:
                    for layer2 in layer_list:
                        if layer1 != layer2:
                            sim.Projection(layer1.population, layer2.population,
                                           sim.OneToOneConnector(),
                                           sim.StaticSynapse(weight=inh_weight - 1,
                                                             delay=inh_delay))
    return S2_layers
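# Hypothetical call site (names are illustrative, not from the original
# project): create_S2_layers expects a dict mapping each image scale to its
# list of C1 layers, one per feature.
# s2_layers = create_S2_layers(c1_layers_by_scale, feature_size=3,
#                              s2_prototype_cells=5, refrac_s2=.1,
#                              stdp=True, inhibition=True)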
tau_syn_E = 5.0
tau_syn_I = 5.0
cm = tau_m / R

# It seems that the resting potential is -65 for every neuron model
model = simulator.IF_curr_exp(cm=cm, i_offset=i_offset, tau_m=tau_m,
                              tau_refrac=tau_refractory, tau_syn_E=tau_syn_E,
                              tau_syn_I=tau_syn_I, v_reset=v_rest,
                              v_thresh=v_thresh)

# Spatial structure
retinal_structure = space.Grid2D(aspect_ratio=1, dx=1.0, dy=1.0, z=0)

# Populations
retinal_neurons = simulator.Population(N_retina, model,
                                       structure=retinal_structure,
                                       label='Retina')
lgn_neurons = simulator.Population(N_lgn, model,
                                   structure=retinal_structure,
                                   label='LGN')

# Initialize populations
v_random = np.random.rand(N_retina) * (v_thresh - v_rest) + v_rest
# retinal_neurons.initialize(v=v_random)  # Random initialization
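# Hedged continuation sketch (an assumption, not the original script):
# apply the random initial membrane potentials computed above and record
# spikes from both populations before running the simulation.
retinal_neurons.initialize(v=v_random)
lgn_neurons.initialize(v=v_rest)
retinal_neurons.record('spikes')
lgn_neurons.record('spikes')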