def test_sample(self):
    """Sampling a Cuboid(3, 4, 5) yields an (n, 3) array whose extremes
    approach, but never exceed, the half-extents (±1.5, ±2.0, ±2.5)."""
    n = 1000
    cuboid = space.Cuboid(3, 4, 5)
    positions = cuboid.sample(n, numpy.random)
    assert_equal(positions.shape, (n, 3))
    x_max = max(positions[:, 0])
    # With 1000 uniform samples the maximum should land close to the +x
    # boundary (1.5) and comfortably above 1.
    assert 1 < x_max < 1.5, x_max
    assert -1 > min(positions[:, 0]) > -1.5
    assert -1.5 > min(positions[:, 1]) > -2.0
    assert -2 > min(positions[:, 2]) > -2.5
def __init__(self, model, parameters):
    """Build the sheet: convert its cortical-space extent to visual-field
    coordinates and place the neurons uniformly at random within it."""
    SheetWithMagnificationFactor.__init__(self, model, parameters)
    # Sheet extent translated from cortical space to visual-field degrees.
    vf_x, vf_y = self.cs_2_vf(parameters.sx, parameters.sy)
    layout = space.RandomStructure(
        boundary=space.Cuboid(vf_x, vf_y, 0),
        origin=(0.0, 0.0, 0.0),
        rng=mozaik.pynn_rng)
    # sx*sy/1e6 rescales the area to the units `density` is expressed in
    # (presumably µm² -> mm²) — TODO confirm against the parameter docs.
    neuron_count = int(
        parameters.sx * parameters.sy / 1000000 * parameters.density)
    self.pop = self.sim.Population(
        neuron_count,
        getattr(self.model.sim, self.parameters.cell.model),
        self.parameters.cell.params,
        structure=layout,
        initial_values=self.parameters.cell.initial_values,
        label=self.name)
def __init__(self, model, parameters):
    """Build the sheet in visual-field coordinates, create its population,
    and eagerly materialise neuron positions for reproducibility."""
    SheetWithMagnificationFactor.__init__(self, model, parameters)
    # Convert the cortical-space extent (sx, sy) to visual-field degrees.
    width_vf, height_vf = self.cs_2_vf(parameters.sx, parameters.sy)
    random_layout = space.RandomStructure(
        boundary=space.Cuboid(width_vf, height_vf, 0),
        origin=(0.0, 0.0, 0.0),
        rng=mozaik.pynn_rng)
    size = int(parameters.sx * parameters.sy / 1000000 * parameters.density)
    self.pop = self.sim.Population(
        size,
        getattr(self.model.sim, self.parameters.cell.model),
        self.parameters.cell.params,
        structure=random_layout,
        initial_values=self.parameters.cell.initial_values,
        label=self.name)
    # Touch .positions so PyNN generates them now, keeping results
    # reproducible when running under multiprocessing.
    self.pop.positions
def __init__(self, model, parameters):
    """Create the sheet and its PyNN population, with neurons scattered
    uniformly at random over the sheet's spatial extent."""
    Sheet.__init__(self, model, parameters.sx, parameters.sy, parameters)
    n_neurons = int(parameters.sx * parameters.sy * parameters.density)
    logger.info("Creating %s with %d neurons." % (self.__class__.__name__, n_neurons))
    random_layout = space.RandomStructure(
        boundary=space.Cuboid(self.size_x, self.size_y, 0),
        origin=(0.0, 0.0, 0.0),
        rng=mozaik.pynn_rng)
    # A regular grid layout was considered as an alternative:
    # rs = space.Grid2D(aspect_ratio=1, dx=parameters.sx/parameters.density, dy=parameters.sy/parameters.density, x0=-parameters.sx/2,y0=-parameters.sy/2,z=0.0)
    self.pop = self.sim.Population(
        n_neurons,
        getattr(self.model.sim, self.parameters.cell.model),
        self.parameters.cell.params,
        structure=random_layout,
        initial_values=self.parameters.cell.initial_values,
        label=self.name)
def __init__(self, model, parameters):
    """Build the sheet (sPyNNaker port work in progress).

    NOTE(review): earlier experiments imported Izhikevich_cond from
    spynnaker8.extra_models. The configured model for l4_cortex_exc /
    l4_cortex_inh is 'EIF_cond_exp_isfa_ista' (exponential
    integrate-and-fire with spike-triggered and sub-threshold adaptation),
    which sPyNNaker does not provide — the cell model may have to be
    switched to an Izhikevich variant (e.g. via
    getattr(models, self.parameters.cell.model)).
    """
    SheetWithMagnificationFactor.__init__(self, model, parameters)
    # Sheet extent converted from cortical space to visual-field degrees.
    degrees_x, degrees_y = self.cs_2_vf(parameters.sx, parameters.sy)
    layout = space.RandomStructure(
        boundary=space.Cuboid(degrees_x, degrees_y, 0),
        origin=(0.0, 0.0, 0.0),
        rng=mozaik.pynn_rng)
    size = int(parameters.sx * parameters.sy / 1000000 * parameters.density)
    self.pop = self.sim.Population(
        size,
        getattr(self.model.sim, self.parameters.cell.model),
        self.parameters.cell.params,
        structure=layout,
        initial_values=self.parameters.cell.initial_values,
        label=self.name)
def __init__(self, model, parameters):
    """Create the LGN sheet and its PyNN population, with neurons placed
    uniformly at random over the sheet's spatial extent.

    Fix: a stray debugging ``print`` of the resolved cell-model class was
    left in the constructor; replaced with a lazy ``logger.debug`` call so
    production runs no longer write to stdout.
    """
    Sheet.__init__(self, model, parameters.sx, parameters.sy, parameters)
    n_neurons = int(parameters.sx * parameters.sy * parameters.density)
    logger.info("Creating *LGN* %s with %d neurons." % (self.__class__.__name__, n_neurons))
    rs = space.RandomStructure(
        boundary=space.Cuboid(self.size_x, self.size_y, 0),
        origin=(0.0, 0.0, 0.0),
        rng=mozaik.pynn_rng)
    # Debug-level trace of the resolved cell-model class (was a bare print).
    logger.debug("Resolved cell model class: %s",
                 getattr(self.model.sim, self.parameters.cell.model))
    # An alternative grid layout was considered:
    # rs = space.Grid2D(aspect_ratio=1, dx=parameters.sx/parameters.density, dy=parameters.sy/parameters.density, x0=-parameters.sx/2,y0=-parameters.sy/2,z=0.0)
    self.pop = self.sim.Population(
        n_neurons,
        # TODO: replace with SpikeSourceArray — e.g.
        # cellclass=self.sim.SpikeSourceArray(spike_times=[]) fed with
        # spike times from the nest/lgn output.
        getattr(self.model.sim, self.parameters.cell.model),
        self.parameters.cell.params,
        structure=rs,
        initial_values=self.parameters.cell.initial_values,
        label=self.name)
'tau_syn_E': 2.0,   # excitatory synaptic time constant (ms) — dict head is outside this view
'tau_syn_I': 5.0    # inhibitory synaptic time constant (ms)
}
# Pre-synaptic population: cells scattered uniformly inside a sphere of
# radius 100 centred at (0, 100, 0).
sphere = space.Sphere(radius=100.0)
struct = space.RandomStructure(sphere, origin=(0.0, 100.0, 0.0))
pop_pre = sim.Population(pop_size, sim.IF_cond_alpha(**cell_params), label="pop_pre", structure=struct)
pop_pre.record('v')
# Annotations (radius/color) are presumably consumed by a 3-D visualiser —
# TODO confirm which tool reads them.
pop_pre.annotate(radius=5)
pop_pre.annotate(color='0 0.6 0')
# Post-synaptic population: cells scattered uniformly inside a
# 30 x 40 x 50 cuboid centred at (-200, 0, -200).
cuboid = space.Cuboid(30, 40, 50)
struct = space.RandomStructure(cuboid, origin=(-200.0, 0.0, -200.0))
pop_post = sim.Population(pop_size, sim.IF_cond_alpha(**cell_params), label="pop_post", structure=struct)
pop_post.record('v')
pop_post.annotate(radius=5)
pop_post.annotate(color='0.2 0.2 1')
sim.run(tstop)
# Accessing .positions triggers PyNN's lazy position generation.
for pop in [pop_pre, pop_post]:
    print("Positions of %s: %s" % (pop.label, pop.positions))
'v_reset': 0.0,   # (mV) — dict head is outside this view
'v_thresh': 20.0, # (mV)
'cm': 0.5
}                 # (nF)
RateScale = 1e6
Tstep = 10.0
rngseed = 1240498
# Spatial extent of each population's bounding cuboid; note the argument
# order below passes (width, height, depth) into Cuboid.
width = 1.0
depth = 1.0
height = 6.0
# Separate random structures for the excitatory and inhibitory populations
# over identically sized cuboids.
exc_structure = space.RandomStructure(
    boundary=space.Cuboid(width, height, depth))
inh_structure = space.RandomStructure(
    boundary=space.Cuboid(width, height, depth))
# Gaussian distance-dependent connection probability exp(-d^2/lambda^2).
c_lambda = 1.0
# NOTE(review): the `weights=`/`delays=` keywords are the legacy PyNN 0.7
# connector API; newer PyNN moves these onto the synapse type — confirm the
# PyNN version this script targets.
exc_connector = DistanceDependentProbabilityConnector('exp(-d*d/(%f*%f))' % (c_lambda, c_lambda),
                                                      weights=0.1, delays=1.0)
# Inhibitory connections use a negative weight.
inh_connector = DistanceDependentProbabilityConnector('exp(-d*d/(%f*%f))' % (c_lambda, c_lambda),
                                                      weights=-0.4, delays=1.0)