def _dd_is_there_a_connection(
         self, d_expression, distances, rng=None):
     if rng is None:
         rng = NumpyRNG(seed=self.connectionSeeds._parent_seed)
     dd_potential_prob = rng.uniform(low=0.0, high=1.0,
                                     size=distances.shape)
     dd_actual_prob = numpy.fromfunction(
         self._distance_dependence, shape=distances.shape, dtype=int,
         d_expression=d_expression, distances=distances)
     return dd_potential_prob < dd_actual_prob
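The method above draws one uniform number per candidate pair and keeps the pair whenever the draw falls below the distance-dependent probability. A standalone numpy sketch of the same accept/reject step, assuming a hypothetical expression 0.5*exp(-d/100.0):

import numpy
from pyNN.random import NumpyRNG

distances = numpy.array([10.0, 50.0, 200.0])                 # hypothetical pair distances
p_connect = 0.5 * numpy.exp(-distances / 100.0)              # distance-dependent probability
draws = NumpyRNG(seed=1).uniform(0.0, 1.0, size=distances.shape)
connected = draws < p_connect                                # boolean mask of accepted pairs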
Example 2
def setup(timestep=DEFAULT_TIMESTEP, min_delay=DEFAULT_MIN_DELAY,
          **extra_params):
    """
    Should be called at the very beginning of a script.

    `extra_params` contains any keyword arguments that are required by a given
    simulator but not by others.

    NEST-specific extra_params:

    `spike_precision`:
        should be "off_grid" (default) or "on_grid"
    `verbosity`:
        INSERT DESCRIPTION OF POSSIBLE VALUES
    `recording_precision`:
        number of decimal places (OR SIGNIFICANT FIGURES?) in recorded data
    `threads`:
        number of threads to use
    `grng_seed`:
        one seed for the global random number generator of NEST
    `rng_seeds`:
        a list of seeds, one for each thread on each MPI process
    `rng_seeds_seed`:
        a single seed that will be used to generate random values for `rng_seeds`
    """
    max_delay = extra_params.get('max_delay', DEFAULT_MAX_DELAY)
    common.setup(timestep, min_delay, **extra_params)
    simulator.state.clear()
    for key in ("verbosity", "spike_precision", "recording_precision",
                "threads"):
        if key in extra_params:
            setattr(simulator.state, key, extra_params[key])
    # set kernel RNG seeds
    simulator.state.num_threads = extra_params.get('threads') or 1
    if 'grng_seed' in extra_params:
        simulator.state.grng_seed = extra_params['grng_seed']
    if 'rng_seeds' in extra_params:
        simulator.state.rng_seeds = extra_params['rng_seeds']
    else:
        rng = NumpyRNG(extra_params.get('rng_seeds_seed', 42))
        n = simulator.state.num_processes * simulator.state.threads
        simulator.state.rng_seeds = rng.next(n, 'uniform_int', {'low': 0, 'high': 100000}).tolist()
    # set resolution
    simulator.state.dt = timestep
    # Set min_delay and max_delay
    simulator.state.set_delays(min_delay, max_delay)
    nest.SetDefaults('spike_generator', {'precise_times': True})
    return rank()
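For illustration, a minimal call using some of the NEST-specific keyword arguments documented above might look like the following (pyNN.nest backend assumed; the values are arbitrary):

import pyNN.nest as sim

node = sim.setup(timestep=0.1, min_delay=0.5,
                 spike_precision="off_grid",   # NEST-specific
                 threads=2,                    # NEST-specific
                 rng_seeds_seed=12345)         # seeds the per-thread RNGs
# setup() returns the MPI rank of the calling process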
Example 3
class Grid3D(BaseStructure):
    """
    Represents a structure with neurons distributed on a 3D grid.

    Arguments:
        `dx`, `dy`, `dz`:
            distances between points in the x, y, z directions.
        `x0`, `y0`, `z0`:
            coordinates of the starting corner of the grid.
        `aspect_ratioXY`, `aspect_ratioXZ`:
            ratios of the number of grid points per side (not the ratio of the
            side lengths, unless ``dx == dy == dz``)
        `fill_order`:
            may be 'sequential' or 'random'.

    If `fill_order` is 'sequential', the z-index will be filled first, then y
    then x, i.e. the first cell will be at (0,0,0) (given default values for
    the other arguments), the second at (0,0,1), etc.
    """
    parameter_names = ("aspect_ratios", "dx", "dy", "dz", "x0", "y0", "z0", "fill_order")

    def __init__(self, aspect_ratioXY=1.0, aspect_ratioXZ=1.0, dx=1.0, dy=1.0,
                 dz=1.0, x0=0.0, y0=0.0, z0=0, fill_order="sequential", rng=None):
        self.aspect_ratios = (aspect_ratioXY, aspect_ratioXZ)
        assert fill_order in ('sequential', 'random')
        self.fill_order = fill_order
        self.rng = rng
        self.dx = dx; self.dy = dy; self.dz = dz
        self.x0 = x0; self.y0 = y0; self.z0 = z0

    def calculate_size(self, n):
        """docstring goes here"""
        a, b = self.aspect_ratios
        nx = int(round(math.pow(n * a * b, 1 / 3.0)))
        ny = int(round(nx / a))
        nz = int(round(nx / b))
        assert nx * ny * nz == n, str((nx, ny, nz, nx * ny * nz, n, a, b))
        return nx, ny, nz

    def generate_positions(self, n):
        nx, ny, nz = self.calculate_size(n)
        x, y, z = numpy.indices((nx, ny, nz), dtype=float)
        x = self.x0 + self.dx * x.flatten()
        y = self.y0 + self.dy * y.flatten()
        z = self.z0 + self.dz * z.flatten()
        positions = numpy.array((x, y, z))
        if self.fill_order == 'sequential':
            return positions
        else:
            if self.rng is None:
                self.rng = NumpyRNG()
            return self.rng.permutation(positions.T).T
    generate_positions.__doc__ = BaseStructure.generate_positions.__doc__
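A short usage sketch for Grid3D (assuming the class lives in pyNN.space, as in standard PyNN; the concrete numbers are arbitrary). The aspect ratios are ratios of grid-point counts, so 16 neurons with aspect_ratioXY = aspect_ratioXZ = 2 give a 4 x 2 x 2 grid:

from pyNN.random import NumpyRNG
from pyNN.space import Grid3D

grid = Grid3D(aspect_ratioXY=2.0, aspect_ratioXZ=2.0,
              dx=10.0, dy=10.0, dz=10.0,
              fill_order="random", rng=NumpyRNG(seed=42))
positions = grid.generate_positions(16)   # array of shape (3, 16): rows are x, y, z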
Example 5
 def generate_positions(self, n):
     nx, ny = self.calculate_size(n)
     x,y,z = numpy.indices((nx,ny,1), dtype=float)
     x = self.x0 + self.dx*x.flatten()
     y = self.y0 + self.dy*y.flatten()
     z = self.z + z.flatten()
     positions = numpy.array((x,y,z)) # use column_stack, if we decide to switch from (3,n) to (n,3)
     if self.fill_order == 'sequential':
         return positions
     else: # random
         if self.rng is None:
             self.rng = NumpyRNG()
         return self.rng.permutation(positions.T).T
Example 6
class Grid2D(BaseStructure):
    """
    Represents a structure with neurons distributed on a 2D grid.

    Arguments:
        `dx`, `dy`:
            distances between points in the x, y directions.
        `x0`, `y0`:
            coordinates of the starting corner of the grid.
        `z`:
            the z-coordinate of all points in the grid.
        `aspect_ratio`:
            ratio of the number of grid points per side (not the ratio of the
            side lengths, unless ``dx == dy``)
        `fill_order`:
            may be 'sequential' or 'random'
    """
    parameter_names = ("aspect_ratio", "dx", "dy", "x0", "y0", "z", "fill_order")

    def __init__(self, aspect_ratio=1.0, dx=1.0, dy=1.0, x0=0.0, y0=0.0, z=0,
                 fill_order="sequential", rng=None):
        self.aspect_ratio = aspect_ratio
        assert fill_order in ('sequential', 'random')
        self.fill_order = fill_order
        self.rng = rng
        self.dx = dx; self.dy = dy; self.x0 = x0; self.y0 = y0; self.z = z

    def calculate_size(self, n):
        """docstring goes here"""
        nx = math.sqrt(n * self.aspect_ratio)
        if n % nx != 0:
            raise Exception("Invalid size: n=%g, nx=%d" % (n, nx))
        nx = int(round(nx))
        ny = n // nx
        return nx, ny

    def generate_positions(self, n):
        nx, ny = self.calculate_size(n)
        x, y, z = numpy.indices((nx, ny, 1), dtype=float)
        x = self.x0 + self.dx * x.flatten()
        y = self.y0 + self.dy * y.flatten()
        z = self.z + z.flatten()
        positions = numpy.array((x, y, z))  # use column_stack, if we decide to switch from (3,n) to (n,3)
        if self.fill_order == 'sequential':
            return positions
        else:  # random
            if self.rng is None:
                self.rng = NumpyRNG()
            return self.rng.permutation(positions.T).T
    generate_positions.__doc__ = BaseStructure.generate_positions.__doc__
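A corresponding sketch for Grid2D (again assuming pyNN.space). Note that calculate_size requires n to be divisible by sqrt(n * aspect_ratio); for example n = 12 with aspect_ratio = 3 gives a 6 x 2 grid:

from pyNN.random import NumpyRNG
from pyNN.space import Grid2D

grid = Grid2D(aspect_ratio=3.0, dx=5.0, dy=5.0, z=0.0,
              fill_order="random", rng=NumpyRNG(seed=42))
positions = grid.generate_positions(12)   # array of shape (3, 12)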
Example 7
 def fixedpre_population_views(self):
     sim.setup(timestep=1.0)
     in_pop = sim.Population(4, sim.SpikeSourceArray([0]), label="in_pop")
     pop = sim.Population(4, sim.IF_curr_exp(), label="pop")
     rng = NumpyRNG(seed=2)
     conn = sim.Projection(in_pop[0:3], pop[1:4],
                           sim.FixedNumberPreConnector(2, rng=rng),
                           sim.StaticSynapse(weight=0.5, delay=2))
     sim.run(1)
     weights = conn.get(['weight', 'delay'], 'list')
     sim.end()
     # The fixed seed means this gives the same answer each time
     target = [(1, 1, 0.5, 2.0), (1, 2, 0.5, 2.0), (1, 3, 0.5, 2.0),
               (2, 1, 0.5, 2.0), (2, 2, 0.5, 2.0), (2, 3, 0.5, 2.0)]
     self.assertEqual(weights.tolist(), target)
Example 8
 def __init__(self,
              n_patterns,
              n_target_spikes,
              n_inputs,
              n_outputs,
              n_epochs,
              seed=None):
     self.n_patterns = n_patterns  # Total no. patterns
     self.n_target_spikes = n_target_spikes  # Desired no. target spikes
     self.n_inputs = n_inputs  # No. afferent spike trains
     self.n_outputs = n_outputs  # No. postsynaptic neurons
     self.n_epochs = n_epochs  # Total no. training epochs
     self.rng = NumpyRNG(seed)  # Random number generator
     # Initial weight bounds based on uniform distrib. (nA)
     self.w_unif_init = (0.0, 200.0 / self.n_inputs)
Example 9
def fixed_number_pre_with_replacement(sim):
    sim.setup()
    p1 = sim.Population(5, sim.IF_cond_exp())
    p2 = sim.Population(7, sim.IF_cond_exp())
    synapse_type1 = sim.StaticSynapse(weight=0.5, delay=0.5)
    connector1 = sim.FixedNumberPreConnector(n=3,
                                             with_replacement=True,
                                             rng=NumpyRNG())
    prj1 = sim.Projection(p1, p2, connector1, synapse_type1)
    print("Projection #1\n", connection_plot(prj1))
    delays = prj1.get('delay', format='list', gather=False)
    assert_equal(len(delays), connector1.n * p2.size)
    weights = prj1.get('weight', format='array', gather=False)
    for column in weights.T:
        column[numpy.isnan(column)] = 0
        assert_equal(column.sum(), 1.5)
Example 10
def do_run(split, seed=None):
    p.setup(1.0)

    if split:
        p.set_number_of_neurons_per_core(p.SpikeSourcePoisson, 27)
        p.set_number_of_neurons_per_core(p.IF_curr_exp, 22)

    inp = p.Population(100,
                       p.SpikeSourcePoisson(rate=100, seed=seed),
                       label="input")
    pop = p.Population(100, p.IF_curr_exp, {}, label="pop")

    p.Projection(inp,
                 pop,
                 p.OneToOneConnector(),
                 synapse_type=p.StaticSynapse(weight=5))

    pop.record("spikes")
    inp.record("spikes")

    p.run(100)

    inp.set(rate=10)
    # pop.set("cm", 0.25)
    pop.set(tau_syn_E=1)

    p.run(100)

    pop_spikes1 = pop.spinnaker_get_data('spikes')
    inp_spikes1 = inp.spinnaker_get_data('spikes')

    p.reset()

    inp.set(rate=0)
    pop.set(i_offset=1.0)
    vs = p.RandomDistribution("uniform", [-65.0, -55.0],
                              rng=NumpyRNG(seed=seed))
    pop.initialize(v=vs)

    p.run(100)

    pop_spikes2 = pop.spinnaker_get_data('spikes')
    inp_spikes2 = inp.spinnaker_get_data('spikes')

    p.end()

    return (pop_spikes1, inp_spikes1, pop_spikes2, inp_spikes2)
Example 11
def fixed_number_pre_with_replacement_heterogeneous_parameters(sim):
    sim.setup()
    p1 = sim.Population(5, sim.IF_cond_exp())
    p2 = sim.Population(7, sim.IF_cond_exp())
    connector1 = sim.FixedNumberPreConnector(n=3, with_replacement=True, rng=NumpyRNG())
    synapse_type2 = sim.TsodyksMarkramSynapse(weight=lambda d: d, delay=0.5, U=lambda d: 0.02 * d + 0.1)
    #synapse_type2 = sim.TsodyksMarkramSynapse(weight=0.001, delay=0.5, U=lambda d: 2*d+0.1)
    prj2 = sim.Projection(p1, p2, connector1, synapse_type2)
    print("Projection 2")
    x = prj2.get(['weight', 'delay', 'U'], format='list', gather=False)
    from pprint import pprint
    pprint(x)
    i, j, w, d, u = numpy.array(x).T
    assert_arrays_equal(w, abs(i - j))
    assert_arrays_equal(d, 0.5 * numpy.ones(p2.size * connector1.n))
    assert_arrays_equal(u, 0.02 * abs(i - j) + 0.1)
    sim.end()
Example 12
def scenario4(sim):
    """
    Network with spatial structure
    """
    init_logging(logfile=None, debug=True)
    sim.setup()
    rng = NumpyRNG(seed=76454, parallel_safe=False)

    input_layout = RandomStructure(boundary=Cuboid(width=500.0, height=500.0, depth=100.0),
                                   origin=(0, 0, 0), rng=rng)
    inputs = sim.Population(100, sim.SpikeSourcePoisson(rate=RandomDistribution('uniform', [3.0, 7.0], rng=rng)),
                            structure=input_layout, label="inputs")
    output_layout = Grid3D(aspect_ratioXY=1.0, aspect_ratioXZ=5.0, dx=10.0, dy=10.0, dz=10.0,
                           x0=0.0, y0=0.0, z0=200.0)
    outputs = sim.Population(200, sim.EIF_cond_exp_isfa_ista(),
                             initial_values = {'v': RandomDistribution('normal', [-65.0, 5.0], rng=rng),
                                               'w': RandomDistribution('normal', [0.0, 1.0], rng=rng)},
                             structure=output_layout, # 10x10x2 grid
                             label="outputs")
    logger.debug("Output population positions:\n %s", outputs.positions)
    DDPC = sim.DistanceDependentProbabilityConnector
    input_connectivity = DDPC("0.5*exp(-d/100.0)", rng=rng)
    recurrent_connectivity = DDPC("sin(pi*d/250.0)**2", rng=rng)
    depressing = sim.TsodyksMarkramSynapse(weight=RandomDistribution('normal', (0.1, 0.02), rng=rng),
                                           delay="0.5 + d/100.0",
                                           U=0.5, tau_rec=800.0, tau_facil=0.0)
    facilitating = sim.TsodyksMarkramSynapse(weight=0.05,
                                             delay="0.2 + d/100.0",
                                             U=0.04, tau_rec=100.0,
                                             tau_facil=1000.0)
    input_connections = sim.Projection(inputs, outputs, input_connectivity,
                                       receptor_type='excitatory',
                                       synapse_type=depressing,
                                       space=Space(axes='xy'),
                                       label="input connections")
    recurrent_connections = sim.Projection(outputs, outputs, recurrent_connectivity,
                                           receptor_type='inhibitory',
                                           synapse_type=facilitating,
                                           space=Space(periodic_boundaries=((-100.0, 100.0), (-100.0, 100.0), None)), # should add "calculate_boundaries" method to Structure classes
                                           label="recurrent connections")
    outputs.record('spikes')
    outputs.sample(10, rng=rng).record('v')
    sim.run(1000.0)
    data = outputs.get_data()
    sim.end()
    return data
Example 13
def initialize():
    global sim
    global options
    global extra
    global rngseed
    global parallel_safe
    global rng
    global n_ext
    global n_exc
    global n_inh

    sim, options = get_simulator(
        ("--plot-figure", "Plot the connections to a file."))

    init_logging(None, debug=True)

    # === General parameters =================================================

    threads = 1
    rngseed = 98765
    parallel_safe = True
    rng = NumpyRNG(seed=rngseed, parallel_safe=parallel_safe)

    # === general network parameters (except connections) ====================

    n_ext = 60  # number of external stimuli
    n_exc = 60  # number of excitatory cells
    n_inh = 60  # number of inhibitory cells

    # === Options ============================================================

    extra = {
        'loglevel': 2,
        'useSystemSim': True,
        'maxNeuronLoss': 0.,
        'maxSynapseLoss': 0.4,
        'hardwareNeuronSize': 8,
        'threads': threads,
        'filename': "connections.xml",
        'label': 'VA'
    }
    if sim.__name__ == "pyNN.hardware.brainscales":
        extra['hardware'] = sim.hardwareSetup['small']

    if options.simulator == "neuroml":
        extra["file"] = "connections.xml"
Example 14
    def __init__(self, mean, stdev, dt=None, start=0.0, stop=None, rng=None):
        """Construct the current source.

        Required arguments:
            mean  -- mean current amplitude in nA
            stdev -- standard deviation of the current amplitude in nA

        Optional arguments:
            dt    -- interval between updates of the current amplitude. Must be
                     a multiple of the simulation time step. If not specified,
                     the simulation time step will be used.
            start -- onset of the current injection in ms. If not specified, the
                     current will begin at the start of the simulation.
            stop  -- end of the current injection in ms. If not specified, the
                     current will continue until the end of the simulation.
            rng   -- an RNG object from the `pyNN.random` module. For speed,
                     this should be a `NativeRNG` instance (uses the simulator's
                     internal random number generator). For reproducibility
                     across simulators, use one of the other RNG types. If not
                     specified, a NumpyRNG is used.
        """
        self.rng = rng or NumpyRNG()
        self.dt = dt or state.dt
        if dt:
            assert self.dt % dt == 0
        self.start = start
        self.stop = stop
        self.mean = mean
        self.stdev = stdev
        if isinstance(rng, NativeRNG):
            self._device = nest.Create('noise_generator')
            nest.SetStatus(
                self._device, {
                    'mean': mean * 1000.0,
                    'std': stdev * 1000.0,
                    'start': self.delay_correction(start),
                    'dt': self.dt
                })
            if stop:
                nest.SetStatus(self._device,
                               {'stop': self.delay_correction(stop)})
        else:
            raise NotImplementedError(
                "Only using a NativeRNG is currently supported.")
Example 15
def unif(n_spikes, T, rng=NumpyRNG(), rounding=False, t_min=1.0, min_isi=10.0):
    """
    Generate uniformly distributed spikes between [t_min, T),
    with minimum inter-spike separation and optional rounding

    pyNN.nest is generally unstable with rounding for input spikes
    pyNN.nest errors if lowest spike value is exactly equal to dt

    Input spikes between 0.0 and dt are not integrated over

    Parameters
    ----------
    n_spikes : int
    T : float
        Time interval (ms)
    t_min : float
        Lower bound on generated time value (ms)
    min_isi : float
        Minimum inter-spike separation : n_spikes*MIN_ISI << T (default 10 ms)

    Returns
    -------
    spike_times : pyNN.parameters.Sequence (float64)

    """

    spike_times = np.empty([0], dtype=float)
    while spike_times.size < n_spikes:
        timing = rng.uniform(t_min, T)

        # Ensure minimum separation w.r.t. existing spikes
        if (spike_times.size > 0
                and np.min(np.abs(timing - spike_times)) < min_isi):
            continue
        else:
            spike_times = np.append(spike_times, timing)

    spike_times.sort()

    if rounding:
        return Sequence(np.floor(spike_times))
    else:
        return Sequence(spike_times)
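Example use of unif with hypothetical values: ten spike times drawn in [1.0, 500.0) ms, at least 10 ms apart, made reproducible with a seeded NumpyRNG. The returned Sequence can be passed straight to a SpikeSourceArray:

from pyNN.random import NumpyRNG

spike_times = unif(10, 500.0, rng=NumpyRNG(seed=42))
# spike_times is a pyNN.parameters.Sequence, e.g. for sim.SpikeSourceArray(spike_times=spike_times)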
Example 16
def fixed_number_post_with_replacement(sim):
    sim.setup()
    p1 = sim.Population(5, sim.IF_cond_exp())
    p2 = sim.Population(7, sim.IF_cond_exp())
    synapse_type1 = sim.StaticSynapse(weight=0.5, delay=0.5)
    connector1 = sim.FixedNumberPostConnector(n=9, with_replacement=True, rng=NumpyRNG())
    prj1 = sim.Projection(p1, p2, connector1, synapse_type1)
    print("Projection #1\n", connection_plot(prj1))
    delays = prj1.get('delay', format='list', gather=False)
    assert_equal(len(delays), connector1.n * p1.size)
    weights = prj1.get('weight', format='array', gather=False)
    for row in weights:
        row[np.isnan(row)] = 0
        assert_equal(row.sum(), 4.5)

    weights2 = prj1.get('weight', format='array', gather=False, multiple_synapses='min')
    for row in weights2:
        n_nan = np.isnan(row).sum()
        row[np.isnan(row)] = 0
        assert_equal(row.sum(), (row.size - n_nan)*0.5)
Example 17
 def _generate_random_values(
         self, values, n_connections, pre_vertex_slice, post_vertex_slice):
     """
     :param ~pyNN.random.NumpyRNG values:
     :param int n_connections:
     :param ~pacman.model.graphs.common.Slice pre_vertex_slice:
     :param ~pacman.model.graphs.common.Slice post_vertex_slice:
     :rtype: ~numpy.ndarray
     """
     key = (id(pre_vertex_slice), id(post_vertex_slice), id(values))
     seed = self.__param_seeds.get(key, None)
     if seed is None:
         seed = int(values.rng.next() * 0x7FFFFFFF)
         self.__param_seeds[key] = seed
     new_rng = NumpyRNG(seed)
     copy_rd = RandomDistribution(
         values.name, parameters_pos=None, rng=new_rng,
         **values.parameters)
     if n_connections == 1:
         return numpy.array([copy_rd.next(1)], dtype="float64")
     return copy_rd.next(n_connections)
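The method above derives a per-slice seed from the parent distribution's RNG and rebuilds the RandomDistribution around a fresh NumpyRNG, so each (pre, post) slice gets an independent but reproducible stream. A standalone sketch of that idea with hypothetical parameters:

from pyNN.random import NumpyRNG, RandomDistribution

values = RandomDistribution("uniform", low=0.0, high=1.0, rng=NumpyRNG(seed=1))
slice_seed = int(values.rng.next() * 0x7FFFFFFF)     # per-slice seed drawn from the parent RNG
copy_rd = RandomDistribution("uniform", low=0.0, high=1.0, rng=NumpyRNG(slice_seed))
samples = copy_rd.next(5)                            # reproducible draws for this slice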
Example 18
    def __init__(self, connector, pre_population, post_population,
                 prepop_is_view, postpop_is_view, rng,
                 synapse_dynamics, synapse_type, is_virtual_machine,
                 weights=None, delays=None):
        """
        :param AbstractConnector connector:
            The connector connected to the synapse
        :param pre_population: The population sending spikes to the synapse
        :type pre_population: ~spynnaker.pyNN.models.populations.Population or
            ~spynnaker.pyNN.models.populations.PopulationView
        :param post_population: The population hosting the synapse
        :type post_population: ~spynnaker.pyNN.models.populations.Population
            or ~spynnaker.pyNN.models.populations.PopulationView
        :param bool prepop_is_view: Whether the ``pre_population`` is a view
        :param bool postpop_is_view: Whether the ``post_population`` is a view
        :param rng: Seeded random number generator
        :type rng: ~pyNN.random.NumpyRNG or None
        :param AbstractSynapseDynamics synapse_dynamics:
            The dynamic behaviour of the synapse
        :param int synapse_type: The type of the synapse
        :param bool is_virtual_machine: Whether the machine is virtual
        :param weights: The synaptic weights
        :type weights: float or list(float) or ~numpy.ndarray(float) or None
        :param delays: The total synaptic delays
        :type delays: float or list(float) or ~numpy.ndarray(float) or None
        """
        self.__connector = connector
        self.__pre_population = pre_population
        self.__post_population = post_population
        self.__prepop_is_view = prepop_is_view
        self.__postpop_is_view = postpop_is_view
        self.__rng = (rng or NumpyRNG())
        self.__synapse_dynamics = synapse_dynamics
        self.__synapse_type = synapse_type
        self.__weights = weights
        self.__delays = delays
        self.__is_virtual_machine = is_virtual_machine

        # Make a list of holders to be updated
        self.__pre_run_connection_holders = list()
Example 19
class param:
    seed = 8658764  # Seed for reproduction of random number
    rng = NumpyRNG(seed)  # Seeded so results are reproducible
    input_nr = 9  # Number of input neurons
    readout_nr = 2  # Number of readout neurons
    reservoir_nr = 50  # Number of reservoir neurons
    simulation_time = 19.0  # Simulation time for each input
    dt = 1  # Timestep in simulation
    res_pconn = 0.1  # sparse connection probability for reservoir
    images_train_nr = 9  # Number of training images to train with;
    # must be a multiple of 3
    images_test_nr = 9  # Number of test images
    images_train = generate_labeledImages(images_train_nr)
    images_test = generate_labeledImages(images_test_nr)

    # If network uses excitatory and inhibitory neurons
    res_exc_nr = int(math.ceil(reservoir_nr *
                               0.8))  # Number of excitatory neurons
    res_inh_nr = int(math.floor(reservoir_nr *
                                0.2))  # Number of inhibitory neurons

    print('exc:', res_exc_nr)
Example 20
 def test_initial_value(self):
     sim.setup(timestep=1.0)
     pop = sim.Population(5, sim.IF_curr_exp(), label="pop_1")
     self.assertEqual([-65, -65, -65, -65, -65], pop.get_initial_value("v"))
     view = PopulationView(pop, [1, 3], label="Odds")
     view2 = PopulationView(pop, [1, 2], label="OneTwo")
     view_iv = view.initial_values
     self.assertEqual(1, len(view_iv))
     self.assertEqual([-65, -65], view_iv["v"])
     view.initialize(v=-60)
     self.assertEqual([-65, -60, -65, -60, -65], pop.get_initial_value("v"))
     self.assertEqual([-60, -60], view.initial_values["v"])
     self.assertEqual([-60, -65], view2.initial_values["v"])
     rand_distr = RandomDistribution("uniform",
                                     parameters_pos=[-65.0, -55.0],
                                     rng=NumpyRNG(seed=85524))
     view.initialize(v=rand_distr)
     self.assertEqual([-64.43349869042906, -63.663421790102184],
                      view.initial_values["v"])
     view.initialize(v=lambda i: -65 + i / 10.0)
     self.assertEqual([-64.9, -64.7], view.initial_values["v"])
     sim.end()
Example 21
def obtain_synapses(wiring_plan):

    rng = NumpyRNG(seed=64754)
    delay_distr = RandomDistribution('normal', [2, 1e-1], rng=rng)
    weight_distr = RandomDistribution('normal', [45, 1e-1], rng=rng)


    flat_iter = [(i, j, k, xaxis) for i, j in enumerate(wiring_plan) for k, xaxis in enumerate(j)]
    index_exc = list(set( source for (source,j,target,xaxis) in flat_iter if xaxis==1 or xaxis == 2 ))
    index_inh = list(set( source for (source,j,target,xaxis) in flat_iter if xaxis==-1 or xaxis == -2 ))

    EElist = []
    IIlist = []
    EIlist = []
    IElist = []
    for (source,j,target,xaxis) in flat_iter:
        delay = delay_distr.next()
        weight = 1.0 # will be updated later.
        if xaxis==1 or xaxis == 2:
            if target in index_inh:
                EIlist.append((source,target,delay,weight))
            else:
                EElist.append((source,target,delay,weight))

        if xaxis==-1 or xaxis == -2:
            if target in index_exc:
                IElist.append((source,target,delay,weight))
            else:
                IIlist.append((source,target,delay,weight))

    conn_ee = sim.FromListConnector(EElist)
    conn_ie = sim.FromListConnector(IElist)
    conn_ei = sim.FromListConnector(EIlist)
    conn_ii = sim.FromListConnector(IIlist)

    return (conn_ee, conn_ie, conn_ei, conn_ii,index_exc,index_inh)
Example 22
exc_cells = sim.Population(n_exc,
                           celltype(**cell_params),
                           label="Excitatory_Cells")
inh_cells = sim.Population(n_inh,
                           celltype(**cell_params),
                           label="Inhibitory_Cells")
if options.benchmark == "COBA":
    ext_stim = sim.Population(20,
                              sim.SpikeSourcePoisson(rate=rate,
                                                     duration=stim_dur),
                              label="expoisson")
    rconn = 0.01
    ext_conn = sim.FixedProbabilityConnector(rconn)
    ext_syn = sim.StaticSynapse(weight=0.1)

print "%s Initialising membrane potential to random values..." % node_id
rng = NumpyRNG(seed=rngseed, parallel_safe=parallel_safe)
uniformDistr = RandomDistribution('uniform', [v_reset, v_thresh], rng=rng)
exc_cells.initialize(v=uniformDistr)
inh_cells.initialize(v=uniformDistr)

print "%s Connecting populations..." % node_id
progress_bar = ProgressBar(width=20)
connector = sim.FixedProbabilityConnector(pconn,
                                          rng=rng,
                                          callback=progress_bar)
exc_syn = sim.StaticSynapse(weight=w_exc, delay=delay)
inh_syn = sim.StaticSynapse(weight=w_inh, delay=delay)

connections = {}
connections['e2e'] = sim.Projection(exc_cells,
                                    exc_cells,
Example 23
import spynnaker8 as sim
import spynnaker8.spynnaker_plotting as splot
import pyNN.utility.plotting as plot
import matplotlib.pyplot as plt
from pyNN.random import RandomDistribution, NumpyRNG

# Remove random effect for testing
# Set to None to randomize
rng = NumpyRNG(seed=1)

# Choose the number of neurons to be simulated in the network.
n_neurons = 100
n_exc = int(round(n_neurons * 0.8))
n_inh = int(round(n_neurons * 0.2))
simtime = 5000

# Set up the simulation to use 0.1ms timesteps.
sim.setup(timestep=0.1)

# Create an excitatory population with 80% of the neurons and
# an inhibitory population with 20% of the neurons.
pop_exc = sim.Population(n_exc, sim.IF_curr_exp(), label="Excitatory")
pop_inh = sim.Population(n_inh, sim.IF_curr_exp(), label="Inhibitory")

# Create an excitatory Poisson stimulation population with 80% of the neurons
# and an inhibitory Poisson stimulation population with 20% of the neurons,
# both with a rate of 1000 Hz.
# TODO RATE?
if rng is None:
    seed = None
Example 24
def do_run(seed=None):

    random.seed(seed)
    # SpiNNaker setup
    sim.setup(timestep=1.0, min_delay=1.0, max_delay=10.0)

    # +-------------------------------------------------------------------+
    # | General Parameters                                                |
    # +-------------------------------------------------------------------+

    # Population parameters
    model = sim.IF_curr_exp

    cell_params = {
        'cm': 0.25,
        'i_offset': 0.0,
        'tau_m': 10.0,
        'tau_refrac': 2.0,
        'tau_syn_E': 2.5,
        'tau_syn_I': 2.5,
        'v_reset': -70.0,
        'v_rest': -65.0,
        'v_thresh': -55.4
    }

    # Other simulation parameters
    e_rate = 200
    in_rate = 350

    n_stim_test = 5
    n_stim_pairing = 10
    dur_stim = 20

    pop_size = 40

    ISI = 150.
    start_test_pre_pairing = 200.
    start_pairing = 1500.
    start_test_post_pairing = 700.

    simtime = start_pairing + start_test_post_pairing + \
        ISI*(n_stim_pairing + n_stim_test) + 550.  # let's make it 5000

    # Initialisations of the different types of populations
    IAddPre = []
    IAddPost = []

    # +-------------------------------------------------------------------+
    # | Creation of neuron populations                                    |
    # +-------------------------------------------------------------------+

    # Neuron populations
    pre_pop = sim.Population(pop_size, model(**cell_params))
    post_pop = sim.Population(pop_size, model(**cell_params))

    # Test of the effect of activity of the pre_pop population on the post_pop
    # population prior to the "pairing" protocol : only pre_pop is stimulated
    for i in range(n_stim_test):
        IAddPre.append(
            sim.Population(
                pop_size,
                sim.SpikeSourcePoisson(rate=in_rate,
                                       start=start_test_pre_pairing + ISI *
                                       (i),
                                       duration=dur_stim,
                                       seed=random.randint(0, 100000000))))

    # Pairing protocol : pre_pop and post_pop are stimulated with a 10 ms
    # difference
    for i in range(n_stim_pairing):
        IAddPre.append(
            sim.Population(
                pop_size,
                sim.SpikeSourcePoisson(rate=in_rate,
                                       start=start_pairing + ISI * (i),
                                       duration=dur_stim,
                                       seed=random.randint(0, 100000000))))
        IAddPost.append(
            sim.Population(
                pop_size,
                sim.SpikeSourcePoisson(rate=in_rate,
                                       start=start_pairing + ISI * (i) + 10.,
                                       duration=dur_stim,
                                       seed=random.randint(0, 100000000))))

    # Test post pairing : only pre_pop is stimulated
    # (and should trigger activity in Post)
    for i in range(n_stim_test):
        start = start_pairing + ISI * n_stim_pairing + \
                start_test_post_pairing + ISI * i
        IAddPre.append(
            sim.Population(
                pop_size,
                sim.SpikeSourcePoisson(rate=in_rate,
                                       start=start,
                                       duration=dur_stim,
                                       seed=random.randint(0, 100000000))))

    # Noise inputs
    INoisePre = sim.Population(pop_size,
                               sim.SpikeSourcePoisson(rate=e_rate,
                                                      start=0,
                                                      duration=simtime,
                                                      seed=random.randint(
                                                          0, 100000000)),
                               label="expoisson")
    INoisePost = sim.Population(pop_size,
                                sim.SpikeSourcePoisson(rate=e_rate,
                                                       start=0,
                                                       duration=simtime,
                                                       seed=random.randint(
                                                           0, 100000000)),
                                label="expoisson")

    # +-------------------------------------------------------------------+
    # | Creation of connections                                           |
    # +-------------------------------------------------------------------+

    # Connection parameters
    JEE = 3.

    # Connection type between noise poisson generator and
    # excitatory populations
    ee_connector = sim.OneToOneConnector()

    # Noise projections
    sim.Projection(INoisePre,
                   pre_pop,
                   ee_connector,
                   receptor_type='excitatory',
                   synapse_type=sim.StaticSynapse(weight=JEE * 0.05))
    sim.Projection(INoisePost,
                   post_pop,
                   ee_connector,
                   receptor_type='excitatory',
                   synapse_type=sim.StaticSynapse(weight=JEE * 0.05))

    # Additional Inputs projections
    for i in range(len(IAddPre)):
        sim.Projection(IAddPre[i],
                       pre_pop,
                       ee_connector,
                       receptor_type='excitatory',
                       synapse_type=sim.StaticSynapse(weight=JEE * 0.05))
    for i in range(len(IAddPost)):
        sim.Projection(IAddPost[i],
                       post_pop,
                       ee_connector,
                       receptor_type='excitatory',
                       synapse_type=sim.StaticSynapse(weight=JEE * 0.05))

    # Plastic Connections between pre_pop and post_pop
    stdp_model = sim.STDPMechanism(
        timing_dependence=sim.SpikePairRule(tau_plus=20.,
                                            tau_minus=50.0,
                                            A_plus=0.02,
                                            A_minus=0.02),
        weight_dependence=sim.AdditiveWeightDependence(w_min=0, w_max=0.9))

    rng = NumpyRNG(seed=seed, parallel_safe=True)
    plastic_projection = \
        sim.Projection(pre_pop, post_pop, sim.FixedProbabilityConnector(
            p_connect=0.5, rng=rng), synapse_type=stdp_model)

    # +-------------------------------------------------------------------+
    # | Simulation and results                                            |
    # +-------------------------------------------------------------------+

    # Record spikes and neurons' potentials
    pre_pop.record(['v', 'spikes'])
    post_pop.record(['v', 'spikes'])

    # Run simulation
    sim.run(simtime)

    weights = plastic_projection.get('weight', 'list')

    pre_spikes = neo_convertor.convert_spikes(pre_pop.get_data('spikes'))
    post_spikes = neo_convertor.convert_spikes(post_pop.get_data('spikes'))

    # End simulation on SpiNNaker
    sim.end()

    return (pre_spikes, post_spikes, weights)
Example 25
def do_run(plot):

    p.setup(timestep=1.0)

    cell_params_lif = {
        'cm': 0.25,
        'i_offset': 0.0,
        'tau_m': 20.0,
        'tau_refrac': 2.0,
        'tau_syn_E': 5.0,
        'tau_syn_I': 5.0,
        'v_reset': -70.0,
        'v_rest': -65.0,
        'v_thresh': -50.0
    }

    def create_grid(n, label, dx=1.0, dy=1.0):
        grid_structure = p.Grid2D(dx=dx, dy=dy, x0=0.0, y0=0.0)
        return p.Population(n * n,
                            p.IF_curr_exp(**cell_params_lif),
                            structure=grid_structure,
                            label=label)

    # Parameters
    n = 5
    weight_to_spike = 2.0
    delay = 2
    runtime = 1000
    p.set_number_of_neurons_per_core(p.IF_curr_exp, 100)

    # Network population
    small_world = create_grid(n, 'small_world')

    # SpikeInjector
    injectionConnection = [(0, 0)]
    spikeArray = {'spike_times': [[0]]}
    inj_pop = p.Population(1,
                           p.SpikeSourceArray(**spikeArray),
                           label='inputSpikes_1')

    # Injector projection
    p.Projection(inj_pop, small_world,
                 p.FromListConnector(injectionConnection),
                 p.StaticSynapse(weight=weight_to_spike, delay=delay))

    # Connectors
    degree = 2.0
    rewiring = 0.4
    rng = NumpyRNG(seed=1)

    small_world_connector = p.SmallWorldConnector(degree, rewiring, rng=rng)

    # Projection for small world grid
    sw_pro = p.Projection(small_world, small_world, small_world_connector,
                          p.StaticSynapse(weight=2.0, delay=5))

    small_world.record(['v', 'spikes'])

    p.run(runtime)

    v = small_world.get_data('v')
    spikes = small_world.get_data('spikes')
    weights = sw_pro.get('weight', 'list')
    if plot:
        # pylint: disable=no-member
        Figure(
            # raster plot of the presynaptic neuron spike times
            Panel(spikes.segments[0].spiketrains,
                  yticks=True,
                  markersize=0.2,
                  xlim=(0, runtime),
                  xticks=True),
            # membrane potential of the postsynaptic neuron
            Panel(v.segments[0].filter(name='v')[0],
                  ylabel="Membrane potential (mV)",
                  data_labels=[small_world.label],
                  yticks=True,
                  xlim=(0, runtime),
                  xticks=True),
            title="Simple small world connector",
            annotations="Simulated with {}".format(p.name()))
        plt.show()

    p.end()

    return v, spikes, weights
Example 26
def do_run(nNeurons):

    p.setup(timestep=1.0, min_delay=1.0, max_delay=32.0)

    p.set_number_of_neurons_per_core("IF_curr_exp", 100)

    cm = list()
    i_off = list()
    tau_m = list()
    tau_re = list()
    tau_syn_e = list()
    tau_syn_i = list()
    v_reset = list()
    v_rest = list()
    v_thresh = list()

    cell_params_lif = {'cm': cm, 'i_offset': i_off, 'tau_m': tau_m,
                       'tau_refrac': tau_re, 'tau_syn_E': tau_syn_e,
                       'tau_syn_I': tau_syn_i, 'v_reset': v_reset,
                       'v_rest': v_rest, 'v_thresh': v_thresh}

    for atom in range(0, nNeurons):
        cm.append(0.25)
        i_off.append(0.0)
        tau_m.append(10.0)
        tau_re.append(2.0)
        tau_syn_e.append(0.5)
        tau_syn_i.append(0.5)
        v_reset.append(-65.0)
        v_rest.append(-65.0)
        v_thresh.append(-64.4)

    gbar_na_distr = RandomDistribution('normal', (20.0, 2.0),
                                       rng=NumpyRNG(seed=85524))

    cell_params_lif = {'cm': cm, 'i_offset': i_off, 'tau_m': tau_m,
                       'tau_refrac': tau_re, 'tau_syn_E': tau_syn_e,
                       'tau_syn_I': tau_syn_i, 'v_reset': v_reset,
                       'v_rest': v_rest, 'v_thresh': v_thresh}

    populations = list()
    projections = list()

    weight_to_spike = 2
    delay = 1

    connections = list()
    for i in range(0, nNeurons):
        singleConnection = (i, ((i + 1) % nNeurons), weight_to_spike, delay)
        connections.append(singleConnection)

    injectionConnection = [(0, 0, weight_to_spike, delay)]
    spikeArray = {'spike_times': [[0]]}
    populations.append(p.Population(nNeurons, p.IF_curr_exp, cell_params_lif,
                                    label='pop_1'))
    populations.append(p.Population(1, p.SpikeSourceArray, spikeArray,
                                    label='inputSpikes_1'))

    populations[0].set({'cm': 0.25})
    populations[0].set('cm', cm)
    populations[0].set({'tau_m': tau_m, 'v_thresh': v_thresh})
    populations[0].set('i_offset', gbar_na_distr)
    populations[0].set('i_offset', i_off)

    projections.append(p.Projection(populations[0], populations[0],
                                    p.FromListConnector(connections)))
    projections.append(p.Projection(populations[1], populations[0],
                                    p.FromListConnector(injectionConnection)))

    populations[0].record_v()
    populations[0].record_gsyn()
    populations[0].record()

    p.run(100)

    v = populations[0].get_v(compatible_output=True)
    gsyn = populations[0].get_gsyn(compatible_output=True)
    spikes = populations[0].getSpikes(compatible_output=True)

    p.end()

    return (v, gsyn, spikes)
Example 27
dfEE.loc[0].keys()
dfm = dfEE.to_numpy()  # DataFrame.as_matrix() was removed in recent pandas; to_numpy() is equivalent

rcls = dfm[:, :1]  # real cell labels.
rcls = rcls[1:]
rcls = {k: v
        for k, v in enumerate(rcls)}  # real cell labels, cast to dictionary
import pickle
with open('cell_names.p', 'wb') as f:
    pickle.dump(rcls, f)
import pandas as pd
pd.DataFrame(rcls).to_csv('cell_names.csv', index=False)

filtered = dfm[:, 3:]
filtered = filtered[1:]
rng = NumpyRNG(seed=64754)
delay_distr = RandomDistribution('normal', [45, 1e-1], rng=rng)

index_exc = [i for i, d in enumerate(dfm) if '+' in d[0]]
index_inh = [i for i, d in enumerate(dfm) if '-' in d[0]]

EElist = []
IIlist = []
EIlist = []
IElist = []

for i, j in enumerate(filtered):
    for k, xaxis in enumerate(j):
        if xaxis == 1 or xaxis == 2:
            source = i
            target = k
Example 28
class Grid2D(BaseStructure):
    """
    Represents a structure with neurons distributed on a 2D grid.

    Arguments:
        `dx`, `dy`:
            distances between points in the x, y directions.
        `x0`, `y0`:
            coordinates of the starting corner of the grid.
        `z`:
            the z-coordinate of all points in the grid.
        `aspect_ratio`:
            ratio of the number of grid points per side (not the ratio of the
            side lengths, unless ``dx == dy``)
        `fill_order`:
            may be 'sequential' or 'random'
    """
    parameter_names = ("aspect_ratio", "dx", "dy", "x0", "y0", "z",
                       "fill_order")

    def __init__(self,
                 aspect_ratio=1.0,
                 dx=1.0,
                 dy=1.0,
                 x0=0.0,
                 y0=0.0,
                 z=0,
                 fill_order="sequential",
                 rng=None):
        self.aspect_ratio = aspect_ratio
        assert fill_order in ('sequential', 'random')
        self.fill_order = fill_order
        self.rng = rng
        self.dx = dx
        self.dy = dy
        self.x0 = x0
        self.y0 = y0
        self.z = z

    def calculate_size(self, n):
        """docstring goes here"""
        nx = math.sqrt(n * self.aspect_ratio)
        if n % nx != 0:
            raise Exception("Invalid size: n=%g, nx=%d" % (n, nx))
        nx = int(round(nx))
        ny = n // nx
        return nx, ny

    def generate_positions(self, n):
        nx, ny = self.calculate_size(n)
        x, y, z = numpy.indices((nx, ny, 1), dtype=float)
        x = self.x0 + self.dx * x.flatten()
        y = self.y0 + self.dy * y.flatten()
        z = self.z + z.flatten()
        positions = numpy.array((
            x, y,
            z))  # use column_stack, if we decide to switch from (3,n) to (n,3)
        if self.fill_order == 'sequential':
            return positions
        else:  # random
            if self.rng is None:
                self.rng = NumpyRNG()
            return self.rng.permutation(positions.T).T

    generate_positions.__doc__ = BaseStructure.generate_positions.__doc__
Example 29

import socket, os
from importlib import import_module
import numpy
from pyNN.utility import get_script_args, init_logging, normalized_filename

simulator_name = get_script_args(1)[0]
sim = import_module("pyNN.%s" % simulator_name)

from pyNN.random import NumpyRNG, RandomDistribution

init_logging(None, debug=True)

seed = 764756387
rng = NumpyRNG(seed=seed, parallel_safe=True)
tstop = 1000.0  # ms
input_rate = 100.0  # Hz
cell_params = {
    "tau_refrac": 2.0,  # ms
    "v_thresh": -50.0,  # mV
    "tau_syn_E": 2.0,  # ms
    "tau_syn_I": 2.0,  # ms
    "tau_m": RandomDistribution("uniform", low=18.0, high=22.0, rng=rng),
}
n_record = 3

node = sim.setup(timestep=0.025, min_delay=1.0, max_delay=1.0, debug=True, quit_on_end=False)
print "Process with rank %d running on %s" % (node, socket.gethostname())

print "[%d] Creating populations" % node
Example 30
import numpy
from pyNN.utility import get_script_args
simulator_name = get_script_args(1)[0]
exec("from pyNN.%s import *" % simulator_name)
from pyNN.random import NumpyRNG

setup(timestep=0.01, min_delay=2.0, max_delay=4.0)

ifcell = create(IF_curr_exp,{'i_offset' :   0.1, 'tau_refrac' : 3.0,
                             'v_thresh' : -51.0, 'tau_syn_E'  : 2.0,
                             'tau_syn_I':  5.0,  'v_reset'    : -70.0})
input_rate = 200.0
simtime = 1000.0
seed = 240965239

rng = NumpyRNG(seed=seed)
n_spikes = int(input_rate * simtime / 1000.0)  # rng.next() expects an integer count
spike_times = numpy.add.accumulate(rng.next(n_spikes, 'exponential', [1000.0/input_rate]))

spike_source = create(SpikeSourceArray(spike_times=spike_times))


conn = connect(spike_source, ifcell, weight=1.5, receptor_type='excitatory', delay=2.0)

record(('spikes', 'v'), ifcell, "Results/IF_curr_exp2_%s.pkl" % simulator_name)
initialize(ifcell, v=-53.2)

run(simtime)

end()
Example 31
import pyNN.brian as sim
from pyNN.random import RandomDistribution, NumpyRNG

Exc_in = 32
Inh_in = 32
noSpikes = 20  # number of spikes per channel per simulation run
stimSpikes = RandomDistribution(
    'uniform', low=0, high=500.0, rng=NumpyRNG(seed=72386)
).next(
    [Exc_in + Inh_in, noSpikes]
)  # generate uniformly distributed spike times for Exc_in + Inh_in channels, with noSpikes per channel
# print stimSpikes

for i in range(Exc_in):
    if i == 0:
        Excinp = sim.Population(
            1, sim.SpikeSourceArray(spike_times=stimSpikes[i, :]))
    else:
        spike_source = sim.Population(
            1, sim.SpikeSourceArray(spike_times=stimSpikes[i, :]))
        Excinp = Excinp + spike_source

for i in range(Inh_in):
    if i == 0:
        Inhinp = sim.Population(
            1, sim.SpikeSourceArray(spike_times=stimSpikes[i + Exc_in, :]))
    else:
        spike_source = sim.Population(
            1, sim.SpikeSourceArray(spike_times=stimSpikes[i + Exc_in, :]))
        Inhinp = Inhinp + spike_source
Example 32
 def __init__(self, boundary, origin=(0.0, 0.0, 0.0), rng=None):
     assert isinstance(boundary, Shape)
     assert len(origin) == 3
     self.boundary = boundary
     self.origin = origin
     self.rng = rng or NumpyRNG()
Example 33
def setupNetwork():
    node = pynn.setup(timestep=0.1,
                      min_delay=1.0,
                      max_delay=1.0,
                      debug=True,
                      quit_on_end=False)
    print "Process with rank %d running on %s" % (node, socket.gethostname())

    rng = NumpyRNG(seed=seed, parallel_safe=True)

    print "[%d] Creating populations" % node
    # 1) create excitatory populations
    l0_exc_population = pynn.Population(num['l0_exc_neurons'],
                                        native_cell_type('aeif_cond_exp'),
                                        cell_params_adex,
                                        label="exc0")
    l0_exc_population.record()
    l0_exc_population.record_v()
    l1_exc_population = pynn.Population(num['l1_exc_neurons'],
                                        pynn.IF_cond_exp,
                                        cell_params,
                                        label="exc1")
    l1_exc_population.record()

    # 2) create inhibitory population
    l0_inh_population = pynn.Population(num['l0_inh_neurons'],
                                        pynn.IF_cond_exp,
                                        cell_params,
                                        label="inh0")
    l0_inh_population.record()
    l1_inh_population = pynn.Population(num['l1_inh_neurons'],
                                        pynn.IF_cond_exp,
                                        cell_params,
                                        label="inh1")
    l1_inh_population.record()

    # 3) exc0 -> inh0
    inh_connector = pynn.FixedProbabilityConnector(p_exc0_inh0,
                                                   weights=w_exc0_inh0)
    l0_exc_inh_projection = pynn.Projection(l0_exc_population,
                                            l0_inh_population, inh_connector)

    # 4) exc1 -> inh1
    inh_connector = pynn.FixedProbabilityConnector(p_exc1_inh1,
                                                   weights=w_exc1_inh1)
    l1_exc_inh_projection = pynn.Projection(l1_exc_population,
                                            l1_inh_population, inh_connector)

    # 5) exc0 -> exc0
    exc_connector = pynn.AllToAllConnector(weights=0.0)
    l0_exc_exc_projection = pynn.Projection(l0_exc_population,
                                            l0_exc_population, exc_connector)
    exc0_exc0_weights = l0_exc_exc_projection.getWeights()

    exc0_exc0_weights = connect_gauss(num['l0_exc_neurons'],
                                      num['l0_exc_neurons'], sigma_exc0_exc0,
                                      w_exc0_exc0_max,
                                      num['l0_exc_maxneighbors'],
                                      exc0_exc0_weights, True)

    l0_exc_exc_projection.setWeights(exc0_exc0_weights)

    # 6) exc1 -> exc1
    exc_connector = pynn.AllToAllConnector(weights=0.0)
    l1_exc_exc_projection = pynn.Projection(l1_exc_population,
                                            l1_exc_population, exc_connector)
    exc1_exc1_weights = l1_exc_exc_projection.getWeights()

    exc1_exc1_weights = connect_gauss(num['l1_exc_neurons'],
                                      num['l1_exc_neurons'], sigma_exc1_exc1,
                                      w_exc1_exc1_max,
                                      num['l1_exc_maxneighbors'],
                                      exc1_exc1_weights, True)

    l1_exc_exc_projection.setWeights(exc1_exc1_weights)

    # 7) inh0 -> exc0
    connector = pynn.FixedProbabilityConnector(p_inh0_exc0,
                                               weights=w_inh0_exc0)
    l0_inh_exc_projection = pynn.Projection(l0_inh_population,
                                            l0_exc_population,
                                            connector,
                                            target="inhibitory")

    # 8) inh1 -> exc1
    connector = pynn.FixedProbabilityConnector(p_inh1_exc1,
                                               weights=w_inh1_exc1)
    l1_inh_exc_projection = pynn.Projection(l1_inh_population,
                                            l1_exc_population,
                                            connector,
                                            target="inhibitory")

    # 9) create input population
    input_population = pynn.Population(num['inputs'],
                                       pynn.SpikeSourcePoisson,
                                       {'rate': input_rate},
                                       label="input")
    input_population.record()

    # 10) input -> exc0
    stdp_model = pynn.STDPMechanism(
        timing_dependence=pynn.SpikePairRule(tau_plus=10.0, tau_minus=15.0),
        weight_dependence=pynn.AdditiveWeightDependence(w_min=0,
                                                        w_max=w_inp_exc0_max,
                                                        A_plus=0.012,
                                                        A_minus=0.012))

    connector = pynn.AllToAllConnector(weights=0.0)
    input_projection = pynn.Projection(
        input_population,
        l0_exc_population,
        connector,
        rng=rng,
        synapse_dynamics=pynn.SynapseDynamics(slow=stdp_model))

    input_weights = input_projection.getWeights()

    print "[%d] Creating input projections" % node
    input_weights = connect_gauss(num['inputs'], num['l0_exc_neurons'],
                                  sigma_inp_exc0, w_inp_exc0_peak,
                                  num['inputs_maxneighbors'], input_weights,
                                  False)

    input_projection.setWeights(input_weights)

    # 11) exc0 -> exc1
    stdp_model = pynn.STDPMechanism(
        timing_dependence=pynn.SpikePairRule(tau_plus=20.0, tau_minus=20.0),
        weight_dependence=pynn.AdditiveWeightDependence(w_min=0,
                                                        w_max=w_exc0_exc1_max,
                                                        A_plus=0.012,
                                                        A_minus=0.012))

    connector = pynn.AllToAllConnector(weights=0.0)
    l1_projection = pynn.Projection(
        l0_exc_population,
        l1_exc_population,
        connector,
        rng=rng,
        synapse_dynamics=pynn.SynapseDynamics(slow=stdp_model))

    exc0_exc1_weights = l1_projection.getWeights()

    print "[%d] Creating input projections" % node
    exc0_exc1_weights = connect_gauss(num['l0_exc_neurons'],
                                      num['l1_exc_neurons'], sigma_exc0_exc1,
                                      w_exc0_exc1_peak,
                                      num['l0_l1_maxneighbors'],
                                      exc0_exc1_weights, False)

    l1_projection.setWeights(exc0_exc1_weights)

    return node, l0_exc_population, l1_exc_population, l0_inh_population, l1_inh_population, input_population, input_projection, l1_projection
Example 34
def setupNetwork():
    node = pynn.setup(timestep=0.1,
                      min_delay=1.0,
                      max_delay=1.0,
                      debug=True,
                      quit_on_end=False)
    print "Process with rank %d running on %s" % (node, socket.gethostname())

    rng = NumpyRNG(seed=seed, parallel_safe=True)

    print "[%d] Creating populations" % node
    # 1) create excitatory populations
    l0_exc_population = pynn.Population(num['l0_exc_neurons'],
                                        pynn.IF_cond_exp,
                                        cell_params,
                                        label="exc0")
    l0_exc_population.record()
    #l0_exc_population.record_v()
    l1_exc_population = pynn.Population(num['l1_exc_neurons'],
                                        pynn.IF_cond_exp,
                                        cell_params,
                                        label="exc1")
    l1_exc_population.record()

    # 2) create inhibitory population
    l0_inh_population = pynn.Population(num['l0_inh_neurons'],
                                        pynn.IF_cond_exp,
                                        cell_params,
                                        label="inh0")
    l0_inh_population.record()
    l1_inh_population = pynn.Population(num['l1_inh_neurons'],
                                        pynn.IF_cond_exp,
                                        cell_params,
                                        label="inh1")
    l1_inh_population.record()

    # 3) exc0 -> inh0
    inh_connector = pynn.FixedProbabilityConnector(p_exc0_inh0,
                                                   weights=w_exc0_inh0)
    l0_exc_inh_projection = pynn.Projection(l0_exc_population,
                                            l0_inh_population, inh_connector)

    # 4) exc1 -> inh1
    inh_connector = pynn.FixedProbabilityConnector(p_exc1_inh1,
                                                   weights=w_exc1_inh1)
    l1_exc_inh_projection = pynn.Projection(l1_exc_population,
                                            l1_inh_population, inh_connector)

    # 5) exc0 -> exc0
    exc_connector = pynn.FixedProbabilityConnector(
        p_exc0_exc0, weights=w_exc0_exc0, allow_self_connections=False)
    l0_exc_exc_projection = pynn.Projection(l0_exc_population,
                                            l0_exc_population, exc_connector)

    # 6) exc1 -> exc1
    #l1_exc_exc_projection = pynn.Projection(l1_exc_population,l1_exc_population,exc_connector)

    #for i in range(num['nodes']):
    #    exc_inh_projections.append(Projection(exc_populations[i],inh_population,inh_connector))
    #    for j in range(i-num['neighbours'],i+num['neighbours']+1):
    #        if j != i:
    #            exc_connector = OneToOneConnector(weights=1.0/abs(j-i))

    #            if j<0:
    #                j+=num['nodes']
    #            if j> num['nodes']-1:
    #                j-=num['nodes']

    #            exc_exc_projections.append(Projection(exc_populations[i],exc_populations[j],exc_connector))

    # 7) inh0 -> exc0
    connector = pynn.FixedProbabilityConnector(p_inh0_exc0,
                                               weights=w_inh0_exc0)
    l0_inh_exc_projection = pynn.Projection(l0_inh_population,
                                            l0_exc_population,
                                            connector,
                                            target="inhibitory")

    # 8) inh1 -> exc1
    connector = pynn.FixedProbabilityConnector(p_inh1_exc1,
                                               weights=w_inh1_exc1)
    l1_inh_exc_projection = pynn.Projection(l1_inh_population,
                                            l1_exc_population,
                                            connector,
                                            target="inhibitory")

    # 9) create input population
    input_population = pynn.Population(num['inputs'],
                                       pynn.SpikeSourcePoisson,
                                       {'rate': input_rate},
                                       label="input")
    input_population.record()

    # 10) input -> exc0
    stdp_model = pynn.STDPMechanism(
        timing_dependence=pynn.SpikePairRule(tau_plus=20.0, tau_minus=20.0),
        weight_dependence=pynn.AdditiveWeightDependence(w_min=0,
                                                        w_max=w_inp_exc0_max,
                                                        A_plus=0.01,
                                                        A_minus=0.012))
    connector = pynn.AllToAllConnector(weights=pynn.RandomDistribution(
        distribution='uniform', parameters=[0.00, w_inp_exc0_max], rng=rng))
    #connector = pynn.FixedProbabilityConnector(p_inp_exc0,weights=w_inp_exc0)
    input_projection = pynn.Projection(
        input_population,
        l0_exc_population,
        connector,
        rng=rng,
        #synapse_dynamics=pynn.SynapseDynamics(slow=stdp_model)
    )

    # 11) exc0 -> exc1
    stdp_model = pynn.STDPMechanism(
        timing_dependence=pynn.SpikePairRule(tau_plus=20.0, tau_minus=20.0),
        weight_dependence=pynn.AdditiveWeightDependence(w_min=0,
                                                        w_max=w_exc0_exc1_max,
                                                        A_plus=0.01,
                                                        A_minus=0.012))
    connector = pynn.AllToAllConnector(weights=pynn.RandomDistribution(
        distribution='uniform', parameters=[0.00, w_exc0_exc1_max], rng=rng))
    #connector = pynn.FixedProbabilityConnector(0.05, weights=0.01)
    l1_projection = pynn.Projection(
        l0_exc_population,
        l1_exc_population,
        connector,
        rng=rng,
        #synapse_dynamics=pynn.SynapseDynamics(slow=stdp_model)
    )

    return node, l0_exc_population, l1_exc_population, l0_inh_population, l1_inh_population, input_population, input_projection, l1_projection
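# --- Usage sketch (not part of the original example) ----------------------
# A minimal, hedged illustration of how the objects returned by setupNetwork()
# might be driven with the PyNN 0.7-style API used above; the run time and the
# output file name are assumptions, not taken from the original script.
def runNetwork(tstop=1000.0):
    (node, l0_exc_population, l1_exc_population, l0_inh_population,
     l1_inh_population, input_population, input_projection,
     l1_projection) = setupNetwork()
    pynn.run(tstop)                                   # simulate for tstop ms
    input_weights = input_projection.getWeights()     # input -> exc0 weights
    exc0_exc1_weights = l1_projection.getWeights()    # exc0 -> exc1 weights
    l0_exc_population.printSpikes("exc0_spikes.dat")  # dump recorded spikes
    pynn.end()
    return input_weights, exc0_exc1_weights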
Esempio n. 35
timer = Timer()
seed = 764756387
tstop = 1000.0  # ms
input_rate = 100.0  # Hz
cell_params = {'tau_refrac': 2.0,  # ms
               'v_thresh':  -50.0, # mV
               'tau_syn_E':  2.0,  # ms
               'tau_syn_I':  2.0}  # ms
n_record = 5

node = setup(timestep=0.025, min_delay=1.0, max_delay=10.0, debug=True, quit_on_end=False)
print("Process with rank %d running on %s" % (node, socket.gethostname()))


rng = NumpyRNG(seed=seed, parallel_safe=True)

print("[%d] Creating populations" % node)
n_spikes = int(2 * tstop * input_rate / 1000.0)
spike_times = numpy.add.accumulate(rng.next(n_spikes, 'exponential',
                                            {'beta': 1000.0 / input_rate}, mask_local=False))

input_population = Population(100, SpikeSourceArray(spike_times=spike_times), label="input")
output_population = Population(10, IF_curr_exp(**cell_params), label="output")
print("[%d] input_population cells: %s" % (node, input_population.local_cells))
print("[%d] output_population cells: %s" % (node, output_population.local_cells))

print("[%d] Connecting populations" % node)
timer.start()
connector = CSAConnector(csa.random(0.5))
syn = StaticSynapse(weight=0.1)
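# --- Hedged continuation (not part of the original fragment) --------------
# The fragment stops after building the CSA connector and the static synapse;
# with the PyNN 0.8-style API used above they would typically be combined into
# a projection and simulated roughly as follows (assuming run() and end() are
# available via the same import as setup() and Population).
projection = Projection(input_population, output_population, connector, syn,
                        receptor_type='excitatory')
print("[%d] Connection time: %g s" % (node, timer.elapsedTime()))
output_population.record('spikes')
run(tstop)
spike_data = output_population.get_data('spikes')
end()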
Esempio n. 36
def network(para, simulator):

    if simulator == "-nest":
        sim = import_module("pyNN.nest")
    elif simulator == "-neuron":
        sim = import_module("pyNN.neuron")
    elif simulator == "-brian":
        sim = import_module("pyNN.brian")

    # initialize pyNN simulation
    sim.setup(timestep=para["dt"])

    # Parameters for excitatory neurons
    E_parameters = {
        "tau_m": para["taumE"],
        "cm": para["taumE"] / para["R"],
        "v_rest": para["Vr"],
        "v_reset": para["Vreset"],
        "v_thresh": para["Vt"],
        "tau_refrac": para["tref"],
        "tau_syn_E": para["tau_syn_e"],
        "tau_syn_I": para["tau_syn_e"],
        "i_offset": (para["VextE"] / para["R"])
    }

    # Parameters for inhibitory neurons
    I_parameters = {
        "tau_m": para["taumI"],
        "cm": para["taumI"] / para["R"],
        "v_rest": para["Vr"],
        "v_reset": para["Vreset"],
        "v_thresh": para["Vt"],
        "tau_refrac": para["tref"],
        "tau_syn_E": para["tau_syn_e"],
        "tau_syn_I": para["tau_syn_e"],
        "i_offset": (para["VextI"] / para["R"])
    }

    ############ All Excitatory and inhibitory neurons ########################

    # number of excitatory neurons in one network
    NE_net = int(para['N'] * para['Ne'])
    # Total number of excitatory neurons
    NE = int(para['NAreas'] * NE_net)

    # number of inhibitory neurons in one network
    NI_net = int(para['N'] * (1 - para['Ne']))
    # Total number of inhibitory neurons
    NI = int((para['NAreas'] * NI_net))

    popE = sim.Population(NE, sim.IF_curr_alpha, E_parameters, label="popE")
    popI = sim.Population(NI, sim.IF_curr_alpha, I_parameters, label="popI")

    ################################ Noise ####################################

    # Noise on excitatory neurons
    stdNoiseE = (para["sigma"] / para["R"]) * (para["taumE"]**
                                               0.5) / (para["dt"]**0.5)
    for i in range(NE):
        noise = sim.NoisyCurrentSource(mean=0,
                                       stdev=stdNoiseE,
                                       start=0.0,
                                       stop=para["duration"])
        popE[i].inject(noise)

    # Noise on inhibitory neurons
    stdNoiseI = (para["sigma"] / para["R"]) * (para["taumI"]**
                                               0.5) / (para["dt"]**0.5)
    for i in range(NI):
        noise = sim.NoisyCurrentSource(mean=0,
                                       stdev=stdNoiseI,
                                       start=0.0,
                                       stop=para["duration"])
        popI[i].inject(noise)

    # parameters for initial conditions
    kernelseed = 5456532
    rng = NumpyRNG(kernelseed, parallel_safe=True)
    uniformDistr = RandomDistribution('uniform',
                                      low=para["Vr"],
                                      high=para["Vt"],
                                      rng=rng)
    sim.initialize(popE, v=uniformDistr)
    sim.initialize(popI, v=uniformDistr)

    # Separate population in population views
    popEList = []
    popIList = []

    # Store projections
    EE = []
    IE = []
    EI = []
    II = []
    EElongRange = []
    IElongRange = []

    for i in range(para['NAreas']):

        # store sub populations in lists
        popEList.append(popE[(i * NE_net):((i + 1) * NE_net)])
        popIList.append(popI[(i * NI_net):((i + 1) * NI_net)])

        #### Synapses

        # Weights for recurrent connections
        wEE_alpha = (((1 + para['alpha'] * para["hier"][i]) * para["wEE"]) /
                     para['coeffE'])[0]  #[nA]
        wIE_alpha = (((1 + para['alpha'] * para["hier"][i]) * para["wIE"]) /
                     para['coeffI'])[0]  #[nA]
        wEI_alpha = (para["wEI"] / para['coeffE']) * -1  #[nA]
        wII_alpha = (para["wII"] / para['coeffI']) * -1  #[nA]

        # Connections
        EE_connections = sim.Projection(
            popEList[i], popEList[i],
            sim.FixedProbabilityConnector(p_connect=para["probIntra"]),
            sim.StaticSynapse(weight=wEE_alpha, delay=para["dlocal"]))
        IE_connections = sim.Projection(
            popEList[i], popIList[i],
            sim.FixedProbabilityConnector(p_connect=para["probIntra"]),
            sim.StaticSynapse(weight=wIE_alpha, delay=para["dlocal"]))
        EI_connections = sim.Projection(
            popIList[i], popEList[i],
            sim.FixedProbabilityConnector(p_connect=para["probIntra"]),
            sim.StaticSynapse(weight=wEI_alpha, delay=para["dlocal"]))
        II_connections = sim.Projection(
            popIList[i], popIList[i],
            sim.FixedProbabilityConnector(p_connect=para["probIntra"]),
            sim.StaticSynapse(weight=wII_alpha, delay=para["dlocal"]))

        # Store projections in lists
        EE.append(EE_connections)
        IE.append(IE_connections)
        EI.append(EI_connections)
        II.append(II_connections)

    # Long Range connections
    for i in range(para['NAreas']):
        for j in range(para['NAreas']):

            if i != j:

                # Weights
                wEE_alphaLR = (
                    (1 + para['alpha'] * para["hier"][j]) * para['muEE'] *
                    para["conn"][j, i])[0] / para['coeffE']
                wIE_alphaLR = (
                    (1 + para['alpha'] * para["hier"][j]) * para['muIE'] *
                    para["conn"][j, i])[0] / para['coeffI']

                # Delay
                # Mean for delay distribution
                meanlr = para["delayMat"][j, i]
                # Standard deviation for delay distribution
                varlr = para['lrvar'] * meanlr
                dLR = RandomDistribution('normal', [meanlr, varlr],
                                         rng=NumpyRNG(seed=4242))

                # Connections
                EE_connectionsLR = sim.Projection(
                    popEList[i], popEList[j],
                    sim.FixedProbabilityConnector(p_connect=para["probInter"]),
                    sim.StaticSynapse(weight=wEE_alphaLR, delay=dLR))
                IE_connectionsLR = sim.Projection(
                    popEList[i], popIList[j],
                    sim.FixedProbabilityConnector(p_connect=para["probInter"]),
                    sim.StaticSynapse(weight=wIE_alphaLR, delay=dLR))

                # Store projections in list
                EElongRange.append(EE_connectionsLR)
                IElongRange.append(IE_connectionsLR)

    # Stimulus
    amplitude = para['currval'] / para['R']  # [nA]
    pulse = sim.DCSource(amplitude=amplitude,
                         start=300.0,
                         stop=300.0 + para['currdur'])
    pulse.inject_into(popEList[0])

    # Record data
    popE.record('spikes')
    popI.record('spikes')

    # Run
    sim.run(para['duration'])

    # Store spikes
    spikesE_in = popE.get_data()
    spikesI_in = popI.get_data()

    # Generate array with spike data for popE
    spkpopE = spikeData(popE)
    spkpopI = spikeData(popI)

    return spkpopE, spkpopI
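# --- Hedged sketch of the undefined helper --------------------------------
# spikeData() is used above but not defined in this fragment.  A plausible
# minimal version, assuming it should flatten the Neo block returned by
# get_data() into (source_id, spike_time) rows, and assuming numpy is
# imported as in the other examples:
def spikeData(population):
    segment = population.get_data('spikes').segments[0]
    rows = []
    for spiketrain in segment.spiketrains:
        source_id = spiketrain.annotations['source_id']
        for t in spiketrain.magnitude:
            rows.append((source_id, float(t)))
    return numpy.array(rows)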
Esempio n. 37
"""

import socket, os
from importlib import import_module
import numpy
from pyNN.utility import get_script_args, init_logging, normalized_filename

simulator_name = get_script_args(1)[0]
sim = import_module("pyNN.%s" % simulator_name)

from pyNN.random import NumpyRNG, RandomDistribution

init_logging(None, debug=True)

seed = 764756387
rng = NumpyRNG(seed=seed, parallel_safe=True)
tstop = 1000.0 # ms
input_rate = 100.0 # Hz
cell_params = {'tau_refrac': 2.0,  # ms
               'v_thresh':  -50.0, # mV
               'tau_syn_E':  2.0,  # ms
               'tau_syn_I':  2.0,  # ms
               'tau_m': RandomDistribution('uniform', low=18.0, high=22.0, rng=rng)
}
n_record = 3

node = sim.setup(timestep=0.025, min_delay=1.0, max_delay=1.0, debug=True, quit_on_end=False)
print("Process with rank %d running on %s" % (node, socket.gethostname()))

print("[%d] Creating populations" % node)
n_spikes = int(2*tstop*input_rate/1000.0)
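# --- Hedged continuation (not part of the original fragment) --------------
# The fragment ends after computing n_spikes; the spike times would typically
# be generated as in the earlier example, by accumulating exponentially
# distributed inter-spike intervals.  The population sizes are assumptions.
spike_times = numpy.add.accumulate(
    rng.next(n_spikes, 'exponential',
             {'beta': 1000.0 / input_rate}, mask_local=False))
input_population = sim.Population(
    100, sim.SpikeSourceArray(spike_times=spike_times), label="input")
output_population = sim.Population(
    10, sim.IF_curr_exp(**cell_params), label="output")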
Esempio n. 38
# With CE neurons the pop rate is simply the product
# nu_ex*C_E  the factor 1000.0 changes the units from
# spikes per ms to spikes per second.
p_rate = 1000.0 * nu_ex * C_E
print("Rate is: %f HZ" % (p_rate / 1000))

# Neural Parameters
pynn.setup(timestep=1.0, min_delay=1.0, max_delay=16.0)

if simulator_Name == "spiNNaker":

    # Makes it easy to scale up the number of cores
    pynn.set_number_of_neurons_per_core(pynn.IF_curr_exp, 100)
    pynn.set_number_of_neurons_per_core(pynn.SpikeSourcePoisson, 100)

rng = NumpyRNG(seed=1)

v_distr_exc = RandomDistribution('uniform', low=-10.0, high=0.0, rng=rng)
v_distr_inh = RandomDistribution('uniform', low=-10.0, high=0.0, rng=rng)

exc_cell_params = {
    'cm': 1.0,  # nF
    'tau_m': tau_m,
    'tau_refrac': tau_ref,
    'v_rest': v_rest,
    'v_reset': v_reset,
    'v_thresh': V_th,
    'tau_syn_E': tauSyn,
    'tau_syn_I': tauSyn,
    'i_offset': 0.9
}
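# --- Hedged usage sketch (not part of the original fragment) --------------
# One way the objects defined above could be used: build the excitatory
# population from exc_cell_params, randomise its initial membrane potential
# with v_distr_exc and drive it with a Poisson source firing at p_rate.  The
# population size n_exc and the PyNN 0.8-style initialize(v=...) call are
# assumptions, not taken from the original script.
n_exc = 3200
exc_pop = pynn.Population(n_exc, pynn.IF_curr_exp, exc_cell_params,
                          label="exc_pop")
exc_pop.initialize(v=v_distr_exc)
poisson_exc = pynn.Population(n_exc, pynn.SpikeSourcePoisson,
                              {'rate': p_rate}, label="poisson_exc")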
Esempio n. 39
    print "%d Initialising the simulator with %d threads..." % (
        rank, extra['threads'])
else:
    print "%d Initialising the simulator with single thread..." % (rank)


# Small helper to display information only on rank 0
def nprint(s):
    if (rank == 0):
        print s


timer.start()  # start timer on construction

print "%d Setting up random number generator" % rank
rng = NumpyRNG(kernelseed, parallel_safe=True)

print "%d Creating excitatory population with %d neurons." % (rank, NE)
celltype = IF_curr_alpha(**cell_params)
E_net = Population(NE, celltype, label="E_net")

print "%d Creating inhibitory population with %d neurons." % (rank, NI)
I_net = Population(NI, celltype, label="I_net")

print "%d Initialising membrane potential to random values between %g mV and %g mV." % (
    rank, U0, theta)
uniformDistr = RandomDistribution('uniform', [U0, theta], rng)
E_net.initialize(v=uniformDistr)
I_net.initialize(v=uniformDistr)

print "%d Creating excitatory Poisson generator with rate %g spikes/s." % (
cell_params = {'tau_refrac': 2.0, 'v_thresh': -50.0,
               'tau_syn_E': 2.0, 'tau_syn_I': 2.0}

cellsA = Population((cellNumA,), cellType, cell_params, label="Cells_A")
cellsB = Population((cellNumB,), cellType, cell_params, label="Cells_B")


xMin = 0
xMax = 200
yMin = 0
yMax = 200
zMin = 0
zMax = 50


for cell in cellsA:
    cell.position[0] = xMin + rng.next() * (xMax - xMin)
    cell.position[1] = yMin + rng.next() * (yMax - yMin)
    cell.position[2] = zMin + rng.next() * (zMax - zMin)

for cell in cellsB:
    cell.position[0] = xMin + rng.next() * (xMax - xMin)
    cell.position[1] = yMin + rng.next() * (yMax - yMin)
    cell.position[2] = zMin + rng.next() * (zMax - zMin)
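# --- Hedged alternative (not part of the original fragment) ---------------
# The per-cell loops above can also be written in vectorised form, assuming
# numpy is available and that the populations expose the standard PyNN
# `positions` property (a 3 x N array); the parameter-dict form of rng.next()
# follows the other examples in this collection.
def scatter_positions(population, rng):
    n = population.size
    population.positions = numpy.array(
        [rng.next(n, 'uniform', {'low': xMin, 'high': xMax}),
         rng.next(n, 'uniform', {'low': yMin, 'high': yMax}),
         rng.next(n, 'uniform', {'low': zMin, 'high': zMax})])

# e.g. scatter_positions(cellsA, rng); scatter_positions(cellsB, rng)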



indicesA = []
indicesB = []

for idA in cellsA:
    index  = cellsA.id_to_index(idA)