Example 1
def brunel_network(eta=2.0, g=5.0, delay=1.5, J_E=0.1):
    """
    A brunel network, from:

    Brunel N, Dynamics of Sparsely Connected Networks of Excitatory and
    Inhibitory Spiking Neurons, Journal of Computational Neuroscience 8,
    183-208 (2000).

    Implementation adapted from:
    http://www.nest-simulator.org/py_sample/random-balanced-network-exp-synapses-multiple-time-constants/

    Parameters
    ----------
    eta : {int, float}, optional
        External rate relative to threshold rate. Default is 2.
    g : {int, float}, optional
        Ratio inhibitory weight/excitatory weight. Default is 5.
    delay : {int, float}, optional
        Synaptic delay in ms. Default is 1.5.
    J_E : {int, float}, optional
        Amplitude of excitatory postsynaptic current. Default is 0.1.
    """

    # Network parameters
    N_rec = 20  # Record from 20 neurons
    simulation_end = 1000  # Simulation time in ms

    tau_m = 20.0  # Time constant of membrane potential in ms
    V_th = 20.0  # Membrane threshold potential in mV
    N_E = 10000  # Number of excitatory neurons
    N_I = 2500  # Number of inhibitory neurons
    N_neurons = N_E + N_I  # Number of neurons in total
    C_E = int(N_E / 10)  # Number of excitatory synapses per neuron
    C_I = int(N_I / 10)  # Number of inhibitory synapses per neuron
    J_I = -g * J_E  # Amplitude of inhibitory postsynaptic current

    nu_ex = eta * V_th / (J_E * C_E * tau_m)
    p_rate = 1000.0 * nu_ex * C_E
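    # nu_ex is in spikes/ms: V_th / (J_E * C_E * tau_m) is the external rate needed
    # to reach threshold, scaled by eta; p_rate converts it to spikes/s and
    # multiplies by C_E, so a single Poisson generator stands in for the C_E
    # independent external inputs each neuron receives.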

    nest.ResetKernel()

    # Configure kernel
    nest.SetKernelStatus({"grng_seed": 10})

    nest.SetDefaults(
        'iaf_psc_delta', {
            'C_m': 1.0,
            'tau_m': tau_m,
            't_ref': 2.0,
            'E_L': 0.0,
            'V_th': V_th,
            'V_reset': 10.0
        })

    # Create neurons
    nodes = nest.Create('iaf_psc_delta', N_neurons)
    nodes_E = nodes[:N_E]
    nodes_I = nodes[N_E:]

    noise = nest.Create('poisson_generator', 1, {'rate': p_rate})

    spikes = nest.Create('spike_detector', 2, [{
        'label': 'brunel-py-ex'
    }, {
        'label': 'brunel-py-in'
    }])
    spikes_E = spikes[:1]
    spikes_I = spikes[1:]

    # Connect neurons to each other
    nest.CopyModel('static_synapse_hom_w', 'excitatory', {
        'weight': J_E,
        'delay': delay
    })
    nest.Connect(nodes_E, nodes, {
        'rule': 'fixed_indegree',
        'indegree': C_E
    }, 'excitatory')

    nest.CopyModel('static_synapse_hom_w', 'inhibitory', {
        'weight': J_I,
        'delay': delay
    })
    nest.Connect(nodes_I, nodes, {
        'rule': 'fixed_indegree',
        'indegree': C_I
    }, 'inhibitory')

    # Connect poisson generator to all nodes
    nest.Connect(noise, nodes, syn_spec='excitatory')

    nest.Connect(nodes_E[:N_rec], spikes_E)
    nest.Connect(nodes_I[:N_rec], spikes_I)

    # Run the simulation
    nest.Simulate(simulation_end)

    events_E = nest.GetStatus(spikes_E, 'events')[0]
    events_I = nest.GetStatus(spikes_I, 'events')[0]

    # Excitatory spike trains
    # Append one spike train per recorded neuron, even when a neuron did not
    # fire, so the returned list always has N_rec entries
    spiketrains = []
    for sender in nodes_E[:N_rec]:
        spiketrain = events_E["times"][events_E["senders"] == sender]
        spiketrains.append(spiketrain)

    return simulation_end, spiketrains
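A minimal usage sketch (not part of the original source), assuming `import nest` has been done at module level, since the function relies on a global `nest` handle; the argument values are the ones quoted in the docstring:

    simulation_end, spiketrains = brunel_network(eta=2.0, g=5.0, delay=1.5, J_E=0.1)
    print("Recorded {} excitatory spike trains over {} ms".format(len(spiketrains),
                                                                  simulation_end))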
Example 2

    def _test_model(self,
                    referenceModel,
                    testant,
                    gsl_error_tol,
                    tolerance=0.000001,
                    nest_ref_model_opts=None,
                    custom_model_opts=None):

        spike_times = [100.0, 200.0]
        spike_weights = [1., -1.]

        nest.ResetKernel()
        neuron1 = nest.Create(referenceModel, params=nest_ref_model_opts)
        neuron2 = nest.Create(testant, params=custom_model_opts)

        if gsl_error_tol is not None:
            neuron2.set({"gsl_error_tol": gsl_error_tol})

        spikegenerator = nest.Create('spike_generator',
                                     params={
                                         'spike_times': spike_times,
                                         'spike_weights': spike_weights
                                     })

        nest.Connect(spikegenerator, neuron1)
        nest.Connect(spikegenerator, neuron2)

        multimeter1 = nest.Create('multimeter')
        multimeter2 = nest.Create('multimeter')

        V_m_specifier = 'V_m'  # 'delta_V_m'
        multimeter1.set({"record_from": [V_m_specifier]})
        multimeter2.set({"record_from": [V_m_specifier]})

        nest.Connect(multimeter1, neuron1)
        nest.Connect(multimeter2, neuron2)

        nest.Simulate(400.0)
        Vms1 = multimeter1.get("events")[V_m_specifier]
        ts1 = multimeter1.get("events")["times"]

        Vms2 = multimeter2.get("events")[V_m_specifier]
        ts2 = multimeter2.get("events")["times"]

        if TEST_PLOTS:
            fig, ax = plt.subplots(2, 1)
            ax[0].plot(ts1, Vms1, label="Reference " + referenceModel)
            ax[1].plot(ts2, Vms2, label="Testant " + testant)
            for _ax in ax:
                _ax.legend(loc='upper right')
                _ax.grid()
            plt.savefig("/tmp/nestml_nest_integration_test_[" +
                        referenceModel + "]_[" + testant + "].png")
            plt.close(fig)

        if TEST_PLOTS:
            for figsize, fname_snip in zip([(8, 5), (4, 3)], ["", "_small"]):
                fig, ax = plt.subplots(1, 1, figsize=figsize)
                ax = [ax]
                ax[0].plot(ts2, Vms2, label=testant)
                for _ax in ax:
                    _ax.grid()
                ax[0].set_xlabel("Time [ms]")
                ax[0].set_ylabel("$V_m$ [mV]")
                plt.tight_layout()
                plt.savefig("/tmp/nestml_models_library_[" + referenceModel +
                            "]_synaptic_response" + fname_snip + ".png")
                plt.close(fig)

        for index in range(0, len(Vms1)):
            if abs(Vms1[index] - Vms2[index]) > tolerance \
                    or np.isnan(Vms1[index]) \
                    or np.isnan(Vms2[index]):
                print(
                    str(Vms1[index]) + " differs from  " + str(Vms2[index]) +
                    " at iteration: " + str(index) +
                    " of overall iterations: " + str(len(Vms1)))
                raise Exception(testant + ": TEST FAILED")

        print(testant + " PASSED")
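A hypothetical invocation (the model names below are placeholders, not taken from the original test), assuming this method lives in a unittest-style class and that a NEST reference model and its NESTML-generated counterpart are both installed:

    self._test_model(referenceModel="iaf_psc_exp",
                     testant="iaf_psc_exp_nestml",
                     gsl_error_tol=None)

Passing gsl_error_tol=None skips the solver-tolerance override for models that do not use the GSL integrator.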
Example 3
    def setUp(self):
        nest.ResetKernel()
        nest.set_verbosity('M_ERROR')

    def test_SynapseDepressionFacilitation(self):
        """Ensure that depression and facilitation work correctly"""

        nest.set_verbosity('M_WARNING')

        # This is done using the spike pairing experiment of
        # Clopath et al. 2010. First we specify the parameters
        resolution = 0.1
        init_w = 0.5
        spike_times_pre = [[29., 129., 229., 329., 429.],
                           [29., 62.3, 95.7, 129., 162.3],
                           [29., 49., 69., 89., 109.],
                           [129., 229., 329., 429., 529., 629.],
                           [62.3, 95.6, 129., 162.3, 195.6, 229.],
                           [49., 69., 89., 109., 129., 149.]]
        spike_times_post = [[19., 119., 219., 319., 419.],
                            [19., 52.3, 85.7, 119., 152.3],
                            [19., 39., 59., 79., 99.],
                            [139., 239., 339., 439., 539., 639.],
                            [72.3, 105.6, 139., 172.3, 205.6, 239.],
                            [59., 79., 99., 119., 139., 159.]]
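        # Six pairing protocols as in the spike pairing experiment of
        # Clopath et al. (2010): in the first three, each postsynaptic spike
        # precedes its presynaptic partner by 10 ms; in the last three it
        # follows by 10 ms, at pairing frequencies of 10, 30 and 50 Hz.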
        tested_models = ["aeif_psc_delta_clopath", "hh_psc_alpha_clopath"]

        # Loop over tested neuron models
        for nrn_model in tested_models:
            if (nrn_model == "aeif_psc_delta_clopath"):
                nrn_params = {
                    'V_m': -70.6,
                    'E_L': -70.6,
                    'V_peak': 33.0,
                    'C_m': 281.0,
                    'theta_minus': -70.6,
                    'theta_plus': -45.3,
                    'A_LTD': 14.0e-5,
                    'A_LTP': 8.0e-5,
                    'tau_u_bar_minus': 10.0,
                    'tau_u_bar_plus': 7.0,
                    'delay_u_bars': 4.0,
                    'a': 4.0,
                    'b': 0.0805,
                    'V_reset': -70.6 + 21.0,
                    'V_clamp': 33.0,
                    't_clamp': 2.0,
                    't_ref': 0.0,
                }
            elif (nrn_model == "hh_psc_alpha_clopath"):
                nrn_params = {
                    'V_m': -64.9,
                    'C_m': 100.0,
                    'tau_syn_ex': 0.2,
                    'tau_syn_in': 2.0,
                    'theta_minus': -64.9,
                    'theta_plus': -35.0,
                    'A_LTD': 14.0e-5,
                    'A_LTP': 8.0e-5,
                    'tau_u_bar_minus': 10.0,
                    'tau_u_bar_plus': 114.0,
                    'delay_u_bars': 5.0,
                }
            syn_weights = []
            # Loop over pairs of spike trains
            for (s_t_pre, s_t_post) in zip(spike_times_pre, spike_times_post):
                nest.ResetKernel()
                nest.resolution = resolution

                # Create one neuron
                nrn = nest.Create(nrn_model, 1, nrn_params)
                prrt_nrn = nest.Create("parrot_neuron", 1)

                # Create and connect spike generator
                spike_gen_pre = nest.Create("spike_generator", 1,
                                            {"spike_times": s_t_pre})

                nest.Connect(spike_gen_pre,
                             prrt_nrn,
                             syn_spec={"delay": resolution})

                if (nrn_model == "aeif_psc_delta_clopath"):
                    conn_weight = 80.0
                elif (nrn_model == "hh_psc_alpha_clopath"):
                    conn_weight = 2000.0

                spike_gen_params_post = {"spike_times": s_t_post}
                spike_gen_post = nest.Create("spike_generator", 1,
                                             {"spike_times": s_t_post})

                nest.Connect(spike_gen_post,
                             nrn,
                             syn_spec={
                                 "delay": resolution,
                                 "weight": conn_weight
                             })

                # Create weight recorder
                wr = nest.Create('weight_recorder', 1)

                # Create Clopath synapse with weight recorder
                nest.CopyModel("clopath_synapse", "clopath_synapse_rec",
                               {"weight_recorder": wr})

                syn_dict = {
                    "synapse_model": "clopath_synapse_rec",
                    "weight": init_w,
                    "delay": resolution
                }
                nest.Connect(prrt_nrn, nrn, syn_spec=syn_dict)

                # Simulation
                simulation_time = (10.0 + max(s_t_pre[-1], s_t_post[-1]))
                nest.Simulate(simulation_time)

                # Evaluation
                w_events = nest.GetStatus(wr)[0]["events"]
                weights = w_events["weights"]
                syn_weights.append(weights[-1])

            # Compare to expected result
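            # Express the final weights on the percentage scale used for the
            # reference values below: 100 corresponds to the initial weight,
            # and the deviation from it is scaled by a factor of 15.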
            syn_weights = np.array(syn_weights)
            syn_weights = 100.0 * 15.0 * (syn_weights -
                                          init_w) / init_w + 100.0
            if (nrn_model == "aeif_psc_delta_clopath"):
                correct_weights = [
                    57.82638722, 72.16730112, 149.43359357, 103.30408341,
                    124.03640668, 157.02882555
                ]
            elif (nrn_model == "hh_psc_alpha_clopath"):
                correct_weights = [
                    70.14343863, 99.49206222, 178.1028757, 119.63314118,
                    167.37750688, 178.83111685
                ]

            self.assertTrue(
                np.allclose(syn_weights, correct_weights, rtol=1e-7))
can be found in models/sinusoidal_poisson_generator.h.

The script is structured into two parts and creates one common figure.
In Part 1, two instances of the `sinusoidal_poisson_generator` are
created with different parameters. Part 2 illustrates the effect of
the ``individual_spike_trains`` switch.
'''
'''
We import nest and modules required for analysis and plotting.
'''

import nest
import matplotlib.pyplot as plt
import numpy as np

nest.ResetKernel()  # in case we run the script multiple times from iPython
'''
We create two instances of the `sinusoidal_poisson_generator`
with two different parameter sets using `Create`. Moreover, we create
devices to record firing rates (`multimeter`) and spikes
(`spike_detector`) and connect them to the generators using `Connect`.
'''

nest.SetKernelStatus({'resolution': 0.01})

g = nest.Create('sinusoidal_poisson_generator',
                n=2,
                params=[{
                    'rate': 10000.0,
                    'amplitude': 5000.0,
                    'frequency': 10.0,
Example 6
    def run_synapse_test(
            self,
            neuron_model_name,
            ref_neuron_model_name,
            synapse_model_name,
            ref_synapse_model_name,
            resolution=1.,  # [ms]
            delay=1.,  # [ms]
            sim_time=None,  # if None, computed from pre and post spike times
            pre_spike_times=None,
            post_spike_times=None,
            vt_spike_times=None,
            fname_snip=""):

        if pre_spike_times is None:
            pre_spike_times = []

        if post_spike_times is None:
            post_spike_times = []

        if vt_spike_times is None:
            vt_spike_times = []

        if sim_time is None:
            sim_time = max(np.amax(pre_spike_times, initial=0.),
                           np.amax(post_spike_times, initial=0.),
                           np.amax(vt_spike_times, initial=0.)) + 5 * delay

        nest.ResetKernel()
        # nest.set_verbosity("M_ALL")
        nest.set_verbosity("M_ERROR")
        nest.SetKernelStatus({"resolution": resolution})
        nest.Install("nestml_jit_module")
        nest.Install("nestml_non_jit_module")

        print("Pre spike times: " + str(pre_spike_times))
        print("Post spike times: " + str(post_spike_times))
        print("VT spike times: " + str(vt_spike_times))

        # create spike_generators with these times
        pre_sg = nest.Create("spike_generator",
                             params={"spike_times": pre_spike_times})
        post_sg = nest.Create("spike_generator",
                              params={
                                  "spike_times": post_spike_times,
                                  "allow_offgrid_times": True
                              })
        vt_sg = nest.Create("spike_generator",
                            params={
                                "spike_times": vt_spike_times,
                                "allow_offgrid_times": True
                            })

        # create  volume transmitter
        vt = nest.Create("volume_transmitter")
        vt_parrot = nest.Create("parrot_neuron")
        nest.Connect(vt_sg, vt_parrot)
        nest.Connect(vt_parrot,
                     vt,
                     syn_spec={
                         "synapse_model": "static_synapse",
                         "weight": 1.,
                         "delay": 1.
                     })  # delay is ignored?!
        vt_gid = vt.get("global_id")

        # set up custom synapse models
        wr = nest.Create("weight_recorder")
        wr_ref = nest.Create('weight_recorder')
        nest.CopyModel(
            synapse_model_name, "stdp_nestml_rec", {
                "weight_recorder": wr[0],
                "w": 1.,
                "the_delay": delay,
                "receptor_type": 0,
                "vt": vt_gid
            })
        nest.CopyModel(
            ref_synapse_model_name, "stdp_ref_rec", {
                "weight_recorder": wr_ref[0],
                "weight": 1.,
                "delay": delay,
                "receptor_type": 0,
                "vt": vt_gid
            })
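        # Note: the NESTML-generated synapse exposes its weight and delay under
        # the names "w" and "the_delay", while the NEST reference synapse uses
        # the standard "weight" and "delay"; both copies are wired to the same
        # volume transmitter.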

        # create parrot neurons and connect spike_generators
        if sim_mdl:
            pre_neuron = nest.Create("parrot_neuron")
            post_neuron = nest.Create(neuron_model_name)

        if sim_ref:
            pre_neuron_ref = nest.Create("parrot_neuron")
            post_neuron_ref = nest.Create(ref_neuron_model_name)

        if sim_mdl:
            spikedet_pre = nest.Create("spike_recorder")
            spikedet_post = nest.Create("spike_recorder")
            spikedet_vt = nest.Create("spike_recorder")
            mm = nest.Create(
                "multimeter",
                params={
                    "record_from":
                    ["V_m", "post_tr__for_neuromodulated_stdp_nestml"]
                })

        if sim_ref:
            spikedet_pre_ref = nest.Create("spike_recorder")
            spikedet_post_ref = nest.Create("spike_recorder")
            mm_ref = nest.Create("multimeter", params={"record_from": ["V_m"]})

        if sim_mdl:
            nest.Connect(pre_sg,
                         pre_neuron,
                         "one_to_one",
                         syn_spec={"delay": 1.})
            nest.Connect(post_sg,
                         post_neuron,
                         "one_to_one",
                         syn_spec={
                             "delay": 1.,
                             "weight": 9999.
                         })
            nest.Connect(pre_neuron,
                         post_neuron,
                         "all_to_all",
                         syn_spec={"synapse_model": "stdp_nestml_rec"})
            nest.Connect(mm, post_neuron)
            nest.Connect(pre_neuron, spikedet_pre)
            nest.Connect(post_neuron, spikedet_post)
            nest.Connect(vt_parrot, spikedet_vt)
        if sim_ref:
            nest.Connect(pre_sg,
                         pre_neuron_ref,
                         "one_to_one",
                         syn_spec={"delay": 1.})
            nest.Connect(post_sg,
                         post_neuron_ref,
                         "one_to_one",
                         syn_spec={
                             "delay": 1.,
                             "weight": 9999.
                         })
            nest.Connect(pre_neuron_ref,
                         post_neuron_ref,
                         "all_to_all",
                         syn_spec={"synapse_model": "stdp_ref_rec"})
            nest.Connect(mm_ref, post_neuron_ref)
            nest.Connect(pre_neuron_ref, spikedet_pre_ref)
            nest.Connect(post_neuron_ref, spikedet_post_ref)

        # get STDP synapse and weight before protocol
        if sim_mdl:
            syn = nest.GetConnections(source=pre_neuron,
                                      synapse_model="stdp_nestml_rec")
        if sim_ref:
            syn_ref = nest.GetConnections(source=pre_neuron_ref,
                                          synapse_model="stdp_ref_rec")

        n_steps = int(np.ceil(sim_time / resolution)) + 1
        t = 0.
        t_hist = []
        if sim_mdl:
            w_hist = []
        if sim_ref:
            w_hist_ref = []
        while t <= sim_time:
            nest.Simulate(resolution)
            t += resolution
            t_hist.append(t)
            if sim_ref:
                w_hist_ref.append(nest.GetStatus(syn_ref)[0]["weight"])
            if sim_mdl:
                w_hist.append(nest.GetStatus(syn)[0]["w"])

        # plot
        if TEST_PLOTS:
            fig, ax = plt.subplots(nrows=2)
            ax1, ax2 = ax

            if sim_mdl:
                timevec = nest.GetStatus(mm, "events")[0]["times"]
                V_m = nest.GetStatus(mm, "events")[0]["V_m"]
                ax2.plot(timevec,
                         nest.GetStatus(mm, "events")[0]
                         ["post_tr__for_neuromodulated_stdp_nestml"],
                         label="post_tr nestml")
                ax1.plot(timevec, V_m, label="nestml", alpha=.7, linestyle=":")
            if sim_ref:
                pre_ref_spike_times_ = nest.GetStatus(spikedet_pre_ref,
                                                      "events")[0]["times"]
                timevec = nest.GetStatus(mm_ref, "events")[0]["times"]
                V_m = nest.GetStatus(mm_ref, "events")[0]["V_m"]
                ax1.plot(timevec, V_m, label="nest ref", alpha=.7)
            ax1.set_ylabel("V_m")

            for _ax in ax:
                _ax.grid(which="major", axis="both")
                _ax.grid(which="minor", axis="x", linestyle=":", alpha=.4)
                # _ax.minorticks_on()
                _ax.set_xlim(0., sim_time)
                _ax.legend()
            fig.savefig("/tmp/stdp_synapse_test" + fname_snip + "_V_m.png",
                        dpi=300)

        # plot
        if TEST_PLOTS:
            fig, ax = plt.subplots(nrows=4)
            ax1, ax2, ax3, ax4 = ax

            if sim_mdl:
                pre_spike_times_ = nest.GetStatus(spikedet_pre,
                                                  "events")[0]["times"]
                print("Actual pre spike times: " + str(pre_spike_times_))
            if sim_ref:
                pre_ref_spike_times_ = nest.GetStatus(spikedet_pre_ref,
                                                      "events")[0]["times"]
                print("Actual pre ref spike times: " +
                      str(pre_ref_spike_times_))

            if sim_mdl:
                n_spikes = len(pre_spike_times_)
                for i in range(n_spikes):
                    if i == 0:
                        _lbl = "nestml"
                    else:
                        _lbl = None
                    ax1.plot(2 * [pre_spike_times_[i] + delay], [0, 1],
                             linewidth=2,
                             color="blue",
                             alpha=.4,
                             label=_lbl)

            if sim_mdl:
                post_spike_times_ = nest.GetStatus(spikedet_post,
                                                   "events")[0]["times"]
                print("Actual post spike times: " + str(post_spike_times_))
            if sim_ref:
                post_ref_spike_times_ = nest.GetStatus(spikedet_post_ref,
                                                       "events")[0]["times"]
                print("Actual post ref spike times: " +
                      str(post_ref_spike_times_))

            if sim_ref:
                n_spikes = len(pre_ref_spike_times_)
                for i in range(n_spikes):
                    if i == 0:
                        _lbl = "nest ref"
                    else:
                        _lbl = None
                    ax1.plot(2 * [pre_ref_spike_times_[i] + delay], [0, 1],
                             linewidth=2,
                             color="cyan",
                             label=_lbl,
                             alpha=.4)
            ax1.set_ylabel("Pre spikes")

            ax2.plot(
                timevec,
                nest.GetStatus(
                    mm,
                    "events")[0]["post_tr__for_neuromodulated_stdp_nestml"],
                label="nestml post tr")
            if sim_mdl:
                n_spikes = len(post_spike_times_)
                for i in range(n_spikes):
                    if i == 0:
                        _lbl = "nestml"
                    else:
                        _lbl = None
                    ax2.plot(2 * [post_spike_times_[i]], [0, 1],
                             linewidth=2,
                             color="black",
                             alpha=.4,
                             label=_lbl)
            if sim_ref:
                n_spikes = len(post_ref_spike_times_)
                for i in range(n_spikes):
                    if i == 0:
                        _lbl = "nest ref"
                    else:
                        _lbl = None
                    ax2.plot(2 * [post_ref_spike_times_[i]], [0, 1],
                             linewidth=2,
                             color="red",
                             alpha=.4,
                             label=_lbl)
            ax2.set_ylabel("Post spikes")

            if sim_mdl:
                vt_spike_times_ = nest.GetStatus(spikedet_vt,
                                                 "events")[0]["times"]
                print("Actual vt spike times: " + str(vt_spike_times_))

            if sim_mdl:
                n_spikes = len(vt_spike_times_)
                for i in range(n_spikes):
                    ax3.plot(2 * [vt_spike_times_[i]], [0, 1],
                             linewidth=2,
                             color="black",
                             alpha=.4)
            ax3.set_ylabel("VT spikes")

            if sim_mdl:
                ax4.plot(t_hist, w_hist, marker="o", label="nestml")
            if sim_ref:
                ax4.plot(t_hist,
                         w_hist_ref,
                         linestyle="--",
                         marker="x",
                         label="ref")
            ax4.set_xlabel("Time [ms]")
            ax4.set_ylabel("w")

            for _ax in ax:
                _ax.grid(which="major", axis="both")
                _ax.xaxis.set_major_locator(
                    matplotlib.ticker.FixedLocator(
                        np.arange(0, np.ceil(sim_time))))
                _ax.set_xlim(0., sim_time)
                _ax.legend()
            fig.savefig("/tmp/stdp_dopa_synapse_test" + fname_snip + ".png",
                        dpi=300)

        # verify
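        # (meaningful only when both sim_mdl and sim_ref are enabled, since it
        # compares the NESTML weight trace against the NEST reference trace)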
        MAX_ABS_ERROR = 1E-6
        assert np.all(
            np.abs(np.array(w_hist) - np.array(w_hist_ref)) < MAX_ABS_ERROR)
Example 7
    def test_GetTargetNodesPositions(self):
        """Interface check for finding targets."""
        ldict = {
            'elements': ['iaf_neuron', 'iaf_psc_alpha'],
            'rows': 3,
            'columns': 3,
            'extent': [2., 2.],
            'edge_wrap': True
        }
        cdict = {
            'connection_type': 'divergent',
            'mask': {
                'grid': {
                    'rows': 2,
                    'columns': 2
                }
            }
        }
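        # A divergent connection with a 2x2 grid mask reaches 4 grid positions;
        # each position holds one 'iaf_neuron' and one 'iaf_psc_alpha', so an
        # unrestricted target set contains 8 nodes (see the assertions below).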
        nest.ResetKernel()
        l = topo.CreateLayer(ldict)
        ian = [
            gid for gid in nest.GetLeaves(l)[0]
            if nest.GetStatus([gid], 'model')[0] == 'iaf_neuron'
        ]
        ipa = [
            gid for gid in nest.GetLeaves(l)[0]
            if nest.GetStatus([gid], 'model')[0] == 'iaf_psc_alpha'
        ]

        # connect ian -> all using static_synapse
        cdict.update({
            'sources': {
                'model': 'iaf_neuron'
            },
            'synapse_model': 'static_synapse'
        })
        topo.ConnectLayers(l, l, cdict)
        for k in ['sources', 'synapse_model']:
            cdict.pop(k)

        # connect ipa -> ipa using stdp_synapse
        cdict.update({
            'sources': {
                'model': 'iaf_psc_alpha'
            },
            'targets': {
                'model': 'iaf_psc_alpha'
            },
            'synapse_model': 'stdp_synapse'
        })
        topo.ConnectLayers(l, l, cdict)
        for k in ['sources', 'targets', 'synapse_model']:
            cdict.pop(k)

        t = topo.GetTargetNodes(ian[:1], l)
        self.assertEqual(len(t), 1)

        p = topo.GetTargetPositions(ian[:1], l)
        self.assertEqual(len(p), 1)
        self.assertTrue(all([len(pp) == 2 for pp in p[0]]))

        t = topo.GetTargetNodes(ian, l)
        self.assertEqual(len(t), len(ian))
        self.assertTrue(all(
            [len(g) == 8
             for g in t]))  # 2x2 mask x 2 neurons / element -> eight targets

        p = topo.GetTargetPositions(ian, l)
        self.assertEqual(len(p), len(ian))

        t = topo.GetTargetNodes(ian, l, tgt_model='iaf_neuron')
        self.assertEqual(len(t), len(ian))
        self.assertTrue(all([len(g) == 4
                             for g in t]))  # 2x2 mask  -> four targets

        t = topo.GetTargetNodes(ian, l, tgt_model='iaf_psc_alpha')
        self.assertEqual(len(t), len(ian))
        self.assertTrue(all([len(g) == 4
                             for g in t]))  # 2x2 mask  -> four targets

        t = topo.GetTargetNodes(ipa, l)
        self.assertEqual(len(t), len(ipa))
        self.assertTrue(all([len(g) == 4
                             for g in t]))  # 2x2 mask  -> four targets

        t = topo.GetTargetNodes(ipa, l, syn_model='static_synapse')
        self.assertEqual(len(t), len(ipa))
        self.assertTrue(all([len(g) == 0 for g in t]))  # no static syns

        t = topo.GetTargetNodes(ipa, l, syn_model='stdp_synapse')
        self.assertEqual(len(t), len(ipa))
        self.assertTrue(all([len(g) == 4
                             for g in t]))  # 2x2 mask  -> four targets
Example 8
    def test_SynapseDepressionFacilitation(self):
        """Ensure that depression and facilitation work correctly"""

        nest.set_verbosity('M_WARNING')
        nest.ResetKernel()

        resolution = 0.1
        nest.resolution = resolution

        '''
        neuron parameters
        '''
        nrn_model = 'pp_cond_exp_mc_urbanczik'
        nrn_params = {
            't_ref': 3.0,        # refractory period
            'g_sp': 600.0,       # somato-dendritic coupling conductance
            'soma': {
                'V_m': -70.0,    # initial value of V_m
                'C_m': 300.0,    # capacitance of membrane
                'E_L': -70.0,    # resting potential
                'g_L': 30.0,     # somatic leak conductance
                'E_ex': 0.0,     # resting potential for exc input
                'E_in': -75.0,   # resting potential for inh input
                'tau_syn_ex': 3.0,  # time constant of exc conductance
                'tau_syn_in': 3.0,  # time constant of inh conductance
            },
            'dendritic': {
                'V_m': -70.0,    # initial value of V_m
                'C_m': 300.0,    # capacitance of membrane
                'E_L': -70.0,    # resting potential
                'g_L': 30.0,     # dendritic leak conductance
                'tau_syn_ex': 3.0,  # time constant of exc input current
                'tau_syn_in': 3.0,  # time constant of inh input current
            },
            # parameters of rate function
            'phi_max': 0.15,     # max rate
            'rate_slope': 0.5,   # called 'k' in the paper
            'beta': 1.0 / 3.0,
            'theta': -55.0,
        }

        '''
        synapse params
        '''
        syns = nest.GetDefaults(nrn_model)['receptor_types']
        init_w = 100.0
        syn_params = {
            'synapse_model': 'urbanczik_synapse_wr',
            'receptor_type': syns['dendritic_exc'],
            'tau_Delta': 100.0,  # time constant of low pass filtering of the weight change
            'eta': 0.75,         # learning rate
            'weight': init_w,
            'Wmax': 4.5*nrn_params['dendritic']['C_m'],
            'delay': resolution,
        }

        '''
        neuron and devices
        '''
        nest.SetDefaults(nrn_model, nrn_params)
        nrn = nest.Create(nrn_model)

        # spike generator is connected to a parrot neuron which is connected to the mc neuron
        prrt_nrn = nest.Create('parrot_neuron')

        # excitatory input to the dendrite
        pre_syn_spike_times = np.array([1.0, 98.0])
        sg_prox = nest.Create('spike_generator', params={
                              'spike_times': pre_syn_spike_times})

        # excitatory input to the soma
        spike_times_soma_inp = np.arange(10.0, 50.0, resolution)
        spike_weights_soma = 10.0*np.ones_like(spike_times_soma_inp)
        sg_soma_exc = nest.Create('spike_generator',
                                  params={'spike_times': spike_times_soma_inp, 'spike_weights': spike_weights_soma})

        # for recording all parameters of the Urbanczik neuron
        rqs = nest.GetDefaults(nrn_model)['recordables']
        mm = nest.Create('multimeter', params={
                         'record_from': rqs, 'interval': 0.1})

        # for recoding the synaptic weights of the Urbanczik synapses
        wr = nest.Create('weight_recorder')

        # for recording the spiking of the soma
        sr_soma = nest.Create('spike_recorder')

        '''
        create connections
        '''
        nest.Connect(sg_prox, prrt_nrn, syn_spec={'delay': resolution})
        nest.CopyModel('urbanczik_synapse', 'urbanczik_synapse_wr',
                       {'weight_recorder': wr[0]})
        nest.Connect(prrt_nrn, nrn, syn_spec=syn_params)
        nest.Connect(sg_soma_exc, nrn,
                     syn_spec={'receptor_type': syns['soma_exc'], 'weight': 10.0*resolution, 'delay': resolution})
        nest.Connect(mm, nrn, syn_spec={'delay': resolution})
        nest.Connect(nrn, sr_soma, syn_spec={'delay': resolution})

        '''
        simulation
        '''
        nest.Simulate(100.0)

        '''
        read out devices
        '''
        # multimeter
        rec = nest.GetStatus(mm)[0]['events']
        t = rec['times']
        V_w = rec['V_m.p']

        # compute dendritic prediction of somatic membrane potential
        g_D = nrn_params['g_sp']
        g_L = nrn_params['soma']['g_L']
        E_L = nrn_params['soma']['E_L']
        V_w_star = (g_L*E_L + g_D*V_w) / (g_L + g_D)

        # weight recorder
        data = nest.GetStatus(wr)
        senders = data[0]['events']['senders']
        targets = data[0]['events']['targets']
        weights = data[0]['events']['weights']
        times = data[0]['events']['times']

        # spike recorder
        data = nest.GetStatus(sr_soma)[0]['events']
        spike_times_soma = data['times']

        # compute predicted rate
        phi_max = nrn_params['phi_max']
        k = nrn_params['rate_slope']
        beta = nrn_params['beta']
        theta = nrn_params['theta']
        rate = (phi_max / (1.0 + k*np.exp(beta*(theta - V_w_star))))

        # compute h(V_w_star)
        h = (15.0*beta / (1.0 + np.exp(-beta*(theta - V_w_star)) / k))

        # compute alpha response kernel
        tau_s = nrn_params['dendritic']['tau_syn_ex']
        g_L_prox = nrn_params['dendritic']['g_L']
        C_m_prox = nrn_params['dendritic']['C_m']
        tau_L = C_m_prox / g_L_prox
        E_L_prox = nrn_params['dendritic']['E_L']
        t0 = 1.2
        alpha_response = (np.heaviside(t - t0, 0.5)*tau_s*(np.exp(-(t - t0) / tau_L) - np.exp(-(t - t0) / tau_s)) /
                          (g_L_prox*(tau_L - tau_s)))

        # compute PI(t)
        if len(spike_times_soma) > 0:
            t = np.around(t, 4)
            spike_times_soma = np.around(spike_times_soma + 0.2, 4)
            idx = np.nonzero(np.in1d(t, spike_times_soma))[0]
            rate[idx] -= 1.0 / resolution

        w_change_raw = -15.0*C_m_prox*rate*h*alpha_response

        # compute low pass filtered version of PI
        tau_Delta = syn_params['tau_Delta']
        eta = syn_params['eta']
        w_change_low_pass = eta * np.exp(-t / tau_Delta)*np.cumsum(
            np.exp(t / tau_Delta)*w_change_raw)*resolution / tau_Delta
        integrated_w_change = np.cumsum(w_change_low_pass)*resolution
        syn_weight_comp = init_w + integrated_w_change

        '''
        comparison between Nest and python implementation
        '''
        # extract the weight computed in python at the times of the presynaptic spikes
        idx = np.nonzero(np.in1d(np.around(t, 4), np.around(pre_syn_spike_times + resolution, 4)))[0]
        syn_w_comp_at_spike_times = syn_weight_comp[idx]
        relative_error = (
            (weights[-1] - syn_w_comp_at_spike_times[-1]) / (weights[-1] - init_w))

        self.assertTrue(abs(relative_error) < 0.001)
Example 9

def train(settings, data):
    np.random.seed()
    rank = nest.Rank()
    rng = np.random.randint(500)
    num_v_procs = settings['network']['num_threads'] \
                * settings['network']['num_procs']

    nest.ResetKernel()
    nest.SetKernelStatus({
        'local_num_threads':
        settings['network']['num_threads'],
        'total_num_virtual_procs':
        num_v_procs,
        'resolution':
        settings['network']['h'],
        'rng_seeds':
        range(rng, rng + num_v_procs)
    })
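    # NEST 2.x kernel API: 'rng_seeds' expects one seed per virtual process
    # (local_num_threads * num_procs seeds in total).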

    layer_out = nest.Create('iaf_psc_exp', settings['topology']['n_layer_out'])
    if settings['topology']['two_layers']:
        layer_hid = nest.Create('iaf_psc_exp',
                                settings['topology']['n_layer_hid'])

    teacher_1 = nest.Create('step_current_generator',
                            settings['topology']['n_layer_out'])
    spike_generators_1 = nest.Create('spike_generator',
                                     settings['topology']['n_input'])
    poisson_layer = nest.Create('poisson_generator',
                                settings['topology']['n_input'])
    parrot_layer = nest.Create('parrot_neuron',
                               settings['topology']['n_input'])

    spike_detector_1 = nest.Create('spike_detector')
    spike_detector_2 = nest.Create('spike_detector')
    spike_detector_3 = nest.Create('spike_detector')

    voltmeter = nest.Create('voltmeter', 1, {
        'withgid': True,
        'withtime': True
    })

    if not settings['network']['noise_after_pattern']:
        nest.SetStatus(poisson_layer, {
            'rate': settings['network']['noise_freq'],
            'origin': 0.0
        })

    nest.Connect(spike_generators_1,
                 parrot_layer,
                 'one_to_one',
                 syn_spec='static_synapse')
    nest.Connect(poisson_layer,
                 parrot_layer,
                 'one_to_one',
                 syn_spec='static_synapse')

    if settings['learning']['use_teacher']:
        nest.Connect(teacher_1,
                     layer_out,
                     'one_to_one',
                     syn_spec='static_synapse')

    nest.Connect(layer_out, spike_detector_1, 'all_to_all')
    nest.Connect(parrot_layer, spike_detector_2, 'all_to_all')
    nest.Connect(voltmeter, layer_out)

    nest.SetStatus(layer_out, settings['model']['neuron_out'])

    if settings['topology']['two_layers']:
        if settings['learning']['use_inhibition']:
            interconnect_layer(layer_hid, settings['model']['syn_dict_inh'])
            # nest.Connect(layer_out, layer_hid,
            #              'all_to_all', syn_spec=settings['syn_dict_inh'])

        nest.Connect(parrot_layer, spike_detector_3, 'all_to_all')
        nest.Connect(parrot_layer,
                     layer_hid,
                     'all_to_all',
                     syn_spec=settings['model']['syn_dict_stdp_hid'])
        nest.Connect(layer_hid,
                     layer_out,
                     'all_to_all',
                     syn_spec=settings['model']['syn_dict_stdp'])
        if settings['topology']['use_reciprocal']:
            nest.Connect(layer_out,
                         layer_hid,
                         'all_to_all',
                         syn_spec=settings['model']['syn_dict_rec'])
        nest.Connect(layer_hid, spike_detector_3, 'all_to_all')
        nest.SetStatus(layer_hid, settings['model']['neuron_hid'])
    else:
        nest.Connect(parrot_layer,
                     layer_out,
                     'all_to_all',
                     syn_spec=settings['model']['syn_dict_stdp'])

    if settings['topology']['use_inhibition']:
        interconnect_layer(layer_out, settings['model']['syn_dict_inh'])

    np.random.seed(500)

    i = 0
    hi = 1
    last_norms = []
    norm_history = []
    output_latency = []
    weights_history = []

    early_stop = False
    d_time = settings['network']['start_delta']
    full_time = settings['learning']['epochs'] \
              * len(data['input']) \
              * settings['network']['h_time'] \
              + settings['network']['start_delta']

    # if settings['two_layers']:
    #     initial_weights = save_weigths_two_layers(parrot_layer, layer_hid, layer_out, settings)
    # else:
    #     initial_weights = save_weights_one_layer(parrot_layer, layer_out, settings)

    nest.Simulate(settings['network']['start_delta'])
    while not early_stop:
        set_spike_in_generators(data['input'][i], spike_generators_1, d_time,
                                d_time + settings['network']['h_time'],
                                settings['network']['h_time'],
                                settings['network']['h'])
        # if True:
        #     set_spike_in_generators(data['input'][i], spike_generators_1,
        #                             d_time + 1.5, d_time + settings['h_time'] + 1.5,
        #                             settings['h_time'], settings['h'])
        #     set_spike_in_generators(data['input'][i], spike_generators_1,
        #                             d_time + 3.0, d_time + settings['h_time'] + 3.0,
        #                             settings['h_time'], settings['h'])

        spike_times = []
        for neuron_number in data['input'][i]:
            if data['input'][i][neuron_number]:
                spike_times.append(data['input'][i][neuron_number][0])

        if settings['learning']['use_teacher']:
            if settings['topology']['n_layer_out'] == 1:
                set_teacher_input(
                    np.min(spike_times) \
                    + d_time \
                    + settings['network']['h'] \
                    + settings['learning']['reinforce_delta'],
                    teacher_1,
                    settings
                )
            else:
                set_teacher_input(
                    np.min(spike_times) \
                    + d_time \
                    + settings['network']['h'] \
                    + settings['learning']['reinforce_delta'],
                    [teacher_1[data['class'][i]]],
                    settings
                )

        if settings['network']['noise_after_pattern']:
            nest.SetStatus(
                poisson_layer, {
                    'start': d_time + np.max(spike_times),
                    'stop': float(d_time + settings['network']['h_time']),
                    'rate': settings['network']['noise_freq']
                })

        nest.Simulate(settings['network']['h_time'])

        ex_class = data['class'][i]
        spikes = nest.GetStatus(spike_detector_1, keys="events")[0]['times']
        senders = nest.GetStatus(spike_detector_1, keys="events")[0]['senders']
        mask = spikes > d_time
        spikes = spikes[mask]
        senders = senders[mask]
        tmp_dict = {
            'latency': spikes - d_time,
            'senders': senders,
            'class': ex_class
        }

        output_latency.append(tmp_dict)

        d_time += settings['network']['h_time']
        if i + hi + 1 > len(data['input']):
            i = 0
        else:
            i += hi

        if settings['network']['save_history']:
            if settings['topology']['two_layers']:
                tmp_weights = save_weigths_two_layers(parrot_layer, layer_hid,
                                                      layer_out, settings)
                tmp_norm_hid = weight_norm(tmp_weights['layer_hid'])
                tmp_norm_out = weight_norm(tmp_weights['layer_out'])
                tmp_norm = np.linalg.norm([tmp_norm_hid, tmp_norm_out])
                norm_history.append(tmp_norm)
            else:
                tmp_weights = save_weights_one_layer(parrot_layer, layer_out,
                                                     settings)
                tmp_norm_out = weight_norm(tmp_weights['layer_out'])
                norm_history.append(tmp_norm_out)
            weights_history.append(tmp_weights)

        #     if len(norm_history) > 5 * len(data['input']) and settings['early_stop']:
        #         early_stop = np.std(norm_history[-5 * len(data['input']):]) < 0.025
        # else:
        early_stop = d_time > full_time

    if settings['topology']['two_layers']:
        weights = save_weigths_two_layers(parrot_layer, layer_hid, layer_out,
                                          settings)
    else:
        weights = save_weights_one_layer(parrot_layer, layer_out, settings)

    # print(weights['layer_out'].keys())
    # with open('weights' + str(weights['layer_out'].keys()) + '.json', 'w') as outfile:
    #     json.dump(weights, outfile, indent=4)
    devices = {
        'voltmeter': voltmeter,
        'spike_detector_1': spike_detector_1,
        'spike_detector_2': spike_detector_2,
        'spike_detector_3': spike_detector_3,
    }
    return weights, output_latency, devices, weights_history, norm_history
Example 10
def bench(config, order):

    simtime = 1000 # simulation time in ms
    dt = 1.0 # simulation resolution in ms
    delay = 2.0 # synaptic delay in ms

    g = 5.0 # ratio inhibitory weight/excitatory weight
    eta = 2.0 # external rate relative to threshold rate
    epsilon = 0.1 # connection probability
    NE = 4*order # number of excitatory neurons
    NI = 1*order # number of inhibitory neurons
    N_rec = 50 # record from 50 neurons
    CE = int(epsilon*NE) # number of excitatory synapses per neuron
    CI = int(epsilon*NI) # number of inhibitory synapses per neuron
    tauMem = 20.0 # time constant of membrane potential in ms
    theta = 20.0 # membrane threshold potential in mV
    neuron_params = {
        "C_m": 1.0,
        "tau_m": tauMem,
        "t_ref": 2.0,
        "E_L": 0.0,
        "V_reset": 0.0,
        "V_m": 0.0,
        "V_th": theta,
    }
    J = 0.1 # postsynaptic amplitude in mV
    J_ex = J # amplitude of excitatory postsynaptic potential
    J_in = -g*J_ex # amplitude of inhibitory postsynaptic potential
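    # External drive: nu_th is the rate (in spikes/ms) at which CE inputs of
    # weight J bring the membrane to threshold; p_rate converts eta*nu_th to
    # spikes/s and multiplies by CE, so one Poisson generator replaces the CE
    # external inputs per neuron.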
    nu_th = theta/(J*CE*tauMem)
    nu_ex = eta*nu_th
    p_rate = 1000.0*nu_ex*CE

    nest.ResetKernel()

    nest.SetKernelStatus({"resolution": dt, "print_time": True })
    nest.SetDefaults("iaf_psc_delta", neuron_params)
    nest.SetDefaults("poisson_generator",{"rate": p_rate})

    nodes_ex = nest.Create("iaf_psc_delta",NE)
    nodes_in = nest.Create("iaf_psc_delta",NI)
    noise    = nest.Create("poisson_generator")
    espikes  = nest.Create("spike_detector")
    ispikes  = nest.Create("spike_detector")

    nest.SetStatus(espikes,[{"label": "brunel-py-ex",
                             "withtime": True,
                             "withgid": True}])

    nest.SetStatus(ispikes,[{"label": "brunel-py-in",
                             "withtime": True,
                             "withgid": True}])

    nest.CopyModel("static_synapse","excitatory",{"weight":J_ex, "delay":delay})
    nest.CopyModel("static_synapse","inhibitory",{"weight":J_in, "delay":delay})

    nest.Connect(noise,nodes_ex, syn_spec="excitatory")
    nest.Connect(noise,nodes_in, syn_spec="excitatory")
    nest.Connect(nodes_ex[:N_rec], espikes, syn_spec="excitatory")
    nest.Connect(nodes_in[:N_rec], ispikes, syn_spec="excitatory")

    conn_params_ex = {'rule': 'fixed_indegree', 'indegree': CE}
    conn_params_in = {'rule': 'fixed_indegree', 'indegree': CI}

    if config == 1:
        nest.Connect(nodes_ex, nodes_ex+nodes_in, conn_params_ex, "excitatory")
        nest.Connect(nodes_in, nodes_ex+nodes_in, conn_params_in, "inhibitory")
    elif config == 2:
        stdp_synapse = {
            "model": "stdp_triplet_all_in_one_synapse",
            "tau_plus": 16.8,
            "tau_plus_triplet": 101.0,
            "tau_minus": 33.7,
            "tau_minus_triplet": 125.0,
            "Aplus": 5e-10,
            "Aminus": 7e-3,
            "Aplus_triplet": 6.2e-3,
            "Aminus_triplet": 2.3e-4,
            "Kplus": 0.0,
            "Kplus_triplet": 0.0,
            "Kminus": 0.0,
            "Kminus_triplet": 0.0,
            "delay": delay,
        }

        stdp_excitatory = stdp_synapse.copy()
        stdp_excitatory.update({
            "Wmin": J_ex,
            "weight": J_ex,
            "Wmax": J_ex,
        })
        stdp_inhibitory = stdp_synapse.copy()
        stdp_inhibitory.update({
            "Wmin": J_in,
            "weight": J_in,
            "Wmax": J_in,
        })

        nest.Connect(nodes_ex, nodes_ex+nodes_in, conn_params_ex, stdp_excitatory)
        nest.Connect(nodes_in, nodes_ex+nodes_in, conn_params_in, stdp_inhibitory)
    elif config == 3:
        stdp_neuron = {
            "tau_plus": 16.8,
            "tau_plus_triplet": 101.0,
            "tau_minus": 33.7,
            "tau_minus_triplet": 125.0,
            "Aplus": 5e-10,
            "Aminus": 7e-3,
            "Aplus_triplet": 6.2e-3,
            "Aminus_triplet": 2.3e-4,
            "Kplus": 0.0,
            "Kplus_triplet": 0.0,
            "Kminus": 0.0,
            "Kminus_triplet": 0.0,
            "nearest_spike": False,
        }

        stdp_excitatory = stdp_neuron.copy()
        stdp_excitatory.update({
            "Wmin": J_ex,
            "weight": J_ex,
            "Wmax": J_ex,
        })
        stdp_inhibitory = stdp_neuron.copy()
        stdp_inhibitory.update({
            "Wmin": J_in,
            "weight": J_in,
            "Wmax": J_in,
        })

        min_delay = nest.GetKernelStatus('resolution')
        pre_syn_spec = { "delay": min_delay }
        post_syn_spec = {
            "delay": delay - min_delay,
            "receptor_type": 1 # differentiate post-synaptic feedback
        }
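        # Config 3 represents each plastic connection with an explicit
        # 'stdp_triplet_node': presynaptic spikes reach the node after the
        # minimal delay, the node relays them to the postsynaptic neuron with
        # the remaining delay, and postsynaptic spikes are fed back to the node
        # on receptor 1 to drive the triplet STDP update.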

        for post in nodes_ex+nodes_in:
            syn_post_spec = {
                "weight": J_ex,
                "delay": delay - min_delay
            }
            selected_ex = random.sample([n for n in nodes_ex if n != post], CE)
            synapses_ex = nest.Create("stdp_triplet_node", CE, params = stdp_excitatory)
            nest.Connect(selected_ex, synapses_ex, 'one_to_one', pre_syn_spec)
            nest.Connect(synapses_ex, (post,), 'all_to_all', syn_post_spec)
            nest.Connect((post,), synapses_ex, 'all_to_all', post_syn_spec)

        for post in nodes_ex+nodes_in:
            syn_post_spec = {
                "weight": J_in,
                "delay": delay - min_delay
            }
            selected_in = random.sample([n for n in nodes_in if n != post], CI)
            synapses_in = nest.Create("stdp_triplet_node", CI, params = stdp_inhibitory)
            nest.Connect(selected_in, synapses_in, 'one_to_one', pre_syn_spec)
            nest.Connect(synapses_in, (post,), 'all_to_all', syn_post_spec)
            nest.Connect((post,), synapses_in, 'all_to_all', post_syn_spec)

    else:
        raise Exception('unknown config')

    nest.Simulate(simtime)
Example 11

    def run_protocol(self, dt):
        """Set up a network with pre-post spike pairings with t_post - t_pre = dt"""

        nest.set_verbosity("M_WARNING")
        nest.ResetKernel()

        # set pre and postsynaptic spike times
        delay = 1.  # delay for connections
        dspike = 100.  # ISI

        # set the correct real spike times for generators (correcting for delays)
        pre_times = [100., 100. + dspike]
        post_times = [k + dt for k in pre_times]

        # create spike_generators with these times
        pre_spikes = nest.Create("spike_generator",
                                 params={
                                     "spike_times": pre_times,
                                     'precise_times': True
                                 })
        post_spikes = nest.Create("spike_generator",
                                  params={
                                      "spike_times": post_times,
                                      'precise_times': True
                                  })

        # create parrot neurons and connect spike_generators
        pre_parrot = nest.Create("parrot_neuron_ps", 1)
        post_parrot = nest.Create("parrot_neuron_ps", 1)

        nest.Connect(pre_spikes, pre_parrot, syn_spec={"delay": delay})
        nest.Connect(post_spikes, post_parrot, syn_spec={"delay": delay})

        # create spike detector
        spikes = nest.Create("spike_detector", params={'precise_times': True})
        nest.Connect(pre_parrot, spikes)
        nest.Connect(post_parrot, spikes)

        # connect both parrot neurons with a stdp synapse onto port 1
        # thereby spikes transmitted through the stdp connection are
        # not repeated postsynaptically.
        syn_spec = {
            "model": "stdp_synapse",
            "receptor_type":
            1,  # set receptor 1 postsynaptically, to not generate extra spikes
        }
        conn_spec = {
            "rule": "one_to_one",
        }
        nest.Connect(pre_parrot,
                     post_parrot,
                     syn_spec=syn_spec,
                     conn_spec=conn_spec)

        # get STDP synapse and weight before protocol
        syn = nest.GetConnections(source=pre_parrot,
                                  synapse_model="stdp_synapse")
        syn_status = nest.GetStatus(syn)[0]
        w_pre = syn_status['weight']

        last_time = max(pre_times[-1], post_times[-1])
        nest.Simulate(last_time + 2 * delay)

        # get weight post protocol
        syn_status = nest.GetStatus(syn)[0]
        w_post = syn_status['weight']

        return w_pre, w_post
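A minimal driver sketch (not part of the original test): sweeping the pre/post offset dt traces out the STDP window, assuming the method sits in a test class and numpy is imported as np:

    dts = np.arange(-10.0, 10.5, 2.5)
    weight_changes = []
    for dt in dts:
        w_pre, w_post = self.run_protocol(dt)
        weight_changes.append(w_post - w_pre)  # net change from the two pairings at offset dt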
Example 12
    help=
    'Length, in milliseconds, of experiment. Must be an integer. Default is 100.'
)
parser.add_argument(
    '--no_mc2approx',
    action='store_true',
    help='If not included, uses mc2 data for a better approximation.')
parser.add_argument(
    '--shuffle',
    action='store_true',
    help='If included, randomly shuffles the mc2 adjacency matrix.')
args = parser.parse_args()

# Set up
# nest.set_verbosity("M_ERROR")  # Uncomment this to make NEST quiet
nest.ResetKernel()  # Reset nest
nest.SetKernelStatus({"local_num_threads": 8})  # Run on many threads
root = sys.argv[0][:-11]  # Path prefix of this script (file name stripped)
simulation_id = datetime.now().strftime(
    "%s")  # Custom ID so files are not overwritten

# Load circuit info
ntnstatus('Loading mc2 structural information')
nnum = 31346  # Number of neurons in circuit
adj = load_npz(root + 'structure/adjmat_mc2.npz').toarray()  # Adjacency matrix
exc = np.load(root + 'structure/bbmc2_excitatory.npy'
              )  # Binary list indicating if neuron is excitatory or not
mc2_delays = np.load(root + 'structure/distances_mc2.npz')[
    'data']  # Interpret distances as delays
mc2_layers = np.load(
    root + 'structure/layersize_mc2.npy'
Example 13
    def __init__(self,prefix,new_pars=[],pars_file=[]):
        '''Creates and simulates a network in NEST'''
        
        #Temporary way to run without the selected pars file
        pars_file = []
        start_build_net = time.time()
        
        if pars_file==[]:	# import generic params_d file
	    import params_d_psdb
	    reload(params_d_psdb)
	    pars = params_d_psdb.Parameters(new_pars)		
	else: 			# import specific params_d file
	    fobj,pathname,description = imp.find_module(pars_file)
	    params_d_sp = imp.load_module(pars_file,fobj,pathname,description)
	    pars = params_d_sp.Parameters(new_pars)

	
	self.T_sim = pars.T_sim + pars.T_wup + pars.T_cdown
	#self.record_spikes = pars.record_spikes
	self.record_vm = pars.record_vm
	self.recorders = {}
	self.events = {'spikes':[],'vm':[]}	
	self.pars = pars
	self.pars.prefix = prefix	
	
	# INITIALIZE NETWORK -----------------------------------------------------------------------	        
        nest_path_tmp = tempfile.mktemp(prefix=pars.nest_path_tmp)
        os.mkdir(nest_path_tmp)
        nest.ResetKernel()
        shutil.rmtree(nest.GetStatus([0],'data_path')[0],ignore_errors=True)
        nest.SetStatus([0], {'resolution': pars.dt, 'print_time': pars.print_time,
        'overwrite_files':pars.owr_files, 'rng_seeds':[int(pars.rnd_seeds)],
        'data_path':nest_path_tmp})
        
        #print '\nBuilding network...'
        
        # CREATE SOURCES ----------------------------------------------------------------------------
        self.pg_exc = nest.Create('poisson_generator', 1)
        self.pg_inh = nest.Create('poisson_generator', 1)
        nest.SetStatus(self.pg_exc, {'rate': pars.pg_rate_exc, 'stop': pars.T_sim+pars.T_wup})
        nest.SetStatus(self.pg_inh, {'rate': pars.pg_rate_inh, 'stop': pars.T_sim+pars.T_wup})
                
        self.dc1_exc = nest.Create('dc_generator',1)
        nest.SetStatus(self.dc1_exc, pars.dc1_pars)
        
        self.dc2_exc = nest.Create('dc_generator',1)
        nest.SetStatus(self.dc2_exc, pars.dc2_pars)
        
         # CREATE POPULATIONS -----------------------------------------------------------------------
	#print 'Creating populations...\n'
	
	
	neurons_exc = []
	self.pops_exc = range(len(pars.N_exc))
	for ii,nr in enumerate(pars.N_exc):
	  self.pops_exc[ii] = nest.Create(pars.model_type, abs(nr))
	  neurons_exc.extend(self.pops_exc[ii])
	  
	#  set neuron parameters	for every population independently              
	for ntypes in range(len(pars.N_exc)):
	  nest.SetStatus(self.pops_exc[ntypes], pars.neuron_params_exc[ntypes])
	  
	
	if pars.rnd_dist:
	  nest.SetStatus(neurons_exc,'tau_m',pars.tau_m_rnd)
	  
	neurons_inh = []
	self.pops_inh = range(len(pars.N_inh))
	for ii,nr in enumerate(pars.N_inh):
	  self.pops_inh[ii] = nest.Create(pars.model_type, abs(nr))
	  neurons_inh.extend(self.pops_inh[ii])
	  
	#  set neuron parameters	for every population independently              
	for ntypes in range(len(pars.N_inh)):
	  nest.SetStatus(self.pops_inh[ntypes], pars.neuron_params_inh[ntypes])
	  
	
	if pars.rnd_dist:
	  nest.SetStatus(neurons_inh,'tau_m',pars.tau_m_rnd)
	  

	if pars.change_type:
	  self.time_lis = [pars.chg_time,pars.T_sim]
	  
	  
	self.pops = self.pops_exc + self.pops_inh
	
	self.pops_exc = [item for sublist in self.pops_exc for item in sublist]
	self.pops_inh = [item for sublist in self.pops_inh for item in sublist]
	    
	 # Make connections -------------------------------------------------------------------------
	#total_neu = [item for sublist in self.pops for item in sublist]
	self.pars.neurons_tot = len(self.pops_exc) + len(self.pops_inh) 

	self.pars.pops_exc = self.pops_exc
	self.pars.pops_inh = self.pops_inh
	
	nest.SetStatus(self.pops_exc, params = {'tau_m':pars.tau_m_exc,'C_m':pars.C_m_exc,'tau_syn':pars.tau_syn_ex})
	
	nest.DivergentConnect(self.pg_exc, self.pops_exc, weight = pars.J_ext, delay = pars.min_del)
 
	nest.DivergentConnect(self.pg_inh, self.pops_inh, weight = pars.J_ext, delay = pars.min_del)

         #STN connections
	num_stn_gpe = int(pars.epsilon_stn_gpe * len(self.pops_inh))
	nest.RandomDivergentConnect(self.pops_exc,self.pops_inh, num_stn_gpe, weight = pars.J_stn_gpe, delay = pars.delay_inter) 
	num_stn_stn = int(pars.epsilon_stn_stn* len(self.pops_exc))
	nest.RandomDivergentConnect(self.pops_exc,self.pops_exc, num_stn_stn, weight = pars.J_stn_stn, delay = pars.delay_intra) 

	#GPE connections
	num_gpe_gpe = int(pars.epsilon_gpe_gpe * len(self.pops_inh))
	nest.RandomDivergentConnect(self.pops_inh,self.pops_inh, num_gpe_gpe, weight = pars.J_gpe_gpe, delay = pars.delay_intra) 
	num_gpe_stn = int(pars.epsilon_gpe_stn* len(self.pops_exc))
	nest.RandomDivergentConnect(self.pops_inh,self.pops_exc, num_gpe_stn, weight = pars.J_gpe_stn, delay = pars.delay_inter)

	if pars.add_spikes == 1:
	    extra_spike = nest.Create('spike_generator',int(pars.num_gen))
	    def spike_times():
	      spt = np.random.uniform(pars.st_val+pars.beg_width,pars.st_val + pars.ex_width,1)
	      spike_times = (np.sort(np.random.uniform(spt,spt + pars.dur ,pars.num_spk))).tolist()     
	      return spike_times
	    

	    for ii in extra_spike:
	      spike_time = spike_times()
	      spike_time = np.around(spike_time,1)
	      nest.SetStatus([ii],params = {'spike_times':(np.array(spike_time)).flatten()})
	    
	    if self.pars.extra_spk == 'exc':
	      nest.RandomDivergentConnect(extra_spike,self.pops_inh,int(len(self.pops_inh)*pars.epsilon_stn_gpe),weight = pars.J_stn_gpe, delay = pars.delay_inter)
	      nest.RandomDivergentConnect(extra_spike,self.pops_exc,int(len(self.pops_exc)*pars.epsilon_stn_stn),weight = pars.J_stn_stn, delay = pars.delay_intra)
	    elif self.pars.extra_spk == 'inh':
	      nest.RandomDivergentConnect(extra_spike,self.pops_inh,int(len(self.pops_inh)*pars.epsilon_stn_gpe),weight = pars.J_gpe_stn, delay = pars.delay_inter)
	      nest.RandomDivergentConnect(extra_spike,self.pops_exc,int(len(self.pops_exc)*pars.epsilon_stn_stn),weight = pars.J_gpe_gpe, delay = pars.delay_intra)
 
	
	
#	pops_ch = self.pops_exc + self.pops_inh 
#	pops_ch = self.pops_exc + self.pops_inh 
#	pops_rd = random.sample(pops_ch,500)
	#CREATE RECORDERS----------------------------------------------------------------------------
	self.record_spikes = list(np.arange(self.pops_exc[0],self.pops_inh[-1]+1))
	#self.record_spikes_new = random.sample(self.record_spikes,1000) 
#	self.record_spikes = pops_rd
	#if self.record_spikes!= []:
	sd = nest.Create('spike_detector',1)
	nest.SetStatus(sd,{'to_file':True,'to_memory':True})
	nest.ConvergentConnect(self.record_spikes,sd)
	self.recorders['sd'] = sd
	
	
	if self.pars.record_vm != []:
	    vm = nest.Create('voltmeter',1)
	    #print 'Id of vm recorder: ',vm
	    nest.SetStatus(vm,{'withtime':True,'withgid':True,'to_file':True,'to_memory':False})
	    nest.DivergentConnect(vm,self.pars.record_vm)
	    nest.SetStatus(self.pars.record_vm,{'V_th':1000.}) # record free Vm
	    self.recorders['vm'] = vm
	    
	self.build_net_time = time.time()-start_build_net
Example n. 14
 def __init__(self):
     nest.ResetKernel()
     nest.SetKernelStatus({
         "resolution": 0.1,
         "print_time": True,
         "overwrite_files": True,
         "local_num_threads": 8
     })
     nest.CopyModel('iaf_psc_alpha', 'exci')
     nest.CopyModel('iaf_psc_alpha', 'inhi')
     nest.CopyModel('static_synapse', 'exc', {'weight': 5.0})
     nest.CopyModel('static_synapse', 'inh', {'weight': -5.0})
     self.l = tp.CreateLayer({
         'rows': 90,
         'columns': 90,
         'elements': ['exci', 5, 'inhi', 5],
         'edge_wrap': False
     })
     cdict = {
         'connection_type': 'divergent',
         'mask': {
             'circular': {
                 'radius': 0.2
             }
         },
         'kernel': {
             'gaussian': {
                 'p_center': 0.8,
                 'sigma': 0.075
             }
         },
         'delays': {
             'linear': {
                 'c': 2.0,
                 'a': 0.02
             }
         },
         'sources': {
             'model': 'exci'
         },
         'targets': {
             'model': 'inhi'
         },
         'synapse_model': 'exc'
     }
     tp.ConnectLayers(self.l, self.l, cdict)
     self.rec_ex = tp.CreateLayer({
         'rows': 1,
         'columns': 1,
         'elements': 'spike_detector'
     })
     cdict_rec_ex = {
         'connection_type': 'convergent',
         'sources': {
             'model': "exci"
         }
     }
     tp.ConnectLayers(self.l, self.rec_ex, cdict_rec_ex)
     # Background stimulation
     stim = tp.CreateLayer({
         'rows': 1,
         'columns': 1,
         'elements': 'poisson_generator'
     })
     stim_i = nest.GetLeaves(stim, local_only=True)[0]
     nest.SetStatus(stim_i, {'rate': 30000.})
     background_stim_dict = {
         'connection_type': 'divergent',
         'mask': {
             'grid': {
                 'rows': 90,
                 'columns': 90
             }
         },
         'synapse_model': 'exc'
     }
     tp.ConnectLayers(stim, self.l, background_stim_dict)
     nest.Simulate(2000.)
     rec_ex_true = nest.GetLeaves(self.rec_ex, local_only=True)[0]
     self.events_ex = nest.GetStatus(rec_ex_true, "events")[0]
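The class above only stores the raw spike events. A small hypothetical helper (not in the original snippet) that reduces them to a mean firing rate of the recorded excitatory population, assuming the 90x90 grid with 5 'exci' elements per node and the 2000 ms simulation used above:

 def mean_exc_rate(self):
     # Hypothetical helper: mean rate (spikes/s) per excitatory neuron,
     # assuming the 90 x 90 grid with 5 'exci' elements per node and
     # the 2000 ms simulated in __init__ above.
     n_exc = 90 * 90 * 5
     sim_time_s = 2000.0 / 1000.0
     return len(self.events_ex['times']) / (n_exc * sim_time_s)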
Example n. 15
 def prepare_simulation(self):
     nest.ResetKernel()
     nest.set_verbosity('M_ERROR')
     '''
     We set global kernel parameters. Here we define the resolution
     for the simulation, which is also the time resolution for the update
     of the synaptic elements.
     '''
     nest.SetKernelStatus({'resolution': self.dt})
     '''
     Set the number of virtual processes. Note that structural plasticity (SP) currently
     does not work well with OpenMP, so simulations must always be launched with mpiexec (MPI).
     '''
     nest.SetKernelStatus({'total_num_virtual_procs': self.comm.Get_size()})
     print("Total number of virtual processes set to: " +
           str(self.comm.Get_size()))
     '''
     Set the structural plasticity update interval, which determines how often
     the connectivity is updated inside the network. It is important to note
     that synaptic elements and connections change on different time scales.
     '''
     nest.SetStructuralPlasticityStatus({
         'structural_plasticity_update_interval':
         self.update_interval,
         'tau_Ca':
         10000.0,
         'beta_Ca':
         0.001,
     })
     '''
     Now we define the structural plasticity synapses. In this example we create
     two synapse models per region, one for excitatory and one for inhibitory
     synapses. We then specify that excitatory synapses can only be created
     between a presynaptic element called 'Axon_ex' and a postsynaptic element
     called 'Den_ex'. The synaptic elements for inhibitory synapses are defined
     in the same manner.
     '''
     spsyn_names = ['synapse_in' + str(nam) for nam in range(self.regions)]
     spsyn_names_e = [
         'synapse_ex' + str(nam) for nam in range(self.regions)
     ]
     sps = {}
     for x in range(0, self.regions):
         nest.CopyModel('static_synapse', 'synapse_in' + str(x))
         nest.SetDefaults('synapse_in' + str(x), {
             'weight': self.psc_i,
             'delay': 1.0
         })
         nest.CopyModel('static_synapse', 'synapse_ex' + str(x))
         nest.SetDefaults('synapse_ex' + str(x), {
             'weight': self.psc_e,
             'delay': 1.0
         })
         sps[spsyn_names[x]] = {
             'model': 'synapse_in' + str(x),
             'post_synaptic_element': 'Den_in' + str(x),
             'pre_synaptic_element': 'Axon_in' + str(x),
         }
         sps[spsyn_names_e[x]] = {
             'model': 'synapse_ex' + str(x),
             'post_synaptic_element': 'Den_ex' + str(x),
             'pre_synaptic_element': 'Axon_ex' + str(x),
         }
     nest.SetStructuralPlasticityStatus(
         {'structural_plasticity_synapses': sps})
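The method above only registers the structural-plasticity synapse models; it does not show the companion step in which neurons receive the matching synaptic elements and the mechanism is switched on. A minimal standalone sketch of that step, assuming a gaussian growth curve and the NEST 2.x structural-plasticity API (an illustration, not the original code; all parameter values are illustrative):

# Assumed growth-curve parameters; values are illustrative only.
growth_curve = {
    'growth_curve': 'gaussian',
    'growth_rate': 0.0001,  # new elements per ms
    'continuous': False,
    'eta': 0.0,             # minimum calcium concentration
    'eps': 0.05,            # target mean calcium concentration
}
# Element names follow the 'Den_ex<region>' / 'Axon_ex<region>' scheme used above (region 0).
synaptic_elements = {
    'Den_ex0': growth_curve,
    'Den_in0': growth_curve,
    'Axon_ex0': growth_curve,
}
neurons = nest.Create('iaf_psc_alpha', 100,
                      {'synaptic_elements': synaptic_elements})
nest.EnableStructuralPlasticity()
nest.Simulate(10000.0)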
def test(settings, data, weights):
    np.random.seed()
    rank = nest.Rank()
    rng = np.random.randint(500)
    num_v_procs = settings['network']['num_threads'] \
                * settings['network']['num_procs']

    nest.ResetKernel()
    nest.SetKernelStatus({
        'local_num_threads':
        settings['network']['num_threads'],
        'total_num_virtual_procs':
        num_v_procs,
        'resolution':
        settings['network']['h'],
        'rng_seeds':
        range(rng, rng + num_v_procs)
    })

    layer_out = nest.Create('iaf_psc_exp', settings['topology']['n_layer_out'])
    if settings['topology']['two_layers']:
        layer_hid = nest.Create('iaf_psc_exp',
                                settings['topology']['n_layer_hid'])

    spike_generators_1 = nest.Create('spike_generator',
                                     settings['topology']['n_input'])
    poisson_layer = nest.Create('poisson_generator',
                                settings['topology']['n_input'])
    parrot_layer = nest.Create('parrot_neuron',
                               settings['topology']['n_input'])

    spike_detector_1 = nest.Create('spike_detector')
    spike_detector_2 = nest.Create('spike_detector')
    spike_detector_3 = nest.Create('spike_detector')

    voltmeter = nest.Create('voltmeter', 1, {
        'withgid': True,
        'withtime': True
    })
    nest.Connect(spike_generators_1,
                 parrot_layer,
                 'one_to_one',
                 syn_spec='static_synapse')

    if settings['network']['test_with_noise']:
        nest.SetStatus(poisson_layer,
                       {'rate': settings['network']['noise_freq']})
        nest.Connect(poisson_layer,
                     parrot_layer,
                     'one_to_one',
                     syn_spec='static_synapse')

    nest.Connect(layer_out, spike_detector_1, 'all_to_all')
    nest.Connect(parrot_layer, spike_detector_2, 'all_to_all')
    nest.Connect(voltmeter, layer_out)

    nest.SetStatus(layer_out, settings['model']['neuron_out'])

    if settings['topology']['two_layers']:
        if settings['topology']['use_inhibition']:
            interconnect_layer(layer_hid, settings['model']['syn_dict_inh'])
            # nest.Connect(layer_out, layer_hid,
            #              'all_to_all', syn_spec=settings['syn_dict_inh'])

        nest.Connect(parrot_layer,
                     layer_hid,
                     'all_to_all',
                     syn_spec='static_synapse')
        nest.Connect(layer_hid,
                     layer_out,
                     'all_to_all',
                     syn_spec='static_synapse')
        nest.Connect(layer_hid, spike_detector_3, 'all_to_all')
        nest.SetStatus(layer_hid, settings['model']['neuron_hid'])
    else:
        if settings['topology']['use_inhibition']:
            interconnect_layer(layer_out, settings['model']['syn_dict_inh'])
        nest.Connect(parrot_layer,
                     layer_out,
                     'all_to_all',
                     syn_spec='static_synapse')

    if settings['topology']['two_layers']:
        for neuron_id in weights['layer_hid']:
            connection = nest.GetConnections(parrot_layer, target=[neuron_id])
            nest.SetStatus(connection, 'weight',
                           weights['layer_hid'][neuron_id])

        for neuron_id in weights['layer_out']:
            connection = nest.GetConnections(layer_hid, target=[neuron_id])
            nest.SetStatus(connection, 'weight',
                           weights['layer_out'][neuron_id])
    else:
        for neuron_id in weights['layer_out']:
            connection = nest.GetConnections(parrot_layer, target=[neuron_id])
            nest.SetStatus(connection, 'weight',
                           weights['layer_out'][neuron_id])

    np.random.seed(500)
    output_latency = []
    d_time = settings['network']['start_delta']
    nest.Simulate(settings['network']['start_delta'])

    for example, examples_class in zip(data['input'], data['class']):
        set_spike_in_generators(example, spike_generators_1, d_time,
                                d_time + settings['network']['h_time'],
                                settings['network']['h_time'],
                                settings['network']['h'])
        # nest.SetStatus(poisson_layer, {'start': 30.})
        nest.Simulate(settings['network']['h_time'])
        d_time += settings['network']['h_time']

    spikes = nest.GetStatus(spike_detector_1,
                            keys="events")[0]['times'].tolist()
    senders = nest.GetStatus(spike_detector_1,
                             keys="events")[0]['senders'].tolist()

    output_latency = {'spikes': spikes, 'senders': senders}

    devices = {
        'voltmeter': voltmeter,
        'spike_detector_1': spike_detector_1,
        'spike_detector_2': spike_detector_2,
        'spike_detector_3': spike_detector_3,
    }
    return output_latency, devices
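The test() function above calls an interconnect_layer() helper that is not shown in the snippet. A plausible minimal sketch, assuming it simply wires a layer recurrently with the given synapse dictionary while excluding self-connections (an assumption, not the original implementation):

def interconnect_layer(layer, syn_dict):
    # Recurrently connect every neuron in `layer` to every other neuron in it,
    # excluding autapses, with the synapse parameters passed in (e.g. syn_dict_inh).
    nest.Connect(layer, layer,
                 conn_spec={'rule': 'all_to_all', 'autapses': False},
                 syn_spec=syn_dict)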
 def setUp(self):
     nest.ResetKernel()
Example n. 19
    def setUp(self):
        # test parameter to compare analytic solution to simulation
        self.rtol = 1.0

        # test parameters
        self.N = 100
        self.rate_ex = 1.5 * 1e4
        self.J = 0.1

        # simulation parameters
        self.simtime = 500.
        self.dt = 0.1
        self.start = 200.

        nest.set_verbosity('M_WARNING')
        nest.ResetKernel()
        nest.SetKernelStatus({
            'resolution': self.dt,
            'use_wfr': False,
            'print_time': True
        })

        # set up driven iaf neuron

        self.iaf_neuron = nest.Create('iaf_psc_delta',
                                      self.N)  # , params={"C_m": 1.0})

        self.poisson_generator = nest.Create('poisson_generator',
                                             params={'rate': self.rate_ex})
        nest.Connect(self.poisson_generator,
                     self.iaf_neuron,
                     syn_spec={
                         'weight': self.J,
                         'delay': self.dt
                     })

        self.spike_detector = nest.Create("spike_detector",
                                          params={'start': self.start})
        nest.Connect(self.iaf_neuron, self.spike_detector)

        # set up driven siegert neuron

        neuron_status = nest.GetStatus(self.iaf_neuron)[0]
        siegert_params = {
            'tau_m': neuron_status['tau_m'],
            't_ref': neuron_status['t_ref'],
            'theta': neuron_status['V_th'] - neuron_status['E_L'],
            'V_reset': neuron_status['V_reset'] - neuron_status['E_L']
        }
        self.siegert_neuron = nest.Create('siegert_neuron',
                                          params=siegert_params)

        self.siegert_drive = nest.Create('siegert_neuron',
                                         1,
                                         params={'mean': self.rate_ex})
        J_mu_ex = neuron_status['tau_m'] * 1e-3 * self.J
        J_sigma_ex = neuron_status['tau_m'] * 1e-3 * self.J**2
        syn_dict = {
            'drift_factor': J_mu_ex,
            'diffusion_factor': J_sigma_ex,
            'model': 'diffusion_connection'
        }
        nest.Connect(self.siegert_drive,
                     self.siegert_neuron,
                     syn_spec=syn_dict)

        self.multimeter = nest.Create("multimeter",
                                      params={
                                          'record_from': ['rate'],
                                          'interval': self.dt
                                      })
        nest.Connect(self.multimeter, self.siegert_neuron)
Example n. 20
    def test_GetKernelStatus(self):
        """GetKernelStatus"""

        nest.ResetKernel()
        s = nest.GetKernelStatus()
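GetKernelStatus also accepts a single key and then returns the bare value. A sketch of a companion test using that form of the standard PyNEST call (not part of the original):

    def test_GetKernelStatus_single_key(self):
        """GetKernelStatus with a single key"""

        nest.ResetKernel()
        resolution = nest.GetKernelStatus('resolution')
        self.assertEqual(resolution, nest.GetKernelStatus()['resolution'])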
Example n. 21
def test(referenceModel, testant, gsl_error_tol, tolerance=0.000001):
    nest.ResetKernel()
    neuron1 = nest.Create(referenceModel)
    neuron2 = nest.Create(testant)

    if gsl_error_tol is not None:
        nest.SetStatus(neuron2, {"gsl_error_tol": gsl_error_tol})

    spikegenerator = nest.Create('spike_generator',
                                 params={
                                     'spike_times': [100.0, 200.0],
                                     'spike_weights': [20.0, -20.0]
                                 })

    nest.Connect(spikegenerator, neuron1)
    nest.Connect(spikegenerator, neuron2)

    multimeter1 = nest.Create('multimeter')
    multimeter2 = nest.Create('multimeter')

    V_m_specifier = 'V_m'  # 'delta_V_m'
    nest.SetStatus(multimeter1, {
        "withtime": True,
        "record_from": [V_m_specifier]
    })
    nest.SetStatus(multimeter2, {
        "withtime": True,
        "record_from": [V_m_specifier]
    })

    nest.Connect(multimeter1, neuron1)
    nest.Connect(multimeter2, neuron2)

    nest.Simulate(400.0)
    dmm1 = nest.GetStatus(multimeter1)[0]
    Vms1 = dmm1["events"][V_m_specifier]
    ts1 = dmm1["events"]["times"]

    events1 = dmm1["events"]
    pylab.figure(1)

    dmm2 = nest.GetStatus(multimeter2)[0]
    Vms2 = dmm2["events"][V_m_specifier]
    ts2 = dmm2["events"]["times"]

    pylab.plot(ts1, Vms1, label="Reference " + referenceModel)
    pylab.plot(ts2, Vms2, label="Testant " + testant)
    pylab.legend(loc='upper right')

    pylab.show()
    for index in range(0, len(Vms1)):
        if abs(Vms1[index] - Vms2[index]) > tolerance:
            print('!!!!!!!!!!!!!!!!!!!!')
            print(
                str(Vms1[index]) + " differs from " + str(Vms2[index]) +
                " at iteration: " + str(index) + " of overall iterations: " +
                str(len(Vms1)))
            print('!!!!!!!!!!!!!!!!!!!!')
            raise Exception(testant + ": TEST FAILED")
        elif abs(Vms1[index] - Vms2[index]) > 0:
            pass  # print("Greater than 0 difference " + str(abs(Vms1[index] - Vms2[index])) + " at iteration: " + str(index) + " of overall iterations: " + str(len(Vms1)))
    print(testant + " PASSED")
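A hypothetical invocation of the test() function above, following the (referenceModel, testant, gsl_error_tol, tolerance) pattern used by the integration tests later in this collection; the model names and tolerance are illustrative and assume a NESTML-generated module has been installed:

if __name__ == '__main__':
    nest.Install('nestmlmodule')  # assumed name of the generated module
    test('iaf_psc_exp', 'iaf_psc_exp_nestml', gsl_error_tol=None, tolerance=1e-2)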
Example n. 22
    def test_rate_instantaneous_and_delayed(self):

        # neuron parameters
        neuron_params = {'tau': 5., 'sigma': 0.}
        drive = 1.5
        delay = 2.
        weight = 0.5

        # simulation parameters
        simtime = 100.
        dt = 0.001

        nest.set_verbosity('M_WARNING')
        nest.ResetKernel()
        nest.SetKernelStatus({
            'resolution': dt,
            'use_wfr': True,
            'print_time': False
        })

        # set up rate neuron network
        rate_neuron_drive = nest.Create('lin_rate_ipn',
                                        params={
                                            'mu': drive,
                                            'sigma': 0.
                                        })

        rate_neuron_1 = nest.Create('lin_rate_ipn', params=neuron_params)
        rate_neuron_2 = nest.Create('lin_rate_ipn', params=neuron_params)

        multimeter = nest.Create('multimeter',
                                 params={
                                     'record_from': ['rate'],
                                     'precision': 10,
                                     'interval': dt
                                 })

        # record rates and connect neurons
        neurons = rate_neuron_1 + rate_neuron_2

        nest.Connect(multimeter, neurons, 'all_to_all', {'delay': 10.})

        nest.Connect(rate_neuron_drive, rate_neuron_1, 'all_to_all', {
            'model': 'rate_connection_instantaneous',
            'weight': weight
        })

        nest.Connect(rate_neuron_drive, rate_neuron_2, 'all_to_all', {
            'model': 'rate_connection_delayed',
            'delay': delay,
            'weight': weight
        })

        # simulate
        nest.Simulate(simtime)

        # make sure shifted rates are identical
        events = nest.GetStatus(multimeter)[0]['events']
        senders = events['senders']

        rate_1 = np.array(events['rate'][np.where(senders == rate_neuron_1)])
        times_2 = np.array(events['times'][np.where(senders == rate_neuron_2)])
        rate_2 = np.array(events['rate'][np.where(senders == rate_neuron_2)])

        # get shifted rate_2
        rate_2 = rate_2[times_2 > delay]
        # adjust length of rate_1 to be able to subtract
        rate_1 = rate_1[:len(rate_2)]

        assert (np.sum(np.abs(rate_2 - rate_1)) < 1e-12)
Example n. 23
    def test_non_linear_dendrite(self):
        MAX_SSE = 1E-12

        I_dend_alias_name = 'I_dend'  # alias for the synaptic current
        I_dend_internal_name = 'I_kernel2__X__I_2'  # internal name of the synaptic current

        input_path = os.path.join(
            os.path.realpath(
                os.path.join(os.path.dirname(__file__), "resources")),
            "iaf_psc_exp_nonlineardendrite.nestml")
        nest_path = nest.ll_api.sli_func("statusdict/prefix ::")
        target_path = 'target'
        logging_level = 'INFO'
        module_name = 'nestmlmodule'
        store_log = False
        suffix = '_nestml'
        dev = True
        to_nest(input_path, target_path, logging_level, module_name, store_log,
                suffix, dev)
        install_nest(target_path, nest_path)
        nest.set_verbosity("M_ALL")

        nest.ResetKernel()
        nest.Install("nestmlmodule")

        nrn = nest.Create("iaf_psc_exp_nonlineardendrite_nestml")

        sg = nest.Create("spike_generator",
                         params={"spike_times": [10., 20., 30.]})
        nest.Connect(sg,
                     nrn,
                     syn_spec={
                         "receptor_type": 2,
                         "weight": 30.,
                         "delay": 1.
                     })

        mm = nest.Create('multimeter')
        mm.set({
            "record_from": [
                I_dend_alias_name, I_dend_internal_name, 'V_m',
                'dend_curr_enabled', 'I_dend_ap'
            ]
        })
        nest.Connect(mm, nrn)

        nest.Simulate(100.0)

        timevec = mm.get("events")["times"]
        I_dend_alias_ts = mm.get("events")[I_dend_alias_name]
        I_dend_internal_ts = mm.get("events")[I_dend_internal_name]

        if TEST_PLOTS:
            fig, ax = plt.subplots(3, 1)
            ax[0].plot(timevec, I_dend_alias_ts, label="aliased I_dend_syn")
            ax[0].plot(timevec,
                       I_dend_internal_ts,
                       label="internal I_dend_syn")
            ax[0].legend()
            ax_ = ax[0].twinx()
            ax_.plot(timevec, mm.get("events")["dend_curr_enabled"])
            ax_.set_ylabel("dend_curr_enabled")
            ax[1].plot(timevec, mm.get("events")["I_dend_ap"])
            ax[1].set_ylabel("I_dend_AP")
            ax[2].plot(timevec, mm.get("events")["V_m"], label="V_m")
            for _ax in ax:
                _ax.legend()
                _ax.grid()
            plt.ylabel("Dendritic current $I_{dend}$")
            plt.suptitle("Reset of synaptic integration after dendritic spike")
            plt.savefig("/tmp/nestml_triplet_stdp_test.png")

        assert np.all(
            I_dend_alias_ts == I_dend_internal_ts
        ), "Variable " + str(
            I_dend_alias_name
        ) + " and (internal) variable " + str(
            I_dend_internal_name
        ) + " should measure the same thing, but discrepancy in values occurred."

        tidx = np.argmin((timevec - 40)**2)
        assert mm.get("events")["I_dend_ap"][
            tidx] > 0., "Expected a dendritic action potential around t = 40 ms, but dendritic action potential current is zero"
        assert mm.get("events")["dend_curr_enabled"][
            tidx] == 0., "Dendritic synaptic current should be disabled during dendritic action potential"
        tidx_ap_end = tidx + np.where(
            mm.get("events")["dend_curr_enabled"][tidx:] == 1.)[0][0]
        assert np.all(
            I_dend_alias_ts[tidx_ap_end:] == 0.
        ), "After dendritic spike, dendritic current should be reset to 0 and stay at 0."
    def NESTSimulation(self,retina_spikes):

        # NEST Kernel and Network settings
        nest.ResetKernel()
        nest.ResetNetwork()
        nest.SetKernelStatus(
        {"local_num_threads": self.Params['NEST_threads'],
        'resolution': self.Params['resolution'], "rng_seeds": list(self.seeds)})

        # import network description
        import thalamocortical_system

        # get network info
        models, layers, conns  = thalamocortical_system.get_Network(self.Params)

        # Create models
        for m in models:
                nest.CopyModel(m[0], m[1], m[2])

        print ("\n---Creating layers---\n")
        # Create layers, store layer info in Python variable
        layer_IDs = []
        for l in layers:
            exec ("%s = tp.CreateLayer(%s)" % (l[0],l[1]),globals())
            exec ("copy_var = %s" % l[0],globals())
            layer_IDs.append([l[0],copy_var,l[1]['elements']])
#           print (l[0])

        print ("\n---Connecting layers---\n")
        # Create connections, need to insert variable names
        for c in conns:
                eval('tp.ConnectLayers(%s,%s,c[2])' % (c[0], c[1]))
#                print ('tp.ConnectLayers(%s,%s)' % (c[0], c[1]))

        # Initialize spike generators
        Midget_ganglion_cells_L_ON_spikes = retina_spikes[0]
        Midget_ganglion_cells_L_OFF_spikes = retina_spikes[1]
        Midget_ganglion_cells_M_ON_spikes = retina_spikes[2]
        Midget_ganglion_cells_M_OFF_spikes = retina_spikes[3]
        cell = 0

        for x in np.arange(self.Params['N_LGN']):
            for y in np.arange(self.Params['N_LGN']):
                nest.SetStatus([tp.GetElement(Midget_ganglion_cells_L_ON,(x,y))[0]],
                [{'spike_times':Midget_ganglion_cells_L_ON_spikes[cell],
                'spike_weights':[]}])

                nest.SetStatus([tp.GetElement(Midget_ganglion_cells_L_OFF,(x,y))[0]],
                [{'spike_times':Midget_ganglion_cells_L_OFF_spikes[cell],
                'spike_weights':[]}])

                nest.SetStatus([tp.GetElement(Midget_ganglion_cells_M_ON,(x,y))[0]],
                [{'spike_times':Midget_ganglion_cells_M_ON_spikes[cell],
                'spike_weights':[]}])

                nest.SetStatus([tp.GetElement(Midget_ganglion_cells_M_OFF,(x,y))[0]],
                [{'spike_times':Midget_ganglion_cells_M_OFF_spikes[cell],
                'spike_weights':[]}])

                cell+=1

        ## Check-point: Visualization functions
#        fig = tp.PlotLayer(Color_Luminance_inh_L_ON_L_OFF_vertical,nodesize =80)
#        ctr = tp.FindCenterElement(Parvo_LGN_relay_cell_L_ON)
#        tp.PlotTargets(ctr,Color_Luminance_inh_L_ON_L_OFF_vertical,fig = fig,mask=conns[26][2]['mask'],
#        kernel=conns[26][2]['kernel'],src_size=250,tgt_color='red',tgt_size=20,
#        kernel_color='green')
#        plt.show()

#        ctr = tp.FindCenterElement(Color_Luminance_inh_L_ON_L_OFF_vertical)
#        print ("ctr = ",ctr," L-ON = ",tp.FindCenterElement(Parvo_LGN_relay_cell_L_ON)," L-OFF = ",tp.FindCenterElement(Parvo_LGN_relay_cell_L_OFF))
#        for xx in np.arange(5):
#            for yy in np.arange(5):
#                ctr = [tp.GetElement(Color_Luminance_inh_L_ON_L_OFF_vertical,(xx,yy))[0]]
#                conns = nest.GetConnections(target = [ctr[0]])
#                print ("Cell ",ctr)
#                for nn in np.arange(len(conns)):
#                    print ("conns = ",conns[nn])

#        ctr = tp.FindCenterElement(Parvo_LGN_relay_cell_L_ON)
#        targets = tp.GetTargetNodes(ctr,Color_Luminance_L_ON_L_OFF_vertical)
#        print ("targets = ",targets)

        return layer_IDs
    def _test_model_subthreshold(self,
                                 referenceModel,
                                 testant,
                                 gsl_error_tol,
                                 tolerance=0.000001,
                                 nest_ref_model_opts=None,
                                 custom_model_opts=None):
        t_stop = 1000.  # [ms]

        I_stim_vec = np.linspace(10E-12, 1E-9, 100)  # [A]
        rate_testant = float("nan") * np.ones_like(I_stim_vec)
        rate_reference = float("nan") * np.ones_like(I_stim_vec)
        for i, I_stim in enumerate(I_stim_vec):

            nest.ResetKernel()
            neuron1 = nest.Create(referenceModel, params=nest_ref_model_opts)
            neuron2 = nest.Create(testant, params=custom_model_opts)

            if gsl_error_tol is not None:
                nest.SetStatus(neuron2, {"gsl_error_tol": gsl_error_tol})

            dc = nest.Create("dc_generator",
                             params={"amplitude":
                                     I_stim * 1E12})  # 1E12: convert A to pA

            nest.Connect(dc, neuron1)
            nest.Connect(dc, neuron2)

            multimeter1 = nest.Create('multimeter')
            multimeter2 = nest.Create('multimeter')

            V_m_specifier = 'V_m'  # 'delta_V_m'
            nest.SetStatus(multimeter1, {"record_from": [V_m_specifier]})
            nest.SetStatus(multimeter2, {"record_from": [V_m_specifier]})

            nest.Connect(multimeter1, neuron1)
            nest.Connect(multimeter2, neuron2)

            sd_reference = nest.Create('spike_recorder')
            nest.Connect(neuron1, sd_reference)
            sd_testant = nest.Create('spike_recorder')
            nest.Connect(neuron2, sd_testant)

            nest.Simulate(t_stop)
            dmm1 = nest.GetStatus(multimeter1)[0]
            Vms1 = dmm1["events"][V_m_specifier]
            ts1 = dmm1["events"]["times"]

            dmm2 = nest.GetStatus(multimeter2)[0]
            Vms2 = dmm2["events"][V_m_specifier]
            ts2 = dmm2["events"]["times"]

            rate_testant[i] = sd_testant.n_events / t_stop * 1000
            rate_reference[i] = sd_reference.n_events / t_stop * 1000

            if TEST_PLOTS and False:
                fig, ax = plt.subplots(2, 1)
                ax[0].plot(ts1, Vms1, label="Reference " + referenceModel)
                ax[1].plot(ts2, Vms2, label="Testant " + testant)
                for _ax in ax:
                    _ax.legend(loc='upper right')
                    _ax.grid()
                fig.suptitle("Rate: " + str(rate_testant[i]) + " Hz")
                plt.savefig(
                    "/tmp/nestml_nest_integration_test_subthreshold_[" +
                    referenceModel + "]_[" + testant + "]_[I_stim=" +
                    str(I_stim) + "].png")
                plt.close(fig)

        if TEST_PLOTS:
            if len(I_stim_vec) < 20:
                marker = "o"
            else:
                marker = None
            fig, ax = plt.subplots(2, 1)
            ax[0].plot(I_stim_vec * 1E12,
                       rate_reference,
                       marker=marker,
                       label="Reference " + referenceModel)
            ax[1].plot(I_stim_vec * 1E12,
                       rate_testant,
                       marker=marker,
                       label="Testant " + testant)
            for _ax in ax:
                _ax.legend(loc='upper right')
                _ax.grid()
                _ax.set_ylabel("Firing rate [Hz]")
            ax[1].set_xlabel("$I_{inj}$ [pA]")
            plt.savefig("/tmp/nestml_nest_integration_test_subthreshold_[" +
                        referenceModel + "]_[" + testant + "].png")
            plt.close(fig)

        if TEST_PLOTS:
            if len(I_stim_vec) < 20:
                marker = "o"
            else:
                marker = None
            for figsize, fname_snip in zip([(8, 5), (4, 3)], ["", "_small"]):
                fig, ax = plt.subplots(1, 1, figsize=figsize)
                ax = [ax]
                ax[0].plot(I_stim_vec * 1E12,
                           rate_testant,
                           marker=marker,
                           label=referenceModel)
                for _ax in ax:
                    _ax.grid()
                    _ax.set_ylabel("Firing rate [Hz]")
                ax[0].set_xlabel("$I_{inj}$ [pA]")
                plt.tight_layout()
                plt.savefig("/tmp/nestml_models_library_[" + referenceModel +
                            "]_f-I_curve" + fname_snip + ".png")
                plt.close(fig)

        print(testant + " PASSED")
Example n. 26
def eSTDP (alpha, step, mu_plus, mu_minus, initweight, maxweight, tau_m, tau_p):
	import matplotlib.pyplot as plt
	import numpy as np
	import sys
	sys.path.append("/home/vasco/Documenti/NEST/lib/x86_64-linux-gnu/python2.7/site-packages")
	import nest
	import nest.raster_plot
	#nest.Install("albertomodule")


	nest.ResetKernel()
	nest.set_verbosity('M_WARNING')
	maxweight = 20.0
	delay = 1.0
	num_parr = 400



	parrot = nest.Create("parrot_neuron", num_parr)
	
	nest.SetStatus(parrot, {'tau_minus': tau_m})
	senders_e= parrot[:num_parr/4]
	receivers_e = parrot[num_parr/4:num_parr/2]
	senders_i = parrot[num_parr/2: 3*num_parr/4]
	receivers_i = parrot[3*num_parr/4:]


	nest.CopyModel('istdp_synapse', 'stdp',{'alpha': alpha, 'lambda': step, 'mu_plus': mu_plus,'mu_minus': mu_minus, 'Wmax': maxweight, 'tau_plus': tau_p})

	spikes_in = nest.Create('spike_detector', 1)
	spikes_out = nest.Create('spike_detector', 1)

	stimuli = nest.Create("spike_generator", 3)
	stimulus_e = stimuli[0]
	nest.SetStatus([stimulus_e], {'spike_times': [5.0, 10003.0]})

	''' LTP '''



	for i,senders_ei in enumerate(senders_e):
		nest.Connect([stimulus_e], [senders_ei], {'rule': 'all_to_all'}, {'delay' : delay})
		
	for i,receivers_ei in enumerate(receivers_e):
		nest.Connect([senders_e[i]], [receivers_ei], {'rule': 'all_to_all'}, {'model':'stdp', 'weight': initweight, 'delay' : delay})
		delay = delay + 1

	nest.Connect(senders_e, spikes_in)
	nest.Connect(receivers_e, spikes_out)



	''' LTD '''
	delay = 1.0
	stimulus_i = stimuli[1]
	stimulus_i_out = stimuli[2]
	nest.SetStatus([stimulus_i], {'spike_times': [7.0, 10005.0]})
	nest.SetStatus([stimulus_i_out], {'spike_times': [5.0]})

	nest.Connect([stimulus_i_out], receivers_i, {'rule': 'all_to_all'}, {'model': 'static_synapse'})

	for i,senders_ii in enumerate(senders_i):
		nest.Connect([stimulus_i], [senders_ii], {'rule': 'all_to_all'}, {'model': 'static_synapse', 'delay': delay})
		delay = delay + 1

	for i, receivers_ii in enumerate(receivers_i):
		nest.Connect([senders_i[i]], [receivers_ii], {'rule': 'all_to_all'}, {'model': 'stdp', 'weight': initweight, 'delay': 1.0})


	nest.Simulate(10006)

	deltaTime = range(-100, 100)


	Exc = nest.GetConnections(senders_e, receivers_e)
	weights = nest.GetStatus(Exc, "weight")
	#print(weights)

	weights_e = np.zeros(100)
	weights_e_perc = np.zeros(100)

	for i in range(0, 100):
		a = weights[i] - initweight
		weights_e[i] = a
		weights_e_perc[i] = (a/initweight) * 100

	
	Inh = nest.GetConnections(senders_i, receivers_i)
	weights = nest.GetStatus(Inh, "weight")
	

	weights_i = np.zeros(100)
	weights_i_perc = np.zeros(100)
	for i in range(0, 100):
		a = weights[i] - initweight
	
		weights_i[99 - i] = a
		weights_i_perc[99 -i] = (a/initweight) * 100

	
	W = np.concatenate(( weights_i,weights_e))
	W_perc = np.concatenate((weights_i_perc, weights_e_perc))
	pesi = np.concatenate((W, W_perc))
	return(pesi)
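A hypothetical call to eSTDP() above; the parameter values are illustrative only, and the custom 'istdp_synapse' model (presumably provided by the commented-out albertomodule) must be available for the function to run:

pesi = eSTDP(alpha=0.5, step=0.01, mu_plus=1.0, mu_minus=1.0,
             initweight=10.0, maxweight=20.0, tau_m=20.0, tau_p=20.0)
print(pesi)  # concatenated absolute and percentage weight changes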
    def test_nest_integration(self):
        # N.B. all models are assumed to have been already built (see .travis.yml)

        nest.ResetKernel()
        nest.set_verbosity("M_ALL")
        nest.Install("nestml_allmodels_module")

        models = []
        """models.append(("iaf_psc_delta", "iaf_psc_delta_nestml", None, 1E-3))
        models.append(("iaf_psc_exp", "iaf_psc_exp_nestml", None, .01))
        models.append(("iaf_psc_alpha", "iaf_psc_alpha_nestml", None, 1E-3))

        models.append(("iaf_cond_exp", "iaf_cond_exp_nestml", 1E-3, 1E-3))
        models.append(("iaf_cond_alpha", "iaf_cond_alpha_nestml", 1E-3, 1E-3))
        models.append(("iaf_cond_beta", "iaf_cond_beta_nestml", 1E-3, 1E-3, {"tau_rise_ex": 2., "tau_decay_ex": 10., "tau_rise_in": 2., "tau_decay_in": 10.}, {"tau_syn_rise_E": 2., "tau_syn_decay_E": 10., "tau_syn_rise_I": 2., "tau_syn_decay_I": 10.}))        # XXX: TODO: does not work yet when tau_rise = tau_fall (numerical singularity occurs in the propagators)"""

        models.append(
            ("izhikevich", "izhikevich_nestml", 1E-3, 1)
        )  # large tolerance because NEST Simulator model does not use GSL solver, but simple forward Euler
        models.append(("hh_psc_alpha", "hh_psc_alpha_nestml", 1E-3, 1E-3))
        models.append(("iaf_chxk_2008", "iaf_chxk_2008_nestml", 1E-3, 1E-3))
        models.append(("aeif_cond_exp", "aeif_cond_exp_nestml", 1.e-3, 1E-3))
        models.append(
            ("aeif_cond_alpha", "aeif_cond_alpha_nestml", 1.e-3, 1E-3))

        # --------------
        # XXX: TODO!

        # models.append(("hh_cond_exp_traub", "hh_cond_exp_traub_nestml", 1.e-3, 1E-3))
        # models.append(("ht_neuron", "hill_tononi_nestml", None, 1E-3))
        # models.append(("iaf_cond_exp_sfa_rr", "iaf_cond_exp_sfa_rr_nestml", 1.e-3, 1E-3))
        # models.append(("iaf_tum_2000", "iaf_tum_2000_nestml", None, 0.01))
        # models.append(("mat2_psc_exp", "mat2_psc_exp_nestml", None, 0.1))

        for model in models:
            reference = model[0]
            testant = model[1]
            gsl_error_tol = model[2]
            tolerance = model[3]
            if len(model) > 4:
                nest_ref_model_opts = model[4]
            else:
                nest_ref_model_opts = None

            if len(model) > 5:
                custom_model_opts = model[5]
            else:
                custom_model_opts = None

            print("Now testing model: " + str(testant) +
                  " (reference model: " + str(reference) + ")")
            self._test_model(reference, testant, gsl_error_tol, tolerance,
                             nest_ref_model_opts, custom_model_opts)
            self._test_model_subthreshold(reference, testant, gsl_error_tol,
                                          tolerance, nest_ref_model_opts,
                                          custom_model_opts)

        all_models = [
            s[:-7] for s in list(os.walk("models/neurons"))[0][2]
            if s[-7:] == ".nestml"
        ]
        self.generate_models_documentation(models, all_models)
Example n. 28
def __initialize_test_data(params_file_):
    plot = False
    display = True
    save = True

    # ##################################################################################################################
    # Extract parameters from file and build global ParameterSet
    # ==================================================================================================================
    parameter_set = ParameterSpace(params_file_)[0]
    parameter_set = parameter_set.clean(termination='pars')

    if not isinstance(parameter_set, ParameterSet):
        if isinstance(parameter_set, basestring) or isinstance(
                parameter_set, dict):
            parameter_set = ParameterSet(parameter_set)
        else:
            raise TypeError(
                "parameter_set must be ParameterSet, string with full path to parameter file or dictionary"
            )

    # ##################################################################################################################
    # Setup extra variables and parameters
    # ==================================================================================================================
    if plot:
        set_global_rcParams(parameter_set.kernel_pars['mpl_path'])
    paths = set_storage_locations(parameter_set, save)

    np.random.seed(parameter_set.kernel_pars['np_seed'])

    # ##################################################################################################################
    # Set kernel and simulation parameters
    # ==================================================================================================================
    print '\nRunning ParameterSet {0}'.format(parameter_set.label)
    nest.ResetKernel()
    nest.set_verbosity('M_WARNING')
    nest.SetKernelStatus(
        extract_nestvalid_dict(parameter_set.kernel_pars.as_dict(),
                               param_type='kernel'))

    # ##################################################################################################################
    # Build network
    # ==================================================================================================================
    net = Network(parameter_set.net_pars)

    # ##################################################################################################################
    # Randomize initial variable values
    # ==================================================================================================================
    for idx, n in enumerate(list(iterate_obj_list(net.populations))):
        if hasattr(parameter_set.net_pars, "randomize_neuron_pars"):
            randomize = parameter_set.net_pars.randomize_neuron_pars[idx]
            for k, v in randomize.items():
                n.randomize_initial_states(k,
                                           randomization_function=v[0],
                                           **v[1])

    ####################################################################################################################
    # Build Input Signal Sets
    # ==================================================================================================================
    assert hasattr(parameter_set, "input_pars")

    total_stimulation_time = parameter_set.kernel_pars.sim_time + parameter_set.kernel_pars.transient_t

    # Current input (need to build 2 separate noise signals for the 2 input channels)
    # Generate input for channel 1
    input_noise_ch1 = InputNoise(parameter_set.input_pars.noise,
                                 rng=np.random,
                                 stop_time=total_stimulation_time)
    input_noise_ch1.generate()
    input_noise_ch1.re_seed(parameter_set.kernel_pars.np_seed)

    # Generate input for channel 2
    input_noise_ch2 = InputNoise(parameter_set.input_pars.noise,
                                 rng=np.random,
                                 stop_time=total_stimulation_time)
    input_noise_ch2.generate()
    input_noise_ch2.re_seed(parameter_set.kernel_pars.np_seed)

    if plot:
        inp_plot = InputPlots(stim_obj=None,
                              input_obj=None,
                              noise_obj=input_noise_ch1)
        inp_plot.plot_noise_component(display=display,
                                      save=paths['figures'] +
                                      "/InputNoise_CH1")

        inp_plot = InputPlots(stim_obj=None,
                              input_obj=None,
                              noise_obj=input_noise_ch2)
        inp_plot.plot_noise_component(display=display,
                                      save=paths['figures'] +
                                      "/InputNoise_CH2")

    # ##################################################################################################################
    # Build and connect input
    # ==================================================================================================================
    enc_layer_ch1 = EncodingLayer(parameter_set.encoding_ch1_pars,
                                  signal=input_noise_ch1)
    enc_layer_ch1.connect(parameter_set.encoding_ch1_pars, net)

    enc_layer_ch2 = EncodingLayer(parameter_set.encoding_ch2_pars,
                                  signal=input_noise_ch2)
    enc_layer_ch2.connect(parameter_set.encoding_ch2_pars, net)

    # ##################################################################################################################
    # Connect Devices
    # ==================================================================================================================
    net.connect_devices()

    # ##################################################################################################################
    # Simulate
    # ==================================================================================================================
    if parameter_set.kernel_pars.transient_t:
        net.simulate(parameter_set.kernel_pars.transient_t)
        net.flush_records()

    net.simulate(parameter_set.kernel_pars.sim_time +
                 nest.GetKernelStatus()['resolution'])

    # ##################################################################################################################
    # Extract and store data
    # ==================================================================================================================
    net.extract_population_activity(
        t_start=parameter_set.kernel_pars.transient_t,
        t_stop=parameter_set.kernel_pars.sim_time +
        parameter_set.kernel_pars.transient_t)
    net.extract_network_activity()

    # ##################################################################################################################
    # Analyse / plot data
    # ==================================================================================================================
    analysis_interval = [
        parameter_set.kernel_pars.transient_t,
        parameter_set.kernel_pars.transient_t +
        parameter_set.kernel_pars.sim_time
    ]

    results = dict()

    for idd, nam in enumerate(net.population_names):
        results.update({nam: {}})
        results[nam] = single_neuron_responses(net.populations[idd],
                                               parameter_set,
                                               pop_idx=idd,
                                               start=analysis_interval[0],
                                               stop=analysis_interval[1],
                                               plot=plot,
                                               display=display,
                                               save=paths['figures'] +
                                               paths['label'])
        if results[nam]['rate']:
            print('Output Rate [{0}] = {1} spikes/s'.format(
                str(nam), str(results[nam]['rate'])))

    # ######################################################################################################################
    # Save data
    # ======================================================================================================================
    data = dict()

    data['connections_from'] = {
        pop.name: nest.GetConnections(source=pop.gids)
        for (idx, pop) in enumerate(net.populations)
    }
    data['connections_to'] = {
        pop.name: nest.GetConnections(target=pop.gids)
        for (idx, pop) in enumerate(net.populations)
    }
    data['results'] = results

    data['input'] = {
        'channel1': input_noise_ch1.noise_signal.analog_signals[.0].signal,
        'channel2': input_noise_ch2.noise_signal.analog_signals[.0].signal
    }

    data['network'] = {
        'populations': {
            net.populations[0].name: net.populations[0]
        }
    }

    return data
See Also
~~~~~~~~

:doc:`brunel_alpha_nest`

"""

###############################################################################
# Import all necessary modules for simulation, analysis and plotting.

import time
import nest
import nest.raster_plot
import matplotlib.pyplot as plt

nest.ResetKernel()

###############################################################################
# Assigning the current time to a variable in order to determine the build
# time of the network.

startbuild = time.time()

###############################################################################
# Assigning the simulation parameters to variables.

dt = 0.1  # the resolution in ms
simtime = 1000.0  # Simulation time in ms
delay = 1.5  # synaptic delay in ms

###############################################################################
Example n. 30
    def run_post_trace_test_nest_(self,
                                  show_all_nest_trace_samples=False):

        nest.set_verbosity("M_WARNING")

        nest.ResetKernel()
        nest.resolution = self.resolution_

        wr = nest.Create('weight_recorder')
        nest.CopyModel("stdp_synapse", "stdp_synapse_rec",
                       {"weight_recorder": wr, "weight": 1.})

        # create spike_generators with these times
        pre_sg_ps = nest.Create("spike_generator",
                                params={"spike_times": self.pre_spike_times_,
                                        'precise_times': True})
        post_sg_ps = nest.Create("spike_generator",
                                 params={"spike_times": self.post_spike_times_,
                                         'precise_times': True})

        # create parrot neurons and connect spike_generators
        pre_parrot_ps = nest.Create("parrot_neuron_ps")
        post_parrot_ps = nest.Create("parrot_neuron_ps",
                                     params={"tau_minus": self.tau_minus_})

        nest.Connect(pre_sg_ps, pre_parrot_ps,
                     syn_spec={"delay": self.delay_})
        nest.Connect(post_sg_ps, post_parrot_ps,
                     syn_spec={"delay": self.delay_})

        # create spike recorder --- debugging only
        spikes = nest.Create("spike_recorder")
        nest.Connect(pre_parrot_ps + post_parrot_ps, spikes)

        # connect both parrot neurons with an STDP synapse onto port 1,
        # so that spikes transmitted through the STDP connection are
        # not repeated by the postsynaptic parrot neuron.
        nest.Connect(
            pre_parrot_ps, post_parrot_ps,
            syn_spec={'synapse_model': 'stdp_synapse_rec',
                      'receptor_type': 1,
                      'delay': self.delay_})

        # get STDP synapse
        syn_ps = nest.GetConnections(source=pre_parrot_ps,
                                     synapse_model="stdp_synapse_rec")

        print("[py] Total simulation time: " + str(self.sim_time_) + " ms")
        n_steps = int(np.ceil(self.sim_time_ / self.delay_))
        trace_nest = []
        trace_nest_t = []
        t = nest.biological_time
        trace_nest_t.append(t)
        post_tr = nest.GetStatus(post_parrot_ps)[0]['post_trace']
        trace_nest.append(post_tr)
        for step in range(n_steps):
            print("\n[py] simulating for " + str(self.delay_) + " ms")
            nest.Simulate(self.delay_)
            t = nest.biological_time
            nearby_pre_spike = np.any(
                np.abs(t - np.array(self.pre_spike_times_) - self.delay_) < self.resolution_ / 2.)
            if show_all_nest_trace_samples or nearby_pre_spike:
                trace_nest_t.append(t)
                post_tr = nest.GetStatus(post_parrot_ps)[0]['post_trace']
                trace_nest.append(post_tr)
                print("[py] Received NEST trace: " +
                      str(post_tr) + " at time t = " + str(t))

        return trace_nest_t, trace_nest
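As a point of comparison, a minimal sketch (an assumption about how the returned samples might be checked, not part of the original test) of the analytic post-synaptic trace: every post spike adds 1 to the trace, which then decays exponentially with time constant tau_minus; synaptic delays are ignored here for simplicity.

import numpy as np

def analytic_post_trace(t, post_spike_times, tau_minus):
    # Sum of exponentially decaying contributions of all post spikes up to time t.
    post_spike_times = np.asarray(post_spike_times, dtype=float)
    past = post_spike_times[post_spike_times <= t]
    return np.sum(np.exp(-(t - past) / tau_minus))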