Code example #1
def setup(timestep=0.1, min_delay=0.1, max_delay=10.0, **extra_params):
    """
    Should be called at the very beginning of a script.
    extra_params contains any keyword arguments that are required by a given
    simulator but not by others.
    """
    global tempdir

    common.setup(timestep, min_delay, max_delay, **extra_params)
    # clear the SLI stack; if this is not done the stack keeps growing and leaks memory
    nest.sr('clear')

    # reset the simulation kernel
    nest.ResetKernel()

    if 'verbosity' in extra_params:
        nest_verbosity = extra_params['verbosity'].upper()
    else:
        nest_verbosity = "WARNING"
    nest.sli_run("M_%s setverbosity" % nest_verbosity)

    if "spike_precision" in extra_params:
        simulator.state.spike_precision = extra_params["spike_precision"]
        if extra_params["spike_precision"] == 'off_grid':
            simulator.state.default_recording_precision = 15
    nest.SetKernelStatus(
        {'off_grid_spiking': simulator.state.spike_precision == 'off_grid'})
    if "recording_precision" in extra_params:
        simulator.state.default_recording_precision = extra_params[
            "recording_precision"]

    # tell NEST to erase previously written files (the default with all the other simulators)
    nest.SetKernelStatus({'overwrite_files': True})

    # set tempdir
    tempdir = tempfile.mkdtemp()
    tempdirs.append(tempdir)  # append tempdir to tempdirs list
    nest.SetKernelStatus({
        'data_path': tempdir,
    })

    # set kernel RNG seeds
    num_threads = extra_params.get('threads') or 1
    if 'rng_seeds' in extra_params:
        rng_seeds = extra_params['rng_seeds']
    else:
        rng_seeds_seed = extra_params.get('rng_seeds_seed') or 42
        rng = NumpyRNG(rng_seeds_seed)
        rng_seeds = (rng.rng.uniform(size=num_threads * num_processes()) *
                     100000).astype('int').tolist()
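    # NEST expects one seed per virtual process (local threads times MPI
    # processes), hence num_threads * num_processes() seeds are drawn above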
    logger.debug("rng_seeds = %s" % rng_seeds)
    nest.SetKernelStatus({
        'local_num_threads': num_threads,
        'rng_seeds': rng_seeds
    })

    # set resolution
    nest.SetKernelStatus({'resolution': timestep})

    if 'allow_offgrid_spikes' in nest.GetDefaults('spike_generator'):
        nest.SetDefaults('spike_generator', {'allow_offgrid_spikes': True})

    # Set min_delay and max_delay for all synapse models
    NEST_SYNAPSE_TYPES = nest.Models(
        mtype='synapses')  # need to rebuild after ResetKernel
    for synapse_model in NEST_SYNAPSE_TYPES:
        nest.SetDefaults(synapse_model, {
            'delay': min_delay,
            'min_delay': min_delay,
            'max_delay': max_delay
        })
    simulator.connection_managers = []
    simulator.populations = []
    simulator.reset()

    return rank()
Code example #2
    def do_test_targets(self):
        nest.ResetKernel()
        nest.set_verbosity('M_ALL')
        # Testing with 2 MPI processes
        nest.SetKernelStatus({'resolution': 0.1, 'total_num_virtual_procs': 2})
        # Update the SP interval
        nest.EnableStructuralPlasticity()
        nest.SetStructuralPlasticityStatus({
            'structural_plasticity_update_interval':
            100,
        })

        growth_curve = {
            'growth_curve': "gaussian",
            'growth_rate': 0.0001,  # Beta (elements/ms)
            'continuous': False,
            'eta': 0.1,
            'eps': 0.7,
        }
        structural_p_elements_E = {
            'Den_ex': growth_curve,
            'Den_in': growth_curve,
            'Axon_ex': growth_curve
        }
        neuronDict = {
            'V_m': -60.,
            't_ref': 5.0,
            'V_reset': -60.,
            'V_th': -50.,
            'C_m': 200.,
            'E_L': -60.,
            'g_L': 10.,
            'E_ex': 0.,
            'E_in': -80.,
            'tau_syn_ex': 5.,
            'tau_syn_in': 10.,
            'I_e': 220.
        }

        nest.SetDefaults("iaf_cond_exp", neuronDict)
        neuronsE = nest.Create('iaf_cond_exp', 1,
                               {'synaptic_elements': structural_p_elements_E})

        # synapses
        synDictE = {
            'model': 'static_synapse',
            'weight': 3.,
            'pre_synaptic_element': 'Axon_ex',
            'post_synaptic_element': 'Den_ex'
        }

        nest.SetStructuralPlasticityStatus(
            {'structural_plasticity_synapses': {
                'synapseEE': synDictE,
            }})

        try:
            nest.Simulate(200 * 1000)
        except Exception:
            print(sys.exc_info()[0])
            self.fail("Exception during simulation")
Code example #3
                 connections,
                 pparam=True):
        super().__init__(nparam, cparam, synparam, noise, synparamNoise,
                         connections, pparam)
        #self.setThreshold()

    def setThreshold(self):
        check = ['GPTI', 'GPTA', 'STN', 'GPI']
        for ID in self.nID:
            #print(self.nID[ID])
            if ID not in check:
                nest.SetStatus([self.nID[ID][0]], {'V_th': 1e3})
            else:
                nest.SetStatus([self.nID[ID][0]], {'V_peak': 1e3})


paramNEW = t.tune(param.nparam, param.cparamOLD, param.staticsyn, param.noise,
                  param.staticsynNoise, param.connections)

BG1 = freeMembrane(paramNEW.nparam, paramNEW.cparam, paramNEW.synparam,
                   paramNEW.noise, paramNEW.synparamNoise,
                   paramNEW.connections, paramNEW.pparam)
nest.SetKernelStatus({
    'data_path':
    '/Users/kimhedelin/Google Drive/VT18/Neuroscience/simulation/BGnetwork/sample/freeMembrane/data/',
    'overwrite_files': True
})
BG1.connectMultimeterNew(to_file=True)
BG1.connectSpikeDet()
BG1.setIe(0.0)
BG1.simulate(500.0)
Code example #4
def run_nest_simulation(
        neuron_model_name,
        synapse_model_name,
        neuron_opts,
        syn_opts,
        nest_modules_to_load=None,
        resolution=1.,  # [ms]
        sim_time=None,  # if None, computed from pre and post spike times
        pre_spike_times_req=None,
        post_spike_times_req=None,
        J_ext=10000.,
        fname_snip=""):

    if pre_spike_times_req is None:
        pre_spike_times_req = []

    if post_spike_times_req is None:
        post_spike_times_req = []

    if sim_time is None:
        sim_time = max(
            np.amax(pre_spike_times_req),
            np.amax(post_spike_times_req)) + 10. + 3 * syn_opts["delay"]

    nest.set_verbosity("M_ALL")

    # Set parameters of the NEST simulation kernel
    nest.ResetKernel()

    try:
        if nest_modules_to_load:
            for s in nest_modules_to_load:
                nest.Install(s)
    except Exception:
        pass  # will fail when run in a loop ("module is already loaded")

    nest.SetKernelStatus({'print_time': False, 'local_num_threads': 1})
    nest.SetKernelStatus({'resolution': resolution})

    nest.SetDefaults(neuron_model_name, neuron_opts)

    # Create nodes -------------------------------------------------

    neurons = nest.Create(neuron_model_name, 2)

    print("Requested pre times: " + str(pre_spike_times_req))
    print("Requested post times: " + str(post_spike_times_req))

    # one more pre spike to obtain updated values at end of simulation
    pre_spike_times_req = np.hstack(
        (pre_spike_times_req, [sim_time - syn_opts["delay"]]))

    external_input = nest.Create('spike_generator',
                                 params={'spike_times': pre_spike_times_req})
    external_input1 = nest.Create('spike_generator',
                                  params={'spike_times': post_spike_times_req})

    spikes = nest.Create('spike_recorder')
    weight_recorder_E = nest.Create('weight_recorder')

    # Set models default -------------------------------------------

    nest.CopyModel('static_synapse', 'excitatory_noise', {
        'weight': J_ext,
        'delay': syn_opts["delay"]
    })

    _syn_opts = syn_opts.copy()
    _syn_opts['Wmax'] = _syn_opts.pop('w_max')
    _syn_opts['Wmin'] = _syn_opts.pop('w_min')
    _syn_opts['w'] = _syn_opts.pop('w_init')
    _syn_opts.pop('delay')
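    # the generic keys w_max / w_min / w_init are mapped above onto the Wmax /
    # Wmin / w parameter names expected by the NEST synapse model; delay is
    # dropped because it is not set via the synapse defaults here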
    nest.CopyModel(synapse_model_name, synapse_model_name + "_rec",
                   {'weight_recorder': weight_recorder_E[0]})
    nest.SetDefaults(synapse_model_name + "_rec", _syn_opts)

    # Connect nodes ------------------------------------------------

    nest.Connect(neurons[0],
                 neurons[1],
                 syn_spec={'synapse_model': synapse_model_name + "_rec"})
    nest.Connect(external_input, neurons[0], syn_spec='excitatory_noise')
    nest.Connect(external_input1, neurons[1], syn_spec='excitatory_noise')
    # spike_recorder ignores connection delay; recorded times are times of spike creation rather than spike arrival
    nest.Connect(neurons, spikes)

    # Simulate -----------------------------------------------------

    nest.Simulate(sim_time)

    connections = nest.GetConnections(neurons, neurons)
    gid_pre = nest.GetStatus(connections, 'source')[0]
    gid_post = nest.GetStatus(connections, 'target')[0]

    events = nest.GetStatus(spikes, 'events')[0]
    times_spikes = np.array(events['times'])
    senders_spikes = events['senders']

    events = nest.GetStatus(weight_recorder_E, 'events')[0]
    times_weights = events['times']
    weight_simulation = events['weights']
    return times_weights, weight_simulation, gid_pre, gid_post, times_spikes, senders_spikes, sim_time
Code example #5
# Definition of the threshold rate (the external rate needed to hold the
# membrane potential around its threshold), of the external firing rate, and of
# the rate of the Poisson generator, which is multiplied by the in-degree CE
# and converted to Hz by multiplying by 1000.

nu_th = (theta * CMem) / (J_ex * CE * np.exp(1) * tauMem * tauSyn)
nu_ex = eta * nu_th
p_rate = 1000.0 * nu_ex * CE

################################################################################
# Configuration of the simulation kernel by the previously defined time
# resolution used in the simulation. Setting ``print_time`` to `True` prints the
# already processed simulation time as well as its percentage of the total
# simulation time.

nest.SetKernelStatus({"resolution": dt, "print_time": True,
                      "overwrite_files": True})

print("Building network")

###############################################################################
# Configuration of the models ``iaf_psc_alpha`` and ``poisson_generator`` using
# ``SetDefaults``. This function expects the model to be passed as a string and
# the parameters to be specified in a dictionary. All instances of these models
# created after this point will have the properties specified in the dictionary
# by default.

nest.SetDefaults("iaf_psc_alpha", neuron_params)
nest.SetDefaults("poisson_generator", {"rate": p_rate})

###############################################################################
# Creation of the nodes using ``Create``. We store the returned handles in
Code example #6
    def do_the_nest_simulation(self):
        """
        This function is where calls to NEST reside. Returns the generated pre- and post spike sequences and the
        resulting weight established by STDP.
        """
        nest.set_verbosity('M_WARNING')
        nest.ResetKernel()
        nest.SetKernelStatus({'resolution': self.resolution})

        presynaptic_neuron, postsynaptic_neuron = nest.Create(
            self.nest_neuron_model, 2, params=self.neuron_parameters)

        generators = nest.Create(
            "poisson_generator",
            2,
            params=({
                "rate":
                self.presynaptic_firing_rate,
                "stop":
                (self.simulation_duration - self.hardcoded_trains_length)
            }, {
                "rate":
                self.postsynaptic_firing_rate,
                "stop":
                (self.simulation_duration - self.hardcoded_trains_length)
            }))
        presynaptic_generator = generators[0]
        postsynaptic_generator = generators[1]

        wr = nest.Create('weight_recorder')
        nest.CopyModel(self.synapse_model, self.synapse_model + "_rec",
                       {"weight_recorder": wr})

        spike_senders = nest.Create(
            "spike_generator",
            2,
            params=({
                "spike_times":
                self.hardcoded_pre_times + self.simulation_duration -
                self.hardcoded_trains_length
            }, {
                "spike_times":
                self.hardcoded_post_times + self.simulation_duration -
                self.hardcoded_trains_length
            }))
        pre_spike_generator = spike_senders[0]
        post_spike_generator = spike_senders[1]

        # The recorder is to save the randomly generated spike trains.
        spike_recorder = nest.Create("spike_recorder")

        nest.Connect(presynaptic_generator + pre_spike_generator,
                     presynaptic_neuron,
                     syn_spec={
                         "synapse_model": "static_synapse",
                         "weight": 9999.
                     })
        nest.Connect(postsynaptic_generator + post_spike_generator,
                     postsynaptic_neuron,
                     syn_spec={
                         "synapse_model": "static_synapse",
                         "weight": 9999.
                     })
        nest.Connect(presynaptic_neuron + postsynaptic_neuron,
                     spike_recorder,
                     syn_spec={"synapse_model": "static_synapse"})
        # The synapse of interest itself
        self.synapse_parameters["synapse_model"] += "_rec"
        nest.Connect(presynaptic_neuron,
                     postsynaptic_neuron,
                     syn_spec=self.synapse_parameters)
        self.synapse_parameters["synapse_model"] = self.synapse_model

        nest.Simulate(self.simulation_duration)

        all_spikes = nest.GetStatus(spike_recorder, keys='events')[0]
        pre_spikes = all_spikes['times'][all_spikes['senders'] ==
                                         presynaptic_neuron.tolist()[0]]
        post_spikes = all_spikes['times'][all_spikes['senders'] ==
                                          postsynaptic_neuron.tolist()[0]]

        t_hist = nest.GetStatus(wr, "events")[0]["times"]
        weight = nest.GetStatus(wr, "events")[0]["weights"]

        return pre_spikes, post_spikes, t_hist, weight
Code example #7
N_E = 8000
N_I = 2000
N_neurons = N_E + N_I

C_E = N_E // 10  # number of excitatory synapses per neuron
C_I = N_I // 10  # number of inhibitory synapses per neuron

J_E = 0.1
J_I = -g * J_E

nu_ex = eta * V_th / (J_E * C_E * tau_m)  # rate of an external neuron in ms^-1
p_rate = 1000.0 * nu_ex * C_E  # rate of the external population in s^-1
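# note: V_th / (J_E * C_E * tau_m) is the external rate that would, on average,
# drive the membrane potential to threshold; eta scales the external drive
# relative to this threshold rate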

# Set parameters of the NEST simulation kernel
nest.SetKernelStatus({"print_time": True, "local_num_threads": 2})

# Create and seed RNGs
ms = 1000  # master seed
n_vp = nest.GetKernelStatus('total_num_virtual_procs')
pyrngs = [numpy.random.RandomState(s) for s in range(ms, ms + n_vp)]
nest.SetKernelStatus({
    'grng_seed': ms + n_vp,
    'rng_seeds': range(ms + n_vp + 1, ms + 1 + 2 * n_vp)
})

nest.SetDefaults(
    "iaf_psc_delta", {
        "C_m": 1.0,
        "tau_m": tau_m,
        "t_ref": 2.0,
Code example #8
File: DBCmodel.py Project: ModelDBRepository/257610
# coding: utf-8

import numpy as np, pylab as plt
import nest
import seaborn as sns

sns.set()
import sys

sys.path.insert(0, '/home/nik/Documents/BCPNN_NEST_Module'
                )  # prepend the module directory to the Python path
import BCPNN  # 'pt_module'

nest.ResetKernel()
BCPNN.InstallBCPNN()
nest.SetKernelStatus({'resolution': 0.001})
sns.set(font_scale=1.7)

syn_ports = {'AMPA': 1, 'NMDA': 2, 'GABA': 3}  #receptor types
f_desired = 7.5
f_max = 55.

#DBCmodel.py includes the parameters we used to achieve satisfactory electrophysiological fidelity.
#The simulations aim at reproducing spike patterns under sweeps of increasing suprathreshold current
#steps (10 pA each) and other reported activity. The range of the stimulation input current is
#comparable to the one reported in the paper below.

#The spike patterns produced (figure DBC_ActivityPatterns) can be directly compared with the findings of fig. 4B in
#"Cluster Analysis-Based Physiological Classification and Morphological Properties of Inhibitory
#Neurons in Layers 2-3 of Monkey Dorsolateral Prefrontal Cortex" (Krimer et al., 2005).
Code example #9
File: simulator.py Project: anthrax3/PyNN
 def _set(self, val):
     try:
         nest.SetKernelStatus({name: dtype(val)})
     except nest.NESTError as e:
         reraise(e, "%s = %s (%s)" % (name, val, type(val)))
Code example #10
    def __init__(self,
                 nStates,
                 nActions,
                 weightOffset,
                 scalingFactor,
                 evaluationIter=100,
                 lam=False,
                 weightParams=None,
                 hdf=True):
        '''
        Constructor
        '''

        nest.ResetKernel()
        nest.hl_api.set_verbosity(
            'M_ERROR')  # Do not print stuff during simulation
        nest.SetKernelStatus({
            'print_time': False,
            'local_num_threads': 1
        })  # Number of threads used

        self.nActions = nActions
        self.nStates = nStates

        self.Q = np.zeros((nStates, nActions))
        self.Q1 = np.zeros((nStates, nActions))

        self.moveCntRst = 100
        self.moveCnt = self.moveCntRst
        self.eta = None

        self.weightOffset = weightOffset
        self.scalingFactor = scalingFactor

        if lam != 0.:
            self.eligibilityTraces = np.zeros((nStates, nActions))
            self.lam = True
        else:
            self.lam = False

        self.states = []
        self.actions = []
        self.rewards = []
        self.eventsTimes = []
        self.eventAddresses = []
        '''Hold lists to enable overall evaluation for LTL'''
        self.accumulatedStates = []
        self.accumulatedActions = []
        self.accumulatedRewards = []

        self.QDistance = []
        self.currentIteration = 0
        self.evaluationIter = evaluationIter

        self.discreteWeights = False

        if weightParams is not None:
            self.discreteWeights = True
            self.minWeight = weightParams['minWeight']
            self.weightStep = (
                weightParams['maxWeight'] -
                weightParams['minWeight']) / weightParams['resolution']
Code example #11
def main():
    nest.SetKernelStatus({'print_time': True, 'local_num_threads': 11})

    sim_time = 200000

    # stimulus
    # A time step
    # Start at 300 ms, and go to the end of the video at steps of 300
    # 300, 600, 900, ...
    stim_interval = 300

    # How often we take a sample
    stim_length = 50

    # How far ahead in the video we try to predict ??
    # rate?

    # Frequency in hertz
    stim_rate = 200  # [1/s]

    # delta sample ??
    readout_delay = 10

    # Returns evenly spaced values within an interval
    # Picking stimulus times within the interval
    # start, stop, step
    stim_times = np.arange(stim_interval,
                           sim_time - stim_length - readout_delay,
                           stim_interval)
    readout_times = stim_times + stim_length + readout_delay

    def gen_stimulus_pattern():
        return poisson_generator(stim_rate, t_stop=stim_length)

    # input spikes is a list of spikes and targets is an int (either 0 or 1)
    inp_spikes, targets = generate_stimulus_xor(stim_times,
                                                gen_burst=gen_stimulus_pattern)

    # Excitatory = 1000, inhibitory = 250, recorded = 500
    lsm = LSM(n_exc=1000, n_inh=250, n_rec=500)

    # random spikes are injected into the input neurons; each input neuron is
    # connected to 100 liquid (excitatory) neurons.
    inject_spikes(inp_spikes, lsm.inp_nodes)

    # SIMULATE
    nest.Simulate(sim_time)

    readout_times = readout_times[5:]
    targets = targets[5:]

    states = lsm.get_states(readout_times, tau=20)

    # add a constant column of ones to the states so the linear readout can
    # learn a bias (intercept) term
    # hstack -> concatenates the two arrays horizontally
    states = np.hstack([states, np.ones((np.size(states, 0), 1))])

    n_examples = np.size(targets, 0)
    n_examples_train = int(n_examples * 0.8)

    # split the states into the training states and the testing
    # 80% training, 20% testing
    train_states, test_states = states[:n_examples_train, :], states[
        n_examples_train:, :]
    train_targets, test_targets = targets[:n_examples_train], targets[
        n_examples_train:]

    readout_weights = lsm.compute_readout_weights(train_states,
                                                  train_targets,
                                                  reg_fact=5.0)

    def classify(prediction):
        return (prediction >= 0.5).astype(int)

    # Training

    # X * w
    train_prediction = lsm.compute_prediction(train_states, readout_weights)

    # classifies the prediction as 0s and 1s
    train_results = classify(train_prediction)

    # Testing
    test_prediction = lsm.compute_prediction(test_states, readout_weights)
    test_results = classify(test_prediction)

    print("simulation time: {}ms".format(sim_time))
    print("number of stimuli: {}".format(len(stim_times)))
    print("size of each state: {}".format(np.size(states, 1)))

    print("---------------------------------------")

    def eval_prediction(prediction, targets, label):
        # n_successes = sum(prediction == targets) ?
        n_fails = sum(abs(prediction - targets))
        n_total = len(targets)
        print("mismatched {} examples: {:d}/{:d} [{:.1f}%]".format(
            label, n_fails, n_total, n_fails / n_total * 100))

    eval_prediction(train_results, train_targets, "training")
    eval_prediction(test_results, test_targets, "test")
Code example #12
'''
First, we import all necessary modules for simulation, analysis and
plotting.
'''

import nest
import matplotlib.pyplot as plt
import numpy as np

nest.ResetKernel()  # in case we run the script multiple times from iPython
'''
We first create a figure for the plot and set the resolution of NEST.
'''

plt.figure()
nest.SetKernelStatus({'resolution': 0.01})
'''
Then we create two instances of the `sinusoidal_gamma_generator`
with two different orders of the underlying gamma process using
`Create`. Moreover, we create devices to record firing rates
(`multimeter`) and spikes (`spike_detector`) and connect them to the
generators using `Connect`.

'''

g = nest.Create('sinusoidal_gamma_generator',
                n=2,
                params=[{
                    'rate': 10000.0,
                    'amplitude': 5000.0,
                    'frequency': 10.0,
Code example #13
    state_mat = state_mat.T
    signal_pred = LinearRegression(n_jobs=-1, fit_intercept=False, normalize=True, copy_X=False).fit(state_mat, signal).predict(state_mat)
    MSE_pred = np.mean((signal-signal_pred)**2)
    return signal_pred, 1. - (MSE_pred / np.var(signal)), MSE_pred

# parameters
T = 1000  # total number of time steps
dt = 0.1  # simulation resolution
nEnc = 1000 # neurons encoding layer
J_bias = 200. # [pA]

# Initialize NEST
np.random.seed(42)
nest.ResetKernel()
nest.SetKernelStatus({
    'resolution': dt,
    'print_time': True,
    'local_num_threads': 8})

tuning = 250. * np.random.randn(nEnc) + 1000.

# randomize thresholds and initial states
thresholds = 5 * np.random.randn(nEnc) - 50.
Vm0 = np.array(np.random.uniform(low=-70., high=-50., size=int(nEnc)))

for u_vals in [0., 0.1, 0.2, 0.3, 0.4, 0.5]:

    d_vals = int(sys.argv[1])

    print("computing capacity for offset: {0} and duration: {1}".format(str(u_vals), str(d_vals)))

    # Initialize NEST
Code example #14
    def setUp(self):
        # test parameter to compare analytic solution to simulation
        self.rtol = 1.0

        # test parameters
        self.N = 100
        self.rate_ex = 1.5 * 1e4
        self.J = 0.1

        # simulation parameters
        self.simtime = 500.
        self.dt = 0.1
        self.start = 200.

        nest.set_verbosity('M_WARNING')
        nest.ResetKernel()
        nest.SetKernelStatus({
            'resolution': self.dt,
            'use_wfr': False,
            'print_time': True
        })

        # set up driven integrate-and-fire neuron

        self.iaf_psc_delta = nest.Create('iaf_psc_delta',
                                         self.N)  # , params={"C_m": 1.0})

        self.poisson_generator = nest.Create('poisson_generator',
                                             params={'rate': self.rate_ex})
        nest.Connect(self.poisson_generator,
                     self.iaf_psc_delta,
                     syn_spec={
                         'weight': self.J,
                         'delay': self.dt
                     })

        self.spike_detector = nest.Create("spike_detector",
                                          params={'start': self.start})
        nest.Connect(self.iaf_psc_delta, self.spike_detector)

        # set up driven siegert neuron

        neuron_status = nest.GetStatus(self.iaf_psc_delta)[0]
        siegert_params = {
            'tau_m': neuron_status['tau_m'],
            't_ref': neuron_status['t_ref'],
            'theta': neuron_status['V_th'] - neuron_status['E_L'],
            'V_reset': neuron_status['V_reset'] - neuron_status['E_L']
        }
        self.siegert_neuron = nest.Create('siegert_neuron',
                                          params=siegert_params)

        self.siegert_drive = nest.Create('siegert_neuron',
                                         1,
                                         params={
                                             'mean': self.rate_ex,
                                             'theta': siegert_params['theta']
                                         })
        J_mu_ex = neuron_status['tau_m'] * 1e-3 * self.J
        J_sigma_ex = neuron_status['tau_m'] * 1e-3 * self.J**2
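        # diffusion approximation of Poisson input: the mean input scales as
        # tau_m * J * rate and its variance as tau_m * J**2 * rate; the rate is
        # supplied by siegert_drive, so only the factors (tau_m in s) are set here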
        syn_dict = {
            'drift_factor': J_mu_ex,
            'diffusion_factor': J_sigma_ex,
            'model': 'diffusion_connection'
        }
        nest.Connect(self.siegert_drive,
                     self.siegert_neuron,
                     syn_spec=syn_dict)

        self.multimeter = nest.Create("multimeter",
                                      params={
                                          'record_from': ['rate'],
                                          'interval': self.dt
                                      })
        nest.Connect(self.multimeter, self.siegert_neuron)
Code example #15
spiking_mode_loop = ['regular_spiking','fast_spiking', 'chattering']
#spiking_mode_loop = np.array([1.,3.,5.])
fig = pl.figure(1,(8,5))
#ax = fig.add_axes([0.55,0.2,0.33,0.7])
#ax = fig.add_subplot(111)	
#ax.set_xlabel('Time (ms)')
#ax.set_ylabel('Membrane potential (mV)')
labelsize = 14
ticksize = 14
col_lis = [np.array([63.,25.,255.])/255., np.array([204.,99.,20.])/255.,np.array([178.,120.,76.])/255., np.array([200.,0.,0.])/255.,np.array([153.,88.,61.])/255., np.array([204.,187.,20.])/255.]
for i,spiking_mode in enumerate(spiking_mode_loop):
	file_name_sd = spiking_mode+'_sd'
	file_name_mem = spiking_mode+'_vm'
	nest.ResetKernel()
	nest.SetKernelStatus({'resolution':0.01, 'overwrite_files': True})
	T = 500.
	neuron = nest.Create('psdb')	    
	if spiking_mode == 'regular_spiking':
	    nest.SetStatus([1],{'spb':1.,
				})
			  
	elif spiking_mode == 'fast_spiking':
	    nest.SetStatus([1],{'spb':3.,

				})
	elif spiking_mode == 'chattering':
	    nest.SetStatus([1],{'spb':5.,

				})
				
Code example #16
gate_ampl = np.zeros([len(Param_dc['amplitude']['PY']), 3])

# for each neuron type, simulate the current injection, extract the pre-synaptic
# membrane potential and calculate gating variables. AMPA and NMDA kinetics are
# obtained from the pre-synaptic PY neuron, GABA from the pre-synaptic FS neuron

for type_id in range(2):

    neuron_type = ['PY', 'FS'][type_id]

    #####################################################
    #########   SIMULATE: CURRENT INJECTION     #########
    #####################################################

    nest.ResetKernel()
    nest.SetKernelStatus({"resolution": dt, "local_num_threads": 5})
    # create neurons
    neuron_name = 'compte2003_' + ['ex', 'in'][type_id]
    neuron = nest.Create(neuron_name,
                         n=len(Param_dc['amplitude']['PY']),
                         params=Params_neuron[neuron_type])

    # create and connect DC input
    gen = []
    for gen_id in range(len(Param_dc['amplitude']['PY'])):

        params = {
            'start': Param_dc['start'],
            'stop': Param_dc['stop'],
            'amplitude': Param_dc['amplitude'][neuron_type][gen_id]
        }
Code example #17
import sys
sys.path.insert(0, '/home/nik/Documents/BCPNN_NEST_Module'
                )  # prepend the module directory to the Python path
import BCPNN  # 'pt_module'

# total inhibitory input current received by pyramidal cells in MC0
I_GABA_PYR0_list = []
ts_GABA_PYR0_list = []

# multiple rounds
iterations = 100
for iter in range(iterations):
    print(iter)
    # Reset the NEST kernel at the beginning of each iteration
    nest.ResetKernel()
    nest.SetKernelStatus({'resolution': 0.1})
    seed = int(time.time() * 1534.0)
    nest.SetKernelStatus({'rng_seeds': [seed]})
    BCPNN.InstallBCPNN()

    syn_ports = {'AMPA': 1, 'NMDA': 2, 'GABA': 3}  # receptor types
    f_desired = 1.  #for pyramidal and basket cells
    f_max = 20.  #for pyramidal and basket cells

    f_desiredDBC = 7.5  #for DBCs
    f_maxDBC = 55.  # for DBCs

    NRN = {
        'cell_model': 'aeif_cond_exp_multisynapse',
        'neuron_params': {
            'AMPA_NEG_E_rev':
Code example #18
# -*- coding: utf-8 -*-
"""
"""
# pragma: no cover

__author__ = 'Martin Schulze'

import nest
import hbp_nrp_cle.tf_framework as nrp
import logging
import pyNN.nest as sim
import numpy as np
from pyNN.nest import *

nest.SetKernelStatus({'dict_miss_is_error': False})
logger = logging.getLogger(__name__)


def create_brain():
    """
    Initializes PyNN with the neuronal network that has to be simulated
    """

    nest.ResetKernel()
    # sim.setup(timestep=0.1, min_delay=0.1, max_delay=20.0, threads=2, debug=True)

    INPUT_OUTPUT_PARAMS = {
        # 'v_thresh':      -55.0,
        # 'tau_m':     100.0,
        # 'cm':       100.0,
        # 'v_reset':   -60.0
Code example #19
File: test.py Project: jougs/nestio-tools
import nest

nest.SetKernelStatus({
    "local_num_threads": 2,
    "resolution": .5,
    'recording': {
        'logger': 'SIONLogger'
    }
})

nrns = nest.Create('iaf_psc_alpha', 4)

nest.SetStatus(nrns, 'I_e', 1000.)

meter = nest.Create("multimeter", params={'stop': 3.})
nest.SetStatus(meter, {'record_from': ['V_m'], 'interval': .5, 'stop': 3.})
detector = nest.Create("spike_detector")
nest.SetStatus(detector, {'start': 0., 'stop': 10.})

nest.Connect(meter, nrns)
nest.Connect(nrns, detector)

nest.Simulate(15.)
Code example #20
syn_param = {
    "tau_psc": Tau_psc,
    "tau_rec": Tau_rec,
    "tau_fac": Tau_fac,
    "U": U,
    "delay": 0.1,
    "weight": A,
    "u": 0.0,
    "x": 1.0
}
'''
Third, we reset the kernel and set the resolution using `SetKernelStatus`.
'''

nest.ResetKernel()
nest.SetKernelStatus({"resolution": h})
'''
Fourth, the nodes are created using `Create`. We store the returned
handles in variables for later reference.
'''

neurons = nest.Create("iaf_psc_exp", 2)
dc_gen = nest.Create("dc_generator")
volts = nest.Create("voltmeter")
'''
Fifth, the `iaf_psc_exp`-neurons, the `dc_generator` and the
`voltmeter` are configured using `SetStatus`, which expects a list of
node handles and a parameter dictionary or a list of parameter
dictionaries.
'''
Code example #21
        nest.SetStatus(
            nest.GetConnections(self.noiseID['STN'], self.nID['STN']),
            {'weight': J})
        print('Set rate for CTX GPTI to: ', J)

    #	def setJD2(self,J):


#paramNEW= t.tune(param.nparam, param.cparamOLD, param.staticsyn, param.noise, param.staticsynNoise, param.connections)
nest.ResetKernel()

BG1 = varBG(param.nparam, param.cparam, param.staticsyn, param.noise,
            param.staticsynNoise, param.connections, param.pparam)
nest.SetKernelStatus({
    'data_path':
    '/Users/kimhedelin/Google Drive/VT18/Neuroscience/simulation/BGnetwork/BGcore/variableBG/data/',
    'overwrite_files': True
})
#BG1.connectMultimeterNew(to_file = True)
BG1.connectSpikeDet()
BG1.setIe(0.0)

vary = False
varyCTX = False
varyNoiseJ = False
vary1000 = False
varyLow = True

if vary:
    rate = [2400.0, 3000.0]
    JGPTI = [-0.09, -1.5]
Code example #22
# Simulate iaf_cond_alpha using multimeter, voltmeter and conductancemeter 
# for comparative measurement

import nest
import numpy as np
import pylab as pl

nest.ResetKernel()

nest.SetKernelStatus({'overwrite_files': True})

n = nest.Create('iaf_cond_alpha')

# cannot pass the dict as params= in Create(); parameters are set via SetStatus below
m = nest.Create('multimeter')
nest.SetStatus(m, {'interval': 0.5, 'start': 0.0, 'stop': 45.0,
                   'record_from': ['V_m', 'g_ex', 'g_in']})

v = nest.Create('voltmeter', params = {'interval': 0.5, 'withtime': True, 'start': 0.0, 'stop': 45.0})
c = nest.Create('conductancemeter', params = {'interval': 0.5,'withtime': True, 'start': 0.0, 'stop': 45.0})

gex = nest.Create('spike_generator',
                  params = {'spike_times': np.array([10.0, 20.0, 50.0])})
gin = nest.Create('spike_generator',
                  params = {'spike_times': np.array([15.0, 25.0, 55.0])})

nest.Connect(gex, n, params={'weight': 200.0})
nest.Connect(gin, n, params={'weight': -10.0})
nest.Connect(m, n)
nest.Connect(v, n)
nest.Connect(c, n)
Code example #23
    def run_protocol(self, pre_post_shift):
        """
        Create network and simulate for each delta value.

        Returns a dict with the synaptic weight at end of simulation for
        plain and precise parrots, one weight per delta value.

        All values for the plain parrot case should be identical, and
        the values for the precise parrot case should converge to that value
        for delta -> 0.

        All delta values must fulfill

           multiplicity * delta < resolution / 2

        so that in the plain case off-grid spike times are rounded up
        to the end of the step and thus belong to the same step as the
        corresponding precise spikes.

        :param pre_post_shift: Delay between pre- and postsynaptic trains
        :returns: {'parrot': [<weights>], 'parrot_ps': [<weights>]}
        """

        multiplicity = 2**3
        resolution = 2.**-4
        tics_per_ms = 1. / resolution * multiplicity * 4
        deltas = [resolution / multiplicity / 2**m for m in range(2, 10)]
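        # with multiplicity = 8 and resolution = 2**-4 = 0.0625 ms, the largest
        # delta below (m = 2) is 0.0625 / 8 / 4 ~ 0.002 ms, so
        # multiplicity * delta ~ 0.016 ms < resolution / 2 = 0.03125 ms,
        # satisfying the condition stated in the docstring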

        delay = 1.

        # k spikes will be emitted at these two times
        pre_spike_times_base = [100., 200.]

        nest.set_verbosity("M_WARNING")

        post_weights = {'parrot': [], 'parrot_ps': []}

        for delta in deltas:
            assert multiplicity * delta < resolution / 2., "Test inconsistent."

            nest.ResetKernel()
            nest.SetKernelStatus({
                'tics_per_ms': tics_per_ms,
                'resolution': resolution
            })

            pre_times = sorted(t_base - k * delta
                               for t_base in pre_spike_times_base
                               for k in range(multiplicity))
            post_times = [pre_time + pre_post_shift for pre_time in pre_times]

            # create spike_generators with these times
            pre_sg = nest.Create("spike_generator",
                                 params={
                                     "spike_times": pre_times,
                                     'allow_offgrid_spikes': True
                                 })
            post_sg = nest.Create("spike_generator",
                                  params={
                                      "spike_times": post_times,
                                      'allow_offgrid_spikes': True
                                  })
            pre_sg_ps = nest.Create("spike_generator",
                                    params={
                                        "spike_times": pre_times,
                                        'precise_times': True
                                    })
            post_sg_ps = nest.Create("spike_generator",
                                     params={
                                         "spike_times": post_times,
                                         'precise_times': True
                                     })

            # create parrot neurons and connect spike_generators
            pre_parrot = nest.Create("parrot_neuron")
            post_parrot = nest.Create("parrot_neuron")
            pre_parrot_ps = nest.Create("parrot_neuron_ps")
            post_parrot_ps = nest.Create("parrot_neuron_ps")

            nest.Connect(pre_sg, pre_parrot, syn_spec={"delay": delay})
            nest.Connect(post_sg, post_parrot, syn_spec={"delay": delay})
            nest.Connect(pre_sg_ps, pre_parrot_ps, syn_spec={"delay": delay})
            nest.Connect(post_sg_ps, post_parrot_ps, syn_spec={"delay": delay})

            # create spike detector --- debugging only
            spikes = nest.Create("spike_detector",
                                 params={'precise_times': True})
            nest.Connect(
                pre_parrot + post_parrot + pre_parrot_ps + post_parrot_ps,
                spikes)

            # connect both parrot neurons with a stdp synapse onto port 1
            # thereby spikes transmitted through the stdp connection are
            # not repeated postsynaptically.
            nest.Connect(pre_parrot,
                         post_parrot,
                         syn_spec={
                             'model': 'stdp_synapse',
                             'receptor_type': 1
                         })
            nest.Connect(pre_parrot_ps,
                         post_parrot_ps,
                         syn_spec={
                             'model': 'stdp_synapse',
                             'receptor_type': 1
                         })

            # get STDP synapse and weight before protocol
            syn = nest.GetConnections(source=pre_parrot,
                                      synapse_model="stdp_synapse")
            w_pre = nest.GetStatus(syn)[0]['weight']
            syn_ps = nest.GetConnections(source=pre_parrot_ps,
                                         synapse_model="stdp_synapse")
            w_pre_ps = nest.GetStatus(syn_ps)[0]['weight']

            sim_time = max(pre_times + post_times) + 5 * delay
            nest.Simulate(sim_time)

            # get weight post protocol
            w_post = nest.GetStatus(syn)[0]['weight']
            w_post_ps = nest.GetStatus(syn_ps)[0]['weight']

            assert w_post != w_pre, "Plain parrot weight did not change."
            assert w_post_ps != w_pre_ps, "Precise parrot \
                weight did not change."

            post_weights['parrot'].append(w_post)
            post_weights['parrot_ps'].append(w_post_ps)

        return post_weights
Code example #24
    def test_traub_psc_alpha(self):

        if not os.path.exists("target"):
            os.makedirs("target")

        input_path = os.path.join(
            os.path.realpath(
                os.path.join(os.path.dirname(__file__), "../../models/neurons",
                             "traub_psc_alpha.nestml")))
        target_path = "target"
        module_name = 'nestmlmodule'
        nest_path = nest.ll_api.sli_func("statusdict/prefix ::")
        suffix = '_nestml'

        to_nest(input_path=input_path,
                target_path=target_path,
                logging_level="INFO",
                suffix=suffix,
                module_name=module_name)

        install_nest(target_path, nest_path)

        nest.Install("nestmlmodule")
        model = "traub_psc_alpha_nestml"

        dt = 0.01
        t_simulation = 1000.0
        nest.SetKernelStatus({"resolution": dt})

        neuron = nest.Create(model)
        parameters = nest.GetDefaults(model)

        neuron.set({'I_e': 130.0})
        multimeter = nest.Create("multimeter")
        multimeter.set({"record_from": ["V_m"], "interval": dt})
        spike_recorder = nest.Create("spike_recorder")
        nest.Connect(multimeter, neuron)
        nest.Connect(neuron, spike_recorder)
        nest.Simulate(t_simulation)

        dmm = nest.GetStatus(multimeter)[0]
        Voltages = dmm["events"]["V_m"]
        tv = dmm["events"]["times"]
        dSD = nest.GetStatus(spike_recorder, keys='events')[0]
        spikes = dSD['senders']
        ts = dSD["times"]

        firing_rate = len(spikes) / t_simulation * 1000
        print("firing rate is ", firing_rate)
        expected_value = np.abs(firing_rate - 50)
        tolerance_value = 5  # Hz

        self.assertLessEqual(expected_value, tolerance_value)

        if TEST_PLOTS:

            fig, ax = plt.subplots(2, figsize=(8, 6), sharex=True)
            ax[0].plot(tv, Voltages, lw=2, color="k")
            ax[1].plot(ts, spikes, 'ko')
            ax[1].set_xlabel("Time [ms]")
            ax[1].set_xlim(0, t_simulation)
            ax[1].set_ylabel("Spikes")
            ax[0].set_ylabel("v [ms]")
            ax[0].set_ylim(-100, 50)

            for i in ts:
                ax[0].axvline(x=i, lw=1., ls="--", color="gray")

            plt.savefig("traub_psc_alpha.png")
Code example #25
    def test_QuantalSTPSynapse(self):
        """Compare quantal_stp_synapse with its deterministic equivalent"""
        nest.ResetKernel()
        nest.SetKernelStatus({'rng_seed': 1})
        nest.set_verbosity(100)
        n_syn = 12  # number of synapses in a connection
        n_trials = 100  # number of measurement trials

        # parameter set for facilitation
        fac_params = {"U": 0.03, "u": 0.03,
                      "tau_fac": 500., "tau_rec": 200., "weight": 1.}

        # Here we assign the parameter set to the synapse models
        t1_params = fac_params       # for tsodyks2_synapse
        t2_params = t1_params.copy()  # for quantal_stp_synapse

        t2_params['n'] = n_syn
        t2_params['weight'] = 1. / n_syn

        nest.SetDefaults("tsodyks2_synapse", t1_params)
        nest.SetDefaults("quantal_stp_synapse", t2_params)
        nest.SetDefaults("iaf_psc_exp", {"tau_syn_ex": 3., 'tau_m': 70.})

        source = nest.Create('spike_generator')
        nest.SetStatus(
            source,
            {
                'spike_times': [
                    30., 60., 90., 120., 150., 180., 210., 240.,
                    270., 300., 330., 360., 390., 900.]
            }
        )

        parrot = nest.Create('parrot_neuron')
        neuron = nest.Create("iaf_psc_exp", 2)

        # We must send spikes via parrot because devices cannot
        # connect through plastic synapses
        # See #478.
        nest.Connect(source, parrot)
        nest.Connect(parrot, neuron[:1], syn_spec="tsodyks2_synapse")
        nest.Connect(parrot, neuron[1:], syn_spec="quantal_stp_synapse")

        voltmeter = nest.Create("voltmeter", 2)

        t_tot = 1500.

        # the following is a dry-run trial so that the synapse dynamics is
        # identical in all subsequent trials.

        nest.Simulate(t_tot)

        # Now we connect the voltmeters
        nest.Connect(voltmeter[:1], neuron[:1])
        nest.Connect(voltmeter[1:], neuron[1:])

        for t in range(n_trials):
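            # shifting the generator's 'origin' to the current network time
            # replays the same spike train (times are relative to origin) in
            # every trial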
            t_net = nest.GetKernelStatus('biological_time')
            nest.SetStatus(source, {'origin': t_net})
            nest.Simulate(t_tot)

        nest.Simulate(.1)  # flush the last voltmeter events from the queue

        vm = numpy.array(nest.GetStatus(voltmeter[1], 'events')[0]['V_m'])
        vm_reference = numpy.array(nest.GetStatus(voltmeter[0],
                                                  'events')[0]['V_m'])

        assert(len(vm) % n_trials == 0)
        n_steps = int(len(vm) / n_trials)
        vm.shape = (n_trials, n_steps)
        vm_reference.shape = (n_trials, n_steps)

        vm_mean = numpy.mean(vm, axis=0)
        vm_ref_mean = numpy.mean(vm_reference, axis=0)

        error = numpy.sqrt((vm_ref_mean - vm_mean)**2)
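        # sqrt of the squared difference is simply the elementwise absolute
        # deviation between the trial-averaged voltage traces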
        self.assertLess(numpy.max(error), 4.0e-4)
Code example #26
def runNestModel(model_type, neuron_config, amp_times, amp_vals, dt_ms,
                 simulation_time_ms):
    """Creates and runs a NEST glif object and returns the voltages and spike-times"""

    # By default NEST has a 0.1 ms resolution, which can cause integration
    # issues because glif_lif_asc uses the explicit Euler method
    nest.ResetKernel()
    nest.SetKernelStatus({'resolution': dt_ms})
    nest.set_verbosity('M_QUIET')

    if model_type == asdk.LIF:
        neuron = create_lif(neuron_config, dt_ms)
    elif model_type == asdk.LIF_ASC:
        neuron = create_lif_asc(neuron_config, dt_ms)
        multimeter = nest.Create("multimeter",
                                 params={
                                     'record_from': ['AScurrents_sum'],
                                     'withgid': True,
                                     'withtime': True
                                 })
        nest.Connect(multimeter, neuron)
    elif model_type == asdk.LIF_R:
        neuron = create_lif_r(neuron_config, dt_ms)
    elif model_type == asdk.LIF_R_ASC:
        neuron = create_lif_r_asc(neuron_config, dt_ms)
        multimeter = nest.Create("multimeter",
                                 params={
                                     'record_from': ['AScurrents_sum'],
                                     'withgid': True,
                                     'withtime': True
                                 })
        nest.Connect(multimeter, neuron)
    elif model_type == asdk.LIF_R_ASC_A:
        neuron = create_lif_r_asc_a(neuron_config, dt_ms)
        multimeter = nest.Create("multimeter",
                                 params={
                                     'record_from': ['AScurrents_sum'],
                                     'withgid': True,
                                     'withtime': True
                                 })
        nest.Connect(multimeter, neuron)

    # Create voltmeter and spike reader
    voltmeter = nest.Create("voltmeter",
                            params={
                                "withgid": True,
                                "withtime": True,
                                'interval': dt_ms
                            })

    # NEST glif models output precise spike times by default
    spikedetector = nest.Create("spike_detector",
                                params={
                                    "withgid": True,
                                    "withtime": True
                                })
    # output grid spike time
    #spikedetector = nest.Create("spike_detector", params={"withgid": True, "withtime": True,  "precise_times": False})
    # output spike time steps together spike offset
    #spikedetector = nest.Create("spike_detector", params={"withgid": True, "withtime": True, "time_in_steps": True})

    nest.Connect(voltmeter, neuron)
    nest.Connect(neuron, spikedetector)

    # Step current
    scg = nest.Create("step_current_generator",
                      params={
                          'amplitude_times': amp_times[1:],
                          'amplitude_values': np.array(amp_vals[1:]) * 1.0e12
                      })  # convert current to pA from A
    #nest.Connect(scg, neuron)
    nest.Connect(scg, neuron, syn_spec={'delay': dt_ms})

    # Simulate, grab run values and return
    nest.Simulate(simulation_time_ms)
    voltages = nest.GetStatus(voltmeter)[0]['events']['V_m']

    times = nest.GetStatus(voltmeter)[0]['events']['times']
    spike_times = nest.GetStatus(spikedetector)[0]['events']['times']

    return times, voltages - neuron_config[
        'El_reference'] * 1.0e03 - neuron_config['El'] * 1.0e03, spike_times
Code example #27
def build_network(logger):
    '''Builds the network including setting of simulation and neuron
    parameters, creation of neurons and connections

    Requires an instance of Logger as argument

    '''

    tic = time.time()  # start timer on construction

    # unpack a few variables for convenience
    NE = brunel_params['NE']
    NI = brunel_params['NI']
    model_params = brunel_params['model_params']
    stdp_params = brunel_params['stdp_params']

    # set global kernel parameters
    nest.SetKernelStatus({
        'total_num_virtual_procs': params['nvp'],
        'resolution': params['dt'],
        'overwrite_files': True
    })

    nest.SetDefaults('iaf_psc_alpha', model_params)

    nest.message(M_INFO, 'build_network', 'Creating excitatory population.')
    E_neurons = nest.Create('iaf_psc_alpha', NE)

    nest.message(M_INFO, 'build_network', 'Creating inhibitory population.')
    I_neurons = nest.Create('iaf_psc_alpha', NI)

    if brunel_params['randomize_Vm']:
        nest.message(M_INFO, 'build_network',
                     'Randomizing membrane potentials.')

        seed = nest.GetKernelStatus('rng_seeds')[-1] + 1 + nest.GetStatus(
            [0], 'vp')[0]
        rng = np.random.RandomState(seed=seed)

        for node in get_local_nodes(E_neurons):
            nest.SetStatus(
                [node], {
                    'V_m':
                    rng.normal(brunel_params['mean_potential'],
                               brunel_params['sigma_potential'])
                })

        for node in get_local_nodes(I_neurons):
            nest.SetStatus(
                [node], {
                    'V_m':
                    rng.normal(brunel_params['mean_potential'],
                               brunel_params['sigma_potential'])
                })

    # number of incoming excitatory connections
    CE = int(1. * NE / params['scale'])
    # number of incoming inhibitory connections
    CI = int(1. * NI / params['scale'])

    nest.message(M_INFO, 'build_network',
                 'Creating excitatory stimulus generator.')

    # Convert synapse weight from mV to pA
    conversion_factor = convert_synapse_weight(model_params['tau_m'],
                                               model_params['tau_syn_ex'],
                                               model_params['C_m'])
    JE_pA = conversion_factor * brunel_params['JE']

    nu_thresh = model_params['V_th'] / (CE * model_params['tau_m'] /
                                        model_params['C_m'] * JE_pA *
                                        np.exp(1.) * tau_syn)
    nu_ext = nu_thresh * brunel_params['eta']

    E_stimulus = nest.Create('poisson_generator', 1,
                             {'rate': nu_ext * CE * 1000.})

    nest.message(M_INFO, 'build_network',
                 'Creating excitatory spike detector.')

    if params['record_spikes']:
        detector_label = os.path.join(
            brunel_params['filestem'],
            'alpha_' + str(stdp_params['alpha']) + '_spikes')
        E_detector = nest.Create('spike_detector', 1, {
            'withtime': True,
            'to_file': True,
            'label': detector_label
        })

    BuildNodeTime = time.time() - tic

    logger.log(str(BuildNodeTime) + ' # build_time_nodes')
    logger.log(str(memory_thisjob()) + ' # virt_mem_after_nodes')

    tic = time.time()

    nest.SetDefaults('static_synapse_hpc', {'delay': brunel_params['delay']})
    nest.CopyModel('static_synapse_hpc', 'syn_std')
    nest.CopyModel('static_synapse_hpc', 'syn_ex', {'weight': JE_pA})
    nest.CopyModel('static_synapse_hpc', 'syn_in',
                   {'weight': brunel_params['g'] * JE_pA})

    stdp_params['weight'] = JE_pA
    nest.SetDefaults('stdp_pl_synapse_hom_hpc', stdp_params)

    nest.message(M_INFO, 'build_network', 'Connecting stimulus generators.')

    # Connect Poisson generator to neuron

    nest.Connect(E_stimulus, E_neurons, {'rule': 'all_to_all'},
                 {'model': 'syn_ex'})
    nest.Connect(E_stimulus, I_neurons, {'rule': 'all_to_all'},
                 {'model': 'syn_ex'})

    nest.message(M_INFO, 'build_network',
                 'Connecting excitatory -> excitatory population.')

    nest.Connect(
        E_neurons, E_neurons, {
            'rule': 'fixed_indegree',
            'indegree': CE,
            'autapses': False,
            'multapses': True
        }, {'model': 'stdp_pl_synapse_hom_hpc'})

    nest.message(M_INFO, 'build_network',
                 'Connecting inhibitory -> excitatory population.')

    nest.Connect(
        I_neurons, E_neurons, {
            'rule': 'fixed_indegree',
            'indegree': CI,
            'autapses': False,
            'multapses': True
        }, {'model': 'syn_in'})

    nest.message(M_INFO, 'build_network',
                 'Connecting excitatory -> inhibitory population.')

    nest.Connect(
        E_neurons, I_neurons, {
            'rule': 'fixed_indegree',
            'indegree': CE,
            'autapses': False,
            'multapses': True
        }, {'model': 'syn_ex'})

    nest.message(M_INFO, 'build_network',
                 'Connecting inhibitory -> inhibitory population.')

    nest.Connect(
        I_neurons, I_neurons, {
            'rule': 'fixed_indegree',
            'indegree': CI,
            'autapses': False,
            'multapses': True
        }, {'model': 'syn_in'})

    if params['record_spikes']:
        local_neurons = list(get_local_nodes(E_neurons))

        if len(local_neurons) < brunel_params['Nrec']:
            nest.message(
                M_ERROR, 'build_network',
                '''Spikes can only be recorded from local neurons, but the
                number of local neurons is smaller than the number of neurons
                spikes should be recorded from. Aborting the simulation!''')
            exit(1)

        nest.message(M_INFO, 'build_network', 'Connecting spike detectors.')
        nest.Connect(local_neurons[:brunel_params['Nrec']], E_detector,
                     'all_to_all', 'static_synapse_hpc')

    # read out time used for building
    BuildEdgeTime = time.time() - tic

    logger.log(str(BuildEdgeTime) + ' # build_edge_time')
    logger.log(str(memory_thisjob()) + ' # virt_mem_after_edges')

    return E_detector if params['record_spikes'] else None
Code example #28
def simulate():
    nest.ResetKernel()

    nest.SetKernelStatus({'local_num_threads': 4})

    data = request.get_json()
    nodes = data['nodes']
    nodes['neuron']['params'] = dict(
        zip(nodes['neuron']['params'].keys(),
            map(float, nodes['neuron']['params'].values())))
    nodes['input']['params'] = dict(
        zip(nodes['input']['params'].keys(),
            map(float, nodes['input']['params'].values())))

    npop = 500
    pop = nest.Create(nodes['neuron']['model'],
                      npop,
                      params=nodes['neuron']['params'])
    popE, popI = pop[:int(npop * .8)], pop[int(npop * .8):]
    noise = nest.Create('noise_generator')
    input = nest.Create(nodes['input']['model'],
                        params=nodes['input']['params'])

    sd = nest.Create('spike_detector')

    nest.Connect(input, pop)
    # nest.Connect(pop[::5],sd)
    nest.Connect(pop, sd)

    nest.SetStatus(noise, {'std': 1000.})
    nest.Simulate(100.)
    nest.SetStatus(noise, {'std': 0.})
    events = nest.GetStatus(sd, 'events')[0]

    p = .1
    nest.Connect(
        popE,
        pop,
        # conn_spec={'rule': 'fixed_outdegree', 'outdegree': int(p*npop), 'autapses': False, 'multapses': True},
        conn_spec={
            'rule': 'fixed_indegree',
            'indegree': int(p * npop),
            'autapses': False,
            'multapses': True
        },
        syn_spec={'weight': 10.})
    nest.Connect(
        popI,
        pop,
        # conn_spec={'rule': 'fixed_outdegree', 'outdegree': int(p*npop), 'autapses': False, 'multapses': True},
        conn_spec={
            'rule': 'fixed_indegree',
            'indegree': int(p * npop),
            'autapses': False,
            'multapses': True
        },
        syn_spec={'weight': -40.})

    nest.Simulate(data['sim_time'])
    time = nest.GetKernelStatus('time')

    events = nest.GetStatus(sd, 'events')[0]
    nest.SetStatus(sd, {'n_events': 0})

    events = {key: val.tolist() for key, val in events.items()}
    return jsonify(events=events, time=time, pop=pop)
Code example #29
def simulate(parameters):
    # Simulates the network and returns recorded spikes for excitatory
    # and inhibitory population

    # Code taken from brunel_alpha_nest.py

    def LambertWm1(x):
        # Using scipy to mimic the gsl_sf_lambert_Wm1 function.
        return sp.lambertw(x, k=-1 if x < 0 else 0).real

    def ComputePSPnorm(tauMem, CMem, tauSyn):
        a = (tauMem / tauSyn)
        b = (1.0 / tauSyn - 1.0 / tauMem)

        # time of maximum
        t_max = 1.0 / b * (-LambertWm1(-np.exp(-1.0 / a) / a) - 1.0 / a)

        # maximum of PSP for current of unit amplitude
        return (np.exp(1.0) / (tauSyn * CMem * b) *
                ((np.exp(-t_max / tauMem) - np.exp(-t_max / tauSyn)) / b -
                 t_max * np.exp(-t_max / tauSyn)))

    # number of excitatory neurons
    NE = int(parameters['gamma'] * parameters['N'])
    # number of inhibitory neurons
    NI = parameters['N'] - NE

    # number of excitatory synapses per neuron
    CE = int(parameters['epsilon'] * NE)
    # number of inhibitory synapses per neuron
    CI = int(parameters['epsilon'] * NI)

    tauSyn = 0.5  # synaptic time constant in ms
    tauMem = 20.0  # time constant of membrane potential in ms
    CMem = 250.0  # capacitance of membrane in in pF
    theta = 20.0  # membrane threshold potential in mV
    neuron_parameters = {
        'C_m': CMem,
        'tau_m': tauMem,
        'tau_syn_ex': tauSyn,
        'tau_syn_in': tauSyn,
        't_ref': 2.0,
        'E_L': 0.0,
        'V_reset': 0.0,
        'V_m': 0.0,
        'V_th': theta
    }
    J = 0.1  # postsynaptic amplitude in mV
    J_unit = ComputePSPnorm(tauMem, CMem, tauSyn)
    J_ex = J / J_unit  # amplitude of excitatory postsynaptic current
    # amplitude of inhibitory postsynaptic current
    J_in = -parameters['g'] * J_ex

    nu_th = (theta * CMem) / (J_ex * CE * np.exp(1) * tauMem * tauSyn)
    nu_ex = parameters['eta'] * nu_th
    p_rate = 1000.0 * nu_ex * CE

    nest.ResetKernel()
    nest.set_verbosity('M_FATAL')

    nest.SetKernelStatus({
        'rng_seeds': [parameters['seed']],
        'resolution': parameters['dt']
    })

    nest.SetDefaults('iaf_psc_alpha', neuron_parameters)
    nest.SetDefaults('poisson_generator', {'rate': p_rate})

    nodes_ex = nest.Create('iaf_psc_alpha', NE)
    nodes_in = nest.Create('iaf_psc_alpha', NI)
    noise = nest.Create('poisson_generator')
    espikes = nest.Create('spike_detector', params={'label': 'brunel-py-ex'})
    ispikes = nest.Create('spike_detector', params={'label': 'brunel-py-in'})

    nest.CopyModel('static_synapse', 'excitatory', {
        'weight': J_ex,
        'delay': parameters['delay']
    })
    nest.CopyModel('static_synapse', 'inhibitory', {
        'weight': J_in,
        'delay': parameters['delay']
    })

    nest.Connect(noise, nodes_ex, syn_spec='excitatory')
    nest.Connect(noise, nodes_in, syn_spec='excitatory')

    if parameters['N_rec'] > NE:
        raise ValueError('Requested recording from {} neurons, \
            but only {} in excitatory population'.format(
            parameters['N_rec'], NE))
    if parameters['N_rec'] > NI:
        raise ValueError('Requested recording from {} neurons, \
            but only {} in inhibitory population'.format(
            parameters['N_rec'], NI))
    nest.Connect(nodes_ex[:parameters['N_rec']], espikes)
    nest.Connect(nodes_in[:parameters['N_rec']], ispikes)

    conn_parameters_ex = {'rule': 'fixed_indegree', 'indegree': CE}
    nest.Connect(nodes_ex, nodes_ex + nodes_in, conn_parameters_ex,
                 'excitatory')

    conn_parameters_in = {'rule': 'fixed_indegree', 'indegree': CI}
    nest.Connect(nodes_in, nodes_ex + nodes_in, conn_parameters_in,
                 'inhibitory')

    nest.Simulate(parameters['sim_time'])

    return (espikes.events, ispikes.events)
Code example #30
import sys

print("Getting comm")
from mpi4py import MPI
comm = MPI.COMM_WORLD.Split(0)  # is nest

print("Getting nest")
import nest

STATUS_DICT = nest.ll_api.sli_func("statusdict")
if not STATUS_DICT["have_recordingbackend_arbor"]:
    print("Recording backend Arbor not available. Exiting test script!")
    sys.exit(1)

nest.set_communicator(comm)
nest.SetKernelStatus({'recording_backends': {'arbor': {}}})

print("Building network")
pg = nest.Create('poisson_generator', params={'rate': 10.0})

# We cannot directly record from poisson_generator due to implementation
# details. Create a parrot and connect the recorder to that
parrots = nest.Create('parrot_neuron', 100)
nest.Connect(pg, parrots)

sd2 = nest.Create('spike_detector', params={"record_to": "arbor"})
nest.Connect(parrots, sd2)

status = nest.GetKernelStatus()
print('min_delay: ', status['min_delay'], ", max_delay: ", status['max_delay'])
print("Simulate")