Example #1
def set_sampling_parameters(sampling_times, input_signal_set, input_signal):
    """
    Determine the sampling times for state extraction.

    :param sampling_times: explicit sampling times, or None to derive them
    :param input_signal_set: input signal set (its ``online`` flag decides
        whether offset times are known in advance)
    :param input_signal: input signal whose ``offset_times`` are used
    :return: tuple (t_samp, sub_sampling_times)
    """
    # TODO remove when implemented
    if sampling_times is not None:
        raise NotImplementedError(
            'Sampling times is not yet available, use sampling_offsets until then!'
        )

    if sampling_times is None and not input_signal_set.online:
        # extract stimulus offset times
        t_samp = np.sort(list(signals.iterate_obj_list(input_signal.offset_times)))
        sub_sampling_times = None
    elif sampling_times is None and input_signal_set.online:
        # offset times will be specified online, in the main iteration
        t_samp = [round(nest.GetKernelStatus()['time'])]
        sub_sampling_times = None
    elif sampling_times is not None and input_signal_set.online:
        t_samp = [round(nest.GetKernelStatus()['time'])]
        sub_sampling_times = sampling_times
    else:
        t_samp = sampling_times
        sub_sampling_times = None
    return t_samp, sub_sampling_times
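Most of the examples below read values back from the kernel with `nest.GetKernelStatus`, either by fetching the full status dictionary or by passing a key. A minimal sketch of the two access forms seen throughout this page:

    import nest

    nest.ResetKernel()
    nest.Simulate(100.0)
    # Both forms return the current simulation time in ms:
    t_dict = nest.GetKernelStatus()['time']  # full status dictionary, then index
    t_key = nest.GetKernelStatus('time')     # single key, avoids building the dict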
Example #2
File: db_main.py Project: SBottani/NNGT
    def _get_simulation_prop(self, network, simulator):
        '''
        Get the simulation properties.
        
        Parameters
        ----------
        network : :class:`~nngt.Network`
            Network used for the simulation.
        simulator : str
            Name of the simulator used (NEST, BRIAN...).

        Returns
        -------
        sim_prop : dict
            Dictionary containing the relevant key/value pairs to fill the
            :class:`~nngt.database.Simulation` class.
        '''
        pop, size = [], []
        for name, group in iter(network.population.items()):
            pop.append(name)
            size.append(len(group.ids))
        self.current_simulation = {
            'start_time': datetime.now(),
            'simulated_time': nest.GetKernelStatus('time'),
            'resolution': nest.GetKernelStatus('resolution'),
            'simulator': simulator.lower(),
            'grnd_seed': nest.GetKernelStatus('grng_seed'),
            'local_seeds': nest.GetKernelStatus('rng_seeds'),
            'population': pop,
            'pop_sizes': size
        }
Example #3

    def __run_sim(self, sim_time, str_p, step=1.):
        """Run the simulation in chunks of `step` seconds."""
        update_steps = numpy.arange(0, sim_time, step)
        for i in update_steps:
            nest.Simulate(step * 1000.)

            ca = nest.GetStatus(self.neuron_new, ['Ca'])[0][0]
            synelms = nest.GetStatus(self.neuron_new,
                                     ['synaptic_elements'])[0][0]

            print("{}\t{}\t{}\t{}\t{}".format(
                nest.GetKernelStatus()['time'], ca, synelms['Den_ex']['z'],
                synelms['Den_in']['z'], self.weightE * synelms['Den_ex']['z'] -
                self.weightI * synelms['Den_in']['z']),
                  file=self.fh_neuron_new)

            ca = nest.GetStatus(self.neuron_butz, ['Ca'])[0][0]
            synelms = nest.GetStatus(self.neuron_butz,
                                     ['synaptic_elements'])[0][0]
            # In the butz model, the conductances were all 1nS
            print("{}\t{}\t{}\t{}\t{}".format(
                nest.GetKernelStatus()['time'], ca, synelms['Den_ex']['z'],
                synelms['Den_in']['z'],
                synelms['Den_ex']['z'] - synelms['Den_in']['z']),
                  file=self.fh_neuron_butz)
Example #4
    def raw_data_filenames(self):
        """Return names of raw data files saved by NEST.

        From NEST documentation:
            ```The name of the output file is
            `data_path/data_prefix(label|model_name)-gid-vp.file_extension`

            See /label and /file_extension for how to change the name.
            /data_prefix is changed in the root node.```

        NB: We don't use the recorder's `filenames` key, since it is created
        only after the first `Simulate` call.

        NB: The label is set at creation.

        NB: There is one file per virtual process. The virtual processes are
        numbered from 0 and zero-padded to the same number of digits as the
        number of virtual processes.
        """
        import nest
        # TODO: Deal with case where multimeter is only recorded to memory?
        assert 'file' in self._record_to
        assert self._label is not None  # Check that the label has been set
        prefix = (nest.GetKernelStatus('data_prefix') + self._label +
                  f'-{self.gid[0]}-')
        extension = nest.GetStatus(self.gid, 'file_extension')[0]
        n_vp = nest.GetKernelStatus('local_num_threads')
        # TODO: Check the formatting for 3-digit numbers of threads
        assert n_vp <= 100
        n_digits = len(str(n_vp))
        return [
            prefix + f'{str(vp).zfill(n_digits)}.{extension}'
            for vp in range(n_vp)
        ]
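For illustration, a hypothetical recorder with an empty `data_prefix`, label 'multimeter', GID 5, file extension 'dat' and two virtual processes would make this method return:

    ['multimeter-5-0.dat', 'multimeter-5-1.dat']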
Example #5
    def logging(self):
        """
        Write runtime and memory for the first 30 MPI processes
        to file.
        """
        d = {
            'time_prepare': self.time_prepare,
            'time_network_local': self.time_network_local,
            'time_network_global': self.time_network_global,
            'time_init': self.time_init,
            'time_simulate': self.time_simulate,
            'base_memory': self.base_memory,
            'network_memory': self.network_memory,
            'init_memory': self.init_memory,
            'total_memory': self.total_memory,
            'time_create': self.time_create,
            'time_connect': self.time_connect,
            'num_connections': nest.GetKernelStatus('num_connections'),
            'local_spike_counter': nest.GetKernelStatus('local_spike_counter')
        }
        print(d)

        if nest.Rank() < 30:
            fn = os.path.join(
                self.data_dir, 'recordings', '_'.join(
                    (self.label, 'logfile', str(nest.Rank()))))
            with open(fn, 'w') as f:
                json.dump(d, f)
Example #6

def RunSimulation():

    nest.set_verbosity('M_INFO')
    logger = Logger()

    logger.log('{} # virt_mem_0'.format(memory_thisjob()))

    # ----------------------- Network Construction -----------------------------

    BuildNetwork(logger)

    # ---------------- Initial simulation: rig and calibrate -------------------

    tic = time.time()

    nest.Prepare()
    nest.Run(params['inisimtime'])

    InitializationTime = time.time() - tic

    logger.log('{} # init_time'.format(InitializationTime))
    logger.log('{} # virt_mem_after_init'.format(memory_thisjob()))

    # ----------------------- Cleanup and output -------------------------------

    nest.Cleanup()

    logger.log('{} # num_neurons'.format(logger_params['num_nodes']))
    logger.log('{} # num_connections'.format(
        nest.GetKernelStatus('num_connections')))
    logger.log('{} # min_delay'.format(nest.GetKernelStatus('min_delay')))
    logger.log('{} # max_delay'.format(nest.GetKernelStatus('max_delay')))
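Example #6 drives the kernel with explicit phase calls instead of a plain `Simulate`. A sketch of how the calls relate (t in ms):

    # nest.Simulate(t) bundles the three phases below; splitting them lets you
    # call nest.Run() repeatedly between one Prepare/Cleanup pair while
    # instrumenting the loop (timing, memory logging, etc.):
    nest.Prepare()
    nest.Run(t)
    nest.Cleanup()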
Example #7
def print_network_size():
    import nest
    print('------------------------')
    print('Network size (without recorders)')
    print('Number of nodes: ', nest.GetKernelStatus('network_size'))
    print('Number of connections: ',
          nest.GetKernelStatus('num_connections'))
    print('------------------------')
Example #8
def print_net_information():
    """Print information about the network size."""

    print("***************")
    print("Network size: ", nest.GetKernelStatus(["network_size"]))
    print("Num connections: ", nest.GetKernelStatus(["num_connections"]))
    print("Min delay: ", nest.GetKernelStatus(["min_delay"]))
    print("Max delay: ", nest.GetKernelStatus(["max_delay"]))
    print("***************")
Example #9
    def test_disconnect_defaults(self):

        nodes = nest.Create('iaf_psc_alpha', 5)
        nest.Connect(nodes, nodes)
        self.assertEqual(nest.GetKernelStatus('num_connections'), 25)

        nest.Disconnect(nodes, nodes)

        self.assertEqual(nest.GetKernelStatus('num_connections'), 20)
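The counts follow from the defaults: `Connect` uses the 'all_to_all' rule, giving 5 x 5 = 25 connections, while `Disconnect` defaults to 'one_to_one', removing only the five self-connections and leaving 20.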
Example #10
    def test_disconnect_static_synapse(self):

        nodes = nest.Create('iaf_psc_alpha', 5)
        nest.Connect(nodes, nodes)

        self.assertEqual(nest.GetKernelStatus('num_connections'), 25)

        nest.Disconnect(nodes, nodes, syn_spec='static_synapse')

        self.assertEqual(nest.GetKernelStatus('num_connections'), 20)
Example #11
    def test_kernel_attributes(self):
        """Test nest attribute access of kernel attributes"""

        nest.ResetKernel()

        self.assertEqual(nest.GetKernelStatus(), nest.kernel_status)
        self.assertEqual(nest.GetKernelStatus("resolution"), nest.resolution)

        nest.resolution = 0.4
        self.assertEqual(0.4, nest.resolution)
        self.assertRaises(AttributeError, setattr, nest, "network_size", 120)
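Example #11 relies on NEST 3 exposing kernel status entries as module attributes. A minimal sketch of the equivalence (NEST 3.x):

    import nest

    nest.ResetKernel()
    assert nest.GetKernelStatus('resolution') == nest.resolution
    nest.resolution = 0.05  # same effect as nest.SetKernelStatus({'resolution': 0.05})
    # Read-only entries such as 'network_size' raise AttributeError on assignment.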
Example #12

    def test_connect_oversized_mask(self):
        """Connecting with specified oversized mask possible."""
        free_layer = nest.Create('iaf_psc_alpha', positions=nest.spatial.free(
            [[0., 0.]], edge_wrap=True, extent=[1., 1.]))
        conn_spec = {'rule': 'pairwise_bernoulli', 'p': 1.0,
                     'mask': {'circular': {'radius': 2.}}}
        with self.assertRaises(nest.kernel.NESTError):
            nest.Connect(free_layer, free_layer, conn_spec)
        self.assertEqual(nest.GetKernelStatus('num_connections'), 0)
        conn_spec['allow_oversized_mask'] = True
        nest.Connect(free_layer, free_layer, conn_spec)
        self.assertEqual(nest.GetKernelStatus('num_connections'), 1)
Example #13
def clean_up_delay_units(input_data, kernel):

    if input_data is None or input_data == 0.:
        return kernel.GetKernelStatus("resolution")
    else:
        output = copy.copy(input_data)
        if isinstance(output, dict) and 'low' in output:
            # This will get converted back by the next function
            output['low'] = kernel.GetKernelStatus("resolution") * .001
        return clean_up_units(output)
Example #14
    def train(self, data, target):
        self.__set_input(data)
        cnt = 0
        while not self.__convergence(data, target):
            cnt += 1
            logging.info("Training: #%s", cnt)
            self.__set_teacher(self._converter.target(target))
            nest.SetStatus(self.__generator,
                           {'origin': nest.GetKernelStatus()['time']})
            nest.SetStatus(self.__teacher,
                           {'origin': nest.GetKernelStatus()['time']})
            nest.Simulate(self.__duration)
Example #15
    def updateSpikeTimes(self, start=None, period=None, frequency=1/4.0):

        start = start if start else self.start
        period = period if period else self.period

        if start < nest.GetKernelStatus("time"):
            lag = nest.GetKernelStatus("time") - start
            print("WARNING: spike generator started with lag " + str(lag))

        step = 1 / frequency
        self.spikeTimes = np.arange(start, period, step)
        nest.SetStatus(self.drive, {"spike_times": self.spikeTimes})
Example #16
def init_connection(dic_layer, param_topology, param_connection):
    """
    Create the connections between all the neurons.
    :param dic_layer: dictionary with all the layers
    :param param_topology: parameters for the topology
    :param param_connection: parameters for the connections
    :return: tuple (conn_params_ex_inside, conn_params_in_inside,
        list_layer_ex, list_layer_in, weights, delays)
    """
    ## Connections inside each region

    # type of synapse
    nest.CopyModel("static_synapse", "excitatory_inside", {
        "weight": param_connection['weight_local'],
        "delay": nest.GetKernelStatus("min_delay")
    })
    nest.CopyModel("static_synapse", "inhibitory_inside", {
        "weight": -param_connection['g'] * param_connection['weight_local'],
        "delay": nest.GetKernelStatus("min_delay")
    })

    # type of connection
    conn_params_ex_inside = {
        'rule': 'fixed_indegree',
        'indegree': int(param_connection['p_connect'] *
                        int(param_topology['nb_neuron_by_region'] *
                            (1 - param_topology['percentage_inhibitory'])))
    }
    conn_params_in_inside = {
        'rule': 'fixed_indegree',
        'indegree': int(param_connection['p_connect'] *
                        int(param_topology['nb_neuron_by_region'] *
                            param_topology['percentage_inhibitory']))
    }

    # connection between each population
    list_layer_ex = dic_layer['excitatory']['list']
    list_layer_in = dic_layer['inhibitory']['list']
    weights = np.load(param_connection['path_weight'])
    # round delays to the simulation time grid and clamp them to min_delay
    delays = np.around(
        np.load(param_connection['path_distance']) *
        param_connection['velocity'] / nest.GetKernelStatus('resolution')
    ) * nest.GetKernelStatus('resolution')
    delays[np.where(delays <= 0.0)] = nest.GetKernelStatus("min_delay")
    return (conn_params_ex_inside, conn_params_in_inside, list_layer_ex,
            list_layer_in, weights, delays)
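The delay handling at the end of Example #16 is a common idiom worth isolating. A minimal sketch of just that step (`raw_delays` is a placeholder array):

    res = nest.GetKernelStatus('resolution')       # e.g. 0.1 ms
    delays = np.around(raw_delays / res) * res     # snap to the time grid: 0.349 -> 0.3
    delays[delays <= 0.0] = nest.GetKernelStatus('min_delay')  # delays must be positive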
Example #17
    def test_min_max_delay_using_default_delay(self):
        nest.ResetKernel()
        delay = 1.0
        syn_model = 'static_synapse'
        nest.SetStructuralPlasticityStatus({
            'structural_plasticity_synapses': {
                'syn1': {
                    'synapse_model': syn_model,
                    'pre_synaptic_element': 'SE1',
                    'post_synaptic_element': 'SE2',
                }
            }
        })
        self.assertLessEqual(nest.GetKernelStatus('min_delay'), delay)
        self.assertGreaterEqual(nest.GetKernelStatus('max_delay'), delay)
Example #18
    def simulate(self, dvs_data, reward_conditional, reward_collision):
        nest.SetStatus(
            self.conn_l,
            {"n": reward_conditional * p.reward_factor - reward_collision})
        nest.SetStatus(
            self.conn_r,
            {"n": -reward_conditional * p.reward_factor + reward_collision})

        time = nest.GetKernelStatus("time")

        nest.SetStatus(self.spike_generators, {"origin": time})
        nest.SetStatus(self.spike_generators, {"stop": p.sim_time})

        dvs_data = dvs_data.reshape(dvs_data.size)

        for i in range(dvs_data.size):
            rate = dvs_data[i] / p.max_spikes
            rate = np.clip(rate, 0, 1) * p.max_poisson_freq
            nest.SetStatus([self.spike_generators[i]], {"rate": rate})

        nest.Simulate(p.sim_time)

        n_l = nest.GetStatus(self.spike_detector, keys="n_events")[0]
        n_r = nest.GetStatus(self.spike_detector, keys="n_events")[1]

        nest.SetStatus(self.spike_detector, {"n_events": 0})
        weights_l = np.array(nest.GetStatus(
            self.conn_l, keys="weight")).reshape(p.resolution)
        weights_r = np.array(nest.GetStatus(
            self.conn_r, keys="weight")).reshape(p.resolution)

        return n_l, n_r, weights_l, weights_r
Example #19
    def __setup_nest(self):
        """ Initializes the NEST kernel.

        Reset the NEST kernel and pass parameters to it.
        The number of seeds for random number generation is computed based on
        the total number of virtual processes
        (number of MPI processes x number of threads per MPI process).
        """
        nest.ResetKernel()

        # set seeds for random number generation
        nest.SetKernelStatus(
            {'local_num_threads': self.sim_dict['local_num_threads']})
        N_vp = nest.GetKernelStatus('total_num_virtual_procs')

        rng_seed = self.sim_dict['rng_seed']

        if nest.Rank() == 0:
            print('RNG seed: {} '.format(rng_seed))
            print('  Total number of virtual processes: {}'.format(N_vp))

        # pass parameters to NEST kernel
        self.sim_resolution = self.sim_dict['sim_resolution']
        kernel_dict = {
            'resolution': self.sim_resolution,
            'rng_seed': rng_seed,
            'overwrite_files': self.sim_dict['overwrite_files'],
            'print_time': self.sim_dict['print_time']
        }
        nest.SetKernelStatus(kernel_dict)
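Example #19 uses the NEST 3 scheme, in which a single `rng_seed` (together with the number of virtual processes) determines all per-VP random number generators; compare the two-key NEST 2.x scheme in Example #23 below. A minimal sketch (NEST 3.x):

    nest.ResetKernel()
    nest.SetKernelStatus({'local_num_threads': 4, 'rng_seed': 12345})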
Example #20
def run_simulation():
    """Performs a simulation, including network construction"""

    # open log file
    with Logger(params['log_file']) as logger:

        nest.ResetKernel()
        nest.set_verbosity('M_INFO')

        logger.log(str(memory_thisjob()) + ' # virt_mem_0')

        sr = build_network(logger)

        tic = time.time()

        nest.Simulate(params['presimtime'])

        PreparationTime = time.time() - tic

        logger.log(str(memory_thisjob()) + ' # virt_mem_after_presim')
        logger.log(str(PreparationTime) + ' # presim_time')

        tic = time.time()

        nest.Simulate(params['simtime'])

        SimCPUTime = time.time() - tic

        logger.log(str(memory_thisjob()) + ' # virt_mem_after_sim')
        logger.log(str(SimCPUTime) + ' # sim_time')

        if params['record_spikes']:
            logger.log(str(compute_rate(sr)) + ' # average rate')

        print(nest.GetKernelStatus())
Example #21
    def test_simple(self):
        nodes = nest.Create('iaf_psc_alpha', 5)
        nest.Connect(nodes, nodes, 'one_to_one')

        nest.DisconnectOneToOne(nodes[0], nodes[0], 'static_synapse')

        self.assertEqual(nest.GetKernelStatus('num_connections'), 4)
Example #22

    def simulate(self, dvs_data, reward):
        """Simulate the SNN (use this for training as weights are changed)."""
        # Set reward signal for left and right network
        nest.SetStatus(self.conn_l, {"n": -reward})
        nest.SetStatus(self.conn_r, {"n": reward})
        # Set poisson neuron firing time span
        time = nest.GetKernelStatus("time")
        nest.SetStatus(self.spike_generators, {"origin": time})
        nest.SetStatus(self.spike_generators, {"stop": params.sim_time})
        # Set poisson neuron firing frequency
        dvs_data = dvs_data.reshape(dvs_data.size)
        for i in range(dvs_data.size):
            rate = dvs_data[i] / params.max_spikes
            rate = np.clip(rate, 0, 1) * params.max_poisson_freq
            nest.SetStatus([self.spike_generators[i]], {"rate": rate})
        # Simulate network in NEST
        nest.Simulate(params.sim_time)
        # Get left and right output spikes
        n_l = nest.GetStatus(self.spike_detector, keys="n_events")[0]
        n_r = nest.GetStatus(self.spike_detector, keys="n_events")[1]
        # Reset output spike detector
        nest.SetStatus(self.spike_detector, {"n_events": 0})
        # Get network weights
        weights_l = np.array(nest.GetStatus(
            self.conn_l, keys="weight")).reshape(params.resolution)
        weights_r = np.array(nest.GetStatus(
            self.conn_r, keys="weight")).reshape(params.resolution)
        return n_l, n_r, [weights_l, weights_r]
Example #23
    def __init__(self, dt, nthreads=1):

        self.name = self.__class__.__name__
        nest.ResetKernel()
        nest.set_verbosity('M_QUIET')
        self.dt = dt

        # parameters = nest.GetDefaults(self.stn_model_name)
        # for i in parameters:
        # print(i, parameters[i])

        if not os.path.exists(self.data_path):
            os.makedirs(self.data_path)

        nest.SetKernelStatus({
            "resolution": dt,
            "print_time": False,
            "overwrite_files": True,
            "data_path": self.data_path,
            "local_num_threads": nthreads
        })

        np.random.seed(2)

        # Create and seed RNGs
        master_seed = 1000  # master seed
        n_vp = nest.GetKernelStatus('total_num_virtual_procs')
        master_seed_range1 = range(master_seed, master_seed + n_vp)
        self.pyrngs = [np.random.RandomState(s) for s in master_seed_range1]
        master_seed_range2 = range(master_seed + n_vp + 1,
                                   master_seed + 1 + 2 * n_vp)
        nest.SetKernelStatus({
            'grng_seed': master_seed + n_vp,
            'rng_seeds': master_seed_range2
        })
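Examples #23, #26 and #28 all use the NEST 2.x seeding scheme, in which the global RNG and the per-virtual-process RNGs are seeded separately. A condensed sketch of the convention (assuming NEST 2.x and a chosen master seed `msd`):

    msd = 1000  # master seed
    n_vp = nest.GetKernelStatus('total_num_virtual_procs')
    nest.SetKernelStatus({'grng_seed': msd + n_vp,
                          'rng_seeds': list(range(msd + n_vp + 1, msd + 2 * n_vp + 1))})
    # NEST 3.x replaced both keys with a single 'rng_seed' (see Example #19).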
Example #24
def SaveSimulationToFile(filename, network, memories):
    network_nodes = network['population_nodes']

    memory_nodes = []
    for memory in memories:
        memory_nodes += GetPatternNodes(memory, network_nodes[0])

    L23_sd = nest.GetStatus(network['device_nodes'][0], keys='events')[0]
    L4_sd = nest.GetStatus(network['device_nodes'][1], keys='events')[0]
    basket_sd = nest.GetStatus(network['device_nodes'][2], keys='events')[0]

    multimeters_per_MC = []
    for mm in network['device_nodes'][3]:
        multimeters_per_MC.append(nest.GetStatus(mm)[0])

    alldata = {
        'population_nodes': network_nodes,
        'spike_detectors': [L23_sd, L4_sd, basket_sd],
        'multimeters': multimeters_per_MC,
        'memories': memory_nodes,
        'parameters': CreateParametersDict(),
        'simtime': nest.GetKernelStatus()['time']
    }

    with open(filename, "wb") as fp:
        pickle.dump(alldata, fp)
Example #25
def resume(data, local_num_threads=1):
    # print data
    recorders = []
    for idx, node in enumerate(data['nodes']):
        if len(node.get('ids', [])) == 0:
            continue
        if node['element_type'] != 'recorder':
            nest.SetStatus(node['ids'], params=paramify.resume(node))
        else:
            recorders.append((idx, node['ids']))

    # for link in data['links']:
    #     if link.get('disabled', False): continue
    #     if data['nodes'][link['source']].get('disabled', False): continue
    #     if data['nodes'][link['target']].get('disabled', False): continue
    #     if data['nodes'][link['target']]['model'] == 'recorder': continue
    #     if not data['nodes'][link['source']].get('ids', False): continue
    #     if not data['nodes'][link['target']].get('ids', False): continue
    #     source = data['nodes'][link['source']]['ids']
    #     target = data['nodes'][link['target']]['ids']
    #     syn_spec = link.get('syn_spec',{'weight': 1.})
    #     nest.SetStatus(nest.GetConnections(source,target),syn_spec)

    nest.Simulate(float(data['sim_time']))
    data['kernel']['time'] = nest.GetKernelStatus('time')

    for idx, recorder in recorders:
        events = nest.GetStatus(recorder, 'events')[0]
        data['nodes'][idx]['events'] = dict(
            map(lambda X: (X[0], X[1].tolist()), events.items()))
        nest.SetStatus(recorder, {'n_events': 0})

    return data
Example #26
def main(num):
    nest.ResetKernel()
    msd = int(time.time())
    N_vp = nest.GetKernelStatus(['total_num_virtual_procs'])[0]
    nest.SetKernelStatus(
        {'rng_seeds': range(msd + N_vp + 1, msd + 2 * N_vp + 1)})

    #SET PARAMETERS
    numNeurons = 50
    cE = float((.8 * numNeurons) / 10)
    poisson_rate = 5.0  #1000.0*((2.0*30.0)/(0.1*20.0*cE))*cE
    neuronPop, popMatrix = readAndCreate("resultingMatrix.csv")

    #CREATE NODES
    noise = nest.Create("poisson_generator", 1, {'rate': poisson_rate})
    #noiseIn = nest.Create("poisson_generator",1,{'rate':10000.0})
    #sine = nest.Create("ac_generator",1,{"amplitude": 100.0, "frequency" :2.0})
    spikes = nest.Create("spike_detector", 1)
    #spikesEx = spikes[:1]
    #spikesIn = spikes[1:]

    Ex = 1
    d = 20.0
    wEx = 15.1
    wIn = -1.0

    #SPECIFY CONNECTION DICTIONARIES
    conn_dict = {
        "rule": "fixed_indegree",
        "indegree": Ex,
        "autapses": False,
        "multapses": False
    }  #connection dictionary
    syn_dict_ex = {"delay": d, "weight": wEx}
    syn_dict_in = {"delay": d, "weight": wIn}

    #SPECIFY CONNECTIONS
    nest.Connect(noise, neuronPop, syn_spec=syn_dict_ex)
    nest.Connect(neuronPop, spikes)

    #readAndConnect("./Syn Weights/syn_weights1.csv",pop)
    simTime = 10000.0
    nest.Simulate(simTime)
    n = nest.GetStatus(spikes, "events")[0]
    temp = numpy.array([n['senders'], n['times']])
    fullMatrix = spikeTimeMatrix(temp, len(neuronPop), int(simTime))
    numpy.savetxt("./Spike Results/" + str(num) + "idTimes.csv",
                  fullMatrix,
                  delimiter=',')
    #pylab.figure(2)
    #plot1 = drawNetwork(neuronPop)
    #plt.show()
    plot = nest.raster_plot.from_device(spikes, hist=True)
    # The exact neuron spikes and corresponding timings can be obtained by
    # viewing the events dictionary of GetStatus(spikesEx, "events").
    #print nest.GetStatus(spikes, "events")
    #print nest.GetStatus(nest.GetConnections(neuronPop, synapse_model = 'stdp_synapse'))
    plt.show()
Example #27

def _round_up(simtime):
    """
    Returns simulation time rounded up to next multiple of resolution.
    """

    res = nest.GetKernelStatus('resolution')
    return res * math.ceil(float(simtime) / float(res))
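For illustration, with a kernel resolution of 0.1 ms, a hypothetical call `_round_up(10.05)` computes 0.1 * math.ceil(10.05 / 0.1) = 0.1 * 101 = 10.1 ms, so the returned simulation time always lands on the time grid.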
Example #28

    def test_decay_Rpre_half_fail(self):
        """Stochastic and selective increase of r_jk, plus decay"""
        self.setUp_decay(params={'p_fail': .2, 'n_pot_conns': 2})

        # this seed lets the first spike on syn 0 and the second spike on syn 1
        # pass
        msd = 4
        N_vp = nest.GetKernelStatus(['total_num_virtual_procs'])[0]
        nest.SetKernelStatus({'grng_seed': msd + N_vp})
        nest.SetKernelStatus(
            {'rng_seeds': range(msd + N_vp + 1, msd + 2 * N_vp + 1)})

        nest.Simulate(210.)

        syn_status = nest.GetStatus(self.syn)
        syn_defaults = nest.GetDefaults('testsyn')
        val = syn_status[0]['r_jk']
        dt = 0.001
        tau = syn_defaults['tau']
        prop = np.exp(-dt / tau)

        spike0 = 1. / tau * prop**100.  # propagate spike 1 by 100ms
        # no spike increase on 1, only second spike increase on 2
        val_exp = [spike0, 1. / tau]
        for k in range(len(val)):
            self.assertAlmostEqualDetailed(
                val_exp[k], val[k],
                "Decay of r_jk[%i] not as expected (stochastic case)" % k)
Example #29
    def set_drive(self, length, drive_type='baseline', delay=None):
        drive_types = ['baseline', 'rewarding', 'aversive']
        if drive_type not in drive_types:
            raise ValueError('drive_type must be one of:', drive_types)

        begin = nest.GetKernelStatus()['time'] + self.dt
        end = begin + length - .5 * self.dt  # subtract .5 dt for numerical stability

        if drive_type == 'baseline':
            spike_times = np.arange(begin, end, self.dt)
        else:
            if delay is None:
                raise ValueError(
                    'It is necessary to specify the delay for reward or aversion'
                )
            delivery = begin + delay
            if drive_type == 'rewarding':  # i.e the baseline with some extra spikes
                spike_times = np.sort(
                    np.concatenate(
                        (np.arange(begin, end, self.dt),
                         np.arange(delivery,
                                   delivery + (self.salience - .5) * self.dt,
                                   self.dt))))
            elif drive_type == 'aversive':  # i.e. the baseline with some missing spikes
                spike_times = np.concatenate(
                    (np.arange(begin, delivery - .5 * self.dt, self.dt),
                     np.arange(delivery + (self.dt * self.salience), end,
                               self.dt)))

        spike_times = np.round(spike_times, decimals=1)
        nest.SetStatus(self.drive, params={'spike_times': spike_times})
Example #30
File: db_main.py Project: SBottani/NNGT
    def log_simulation_end(self, activity=None):
        '''
        Record the simulation completion and simulated times, save the data,
        then reset.
        '''
        if self.is_clear():
            raise RuntimeError("Database log ended with empty log.")
        # get completion time and simulated time
        self.current_simulation['completion_time'] = datetime.now()
        start_time = self.current_simulation['simulated_time']
        new_time = nest.GetKernelStatus('time')
        self.current_simulation['simulated_time'] = new_time - start_time
        # save activity if provided
        if activity is not None:
            self._make_activity_entry(activity)
        # save data and reset
        self.computer.save()
        self.neuralnet.save()
        for entry in iter(self.nodes.values()):
            entry.save()
        for entry in iter(self.connections.values()):
            entry.save()
        simul_data = Simulation(**self.current_simulation)
        simul_data.save()
        if config["to_file"]:
            db_cls = list(self.tables.values())
            q = (Simulation.select(*db_cls).join(Computer).join(NeuralNetwork).
                 join(Activity).join(Neuron).join(Synapse).join(Connection))
            dump_csv(
                q, "{}_{}.csv".format(self.computer.name,
                                      simul_data.completion_time))
        self.reset()