def test_MultipleSynapses_spatial_network_receptor_type(self):
        """test co-location of synapses for spatial networks with receptor_type"""
        num_src = 11
        num_trgt = 37
        indegree = 3
        max_receptor_type = 7

        spatial_nodes_src = nest.Create(
            'iaf_psc_exp_multisynapse',
            num_src, {'tau_syn': [0.1 + i for i in range(max_receptor_type)]},
            positions=nest.spatial.free(nest.random.uniform(),
                                        num_dimensions=2))
        spatial_nodes_trgt = nest.Create(
            'iaf_psc_exp_multisynapse',
            num_trgt, {'tau_syn': [0.1 + i for i in range(max_receptor_type)]},
            positions=nest.spatial.free(nest.random.uniform(),
                                        num_dimensions=2))

        receptor_type_a = max_receptor_type - 3
        receptor_type_b = max_receptor_type
        nest.Connect(
            spatial_nodes_src, spatial_nodes_trgt,
            {'rule': 'fixed_indegree', 'indegree': indegree},
            nest.CollocatedSynapses(
                {'weight': 3.,
                 'receptor_type': receptor_type_a},
                {'weight': nest.spatial_distributions.exponential(nest.spatial.distance),
                 'delay': 1.4,
                 'receptor_type': receptor_type_b}))
        conns = nest.GetConnections()
        self.assertEqual(num_trgt * indegree * 2, len(conns))
        reference = sorted([receptor_type_a, receptor_type_b] * num_trgt *
                           indegree)
        self.assertEqual(sorted(conns.receptor), reference)

    def test_connect_synapse_label(self):
        """Connect a spatial layer to itself with a labeled synapse model and check the labels."""
        indegree = 10
        conn_spec = {
            'rule': 'fixed_indegree',
            'indegree': indegree,
            'p': 1.0,
            'mask': {
                'rectangular': {
                    'lower_left': [-5., -5.],
                    'upper_right': [0., 0.]
                }
            }
        }
        syn_label = 123
        syn_spec = {'synapse_model': 'stdp_synapse_lbl', 'synapse_label': syn_label}

        nest.Connect(self.layer, self.layer, conn_spec, syn_spec)
        conns = nest.GetConnections()
        self.assertEqual(conns.get('synapse_label'), [syn_label]*len(self.layer)*indegree)
Example no. 3
import nest
from numpy import zeros, array


def adj_w_mat(ids, prj_name):
    '''Compute the weighted adjacency matrix.

    Slower than adj_mat because of the per-source loop.
    ids: ids of the nodes for which to compute the weights
    prj_name: name of the projection (synapse model)'''

    l1 = len(ids)
    offset = min(ids)
    mat = zeros((l1, l1))
    for ii, sid in enumerate(ids):
        # legacy NEST API: GetConnections returns dicts with 'targets'/'weights'
        info = nest.GetConnections([sid], prj_name)
        weights = info[0]['weights']
        tgt = array(info[0]['targets']) - offset
        if len(tgt) > 0:  # note: any(tgt) would wrongly skip the lowest-id target
            mat[ii, tgt] = weights

    return mat
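With NEST 3 the per-source loop above can be avoided, since GetConnections returns a SynapseCollection whose sources, targets and weights can be read in bulk. A minimal sketch under that assumption (the name adj_w_mat_bulk and the contiguous-id requirement are mine, not from the original snippet):

import numpy as np
import nest


def adj_w_mat_bulk(nodes, synapse_model=None):
    '''Weighted adjacency matrix without a per-source loop (NEST 3 sketch).

    Assumes nodes is a NodeCollection with contiguous ids; multapses
    overwrite rather than accumulate in this simple version.'''
    conns = nest.GetConnections(source=nodes, target=nodes,
                                synapse_model=synapse_model)
    data = conns.get(['source', 'target', 'weight'])
    offset = min(nodes.tolist())
    mat = np.zeros((len(nodes), len(nodes)))
    src = np.atleast_1d(data['source']) - offset
    tgt = np.atleast_1d(data['target']) - offset
    mat[src, tgt] = np.atleast_1d(data['weight'])
    return mat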
Example no. 4
    def test_connect_arrays_nonunique_dict_conn_spec(self):
        """Connecting NumPy arrays with non-unique node IDs and conn_spec as a dict"""
        n = 10
        nest.Create('iaf_psc_alpha', n)
        sources = np.arange(1, n+1, dtype=np.uint64)
        targets = self.non_unique
        weights = 2 * np.ones(n)
        delays = 1.5 * np.ones(n)

        nest.Connect(sources, targets, syn_spec={'weight': weights, 'delay': delays},
                     conn_spec={'rule': 'one_to_one'})

        conns = nest.GetConnections()

        for s, t, w, d, c in zip(sources, targets, weights, delays, conns):
            self.assertEqual(c.source, s)
            self.assertEqual(c.target, t)
            self.assertEqual(c.weight, w)
            self.assertEqual(c.delay, d)
Example no. 5
    def _assert_connect_layers_multapses(self, multapses):
        """Helper function which asserts that connecting with or without allowing multapses
        gives the expected number of multapses."""
        conn_spec = {
            'rule': 'fixed_indegree',
            'indegree': 10,
            'p': 1.0,
            'allow_autapses': False,
            'allow_multapses': multapses,
        }
        nest.Connect(self.layer, self.layer, conn_spec)
        conns = nest.GetConnections()
        conn_pairs = np.array([list(conns.sources()), list(conns.targets())]).T
        num_nonunique_conns = len(conn_pairs) - len(np.unique(conn_pairs, axis=0))
        if multapses:
            self.assertGreater(num_nonunique_conns, 0)
        else:
            self.assertEqual(num_nonunique_conns, 0)
Example no. 6
def _set_random_teacher_weights(sim_params, inputs_pa, teacher):
    if sim_params["do_shift_weights"]:
        # apply a random shift to avoid bias towards u_target > u or u_target < u
        # NB: with the threshold at 0.0 the negative shift is never taken, since
        # np.random.rand() lies in [0, 1); a threshold of 0.5 would give the 50/50 split.
        if np.random.rand() < 0.0:
            weights_shift = -15.0
        else:
            weights_shift = 15.0
    else:
        weights_shift = 0.0
    for conn in nest.GetConnections(source=inputs_pa, target=teacher):
        weight = np.random.uniform(*sim_params["range_teacher_weights"]) + weights_shift
        nest.SetStatus(conn, {"weight": weight})
Example no. 7
    def process_from_mem(self):
        reversed_gids = dict()
        for (key, value) in self.neurons.items():
            reversed_gids[value] = str(key)

        # for (key, value) in self.source.items():
        #     reversed_gids[value[0]] = str(key)

        num_points = int(self.n_trials * self.t_trial / self.dt)
        for i in range(len(self.multimeter)):
            gids = []
            for conn in nest.GetStatus(nest.GetConnections(self.multimeter[i])):
                gids.append(int(reversed_gids[conn["target"]]))

            events = nest.GetStatus(self.multimeter[i])[0]["events"]
            for gid in gids:
                sender_mask = np.where(events['senders'] == self.neurons[gid])
                self.sim_time[gid] = events['times'][sender_mask][:int(self.t_trial / self.dt)]
                self.v_m[gid] = events['V_m'][sender_mask][:num_points]
                self.v_m[gid].shape = (self.n_trials, int(self.t_trial / self.dt))
Example no. 8
    def test_SetLabelToSynapseSetDefaults(self):
        """Set a label to a labeled synapse on SetDefaults."""

        labeled_synapse_models = [s for s in nest.Models(
            mtype='synapses') if s.endswith("_lbl")]
        for syn in labeled_synapse_models:
            a = self.default_network(syn)

            # see if symmetric connections are required
            symm = nest.GetDefaults(syn, 'requires_symmetric')

            # set a label during SetDefaults
            nest.SetDefaults(syn, {'synapse_label': 123})
            nest.Connect(a, a, {"rule": "one_to_one", "make_symmetric": symm},
                         {"synapse_model": syn})
            c = nest.GetConnections(a, a)
            self.assertTrue(
                all([x == 123 for x in c.get('synapse_label')])
            )
Example no. 9
    def _E_pop_connectivity_matrix(self):
        # First create a map from global ids to matrix indices
        prev_max_gid = 0
        gids, inds = list(), dict()
        for pop in self.sp_group['excitatory']:
            gids += self.nodes[pop]
            gid = np.array(self.nodes[pop])
            ind = gid - np.min(gid) + prev_max_gid + 1
            for g, i in zip(gid, ind):
                inds[g] = i
            prev_max_gid = np.max(gid)
        # Then calculate the matrix itself
        cnn_matrix = np.zeros((len(gids), len(gids)))
        for cnn in nest.GetConnections(gids, gids):
            pre = nest.GetStatus([cnn], 'source')[0]
            post = nest.GetStatus([cnn], 'target')[0]
            weights = nest.GetStatus([cnn], 'weight')
            cnn_matrix[inds[pre] - 1, inds[post] - 1] += np.sum(weights)
        return sparse.coo_matrix(cnn_matrix)
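Calling GetStatus three times per connection, as above, becomes slow for large networks; GetStatus also accepts a whole connection list together with a list of keys, so the inner loop can be batched. A minimal sketch under the same NEST 2.x-style API (the helper name is hypothetical):

import numpy as np
import nest
from scipy import sparse


def e_pop_connectivity_matrix_bulk(gids, inds):
    '''Same matrix as above, built from a single batched GetStatus call.'''
    cnn_matrix = np.zeros((len(gids), len(gids)))
    conns = nest.GetConnections(gids, gids)
    for pre, post, weight in nest.GetStatus(conns, ['source', 'target', 'weight']):
        cnn_matrix[inds[pre] - 1, inds[post] - 1] += weight
    return sparse.coo_matrix(cnn_matrix)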
Example no. 10
    def test_connect_arrays_no_delays(self):
        """Connecting NumPy arrays without specifying delays"""
        n = 10
        nest.Create('iaf_psc_alpha', n)
        sources = np.arange(1, n + 1, dtype=np.uint64)
        targets = self.non_unique
        weights = np.ones(n)

        nest.Connect(sources,
                     targets,
                     conn_spec='one_to_one',
                     syn_spec={'weight': weights})

        conns = nest.GetConnections()

        for s, t, w, c in zip(sources, targets, weights, conns):
            self.assertEqual(c.source, s)
            self.assertEqual(c.target, t)
            self.assertEqual(c.weight, w)
Example no. 11
def update_weight(params, inputs_pa, neurons, R, plasticity_rule):

    history_E = []
    history_w = []
    history_delta_w = []
    for target_gid in neurons:
        conn = nest.GetConnections(source=inputs_pa, target=target_gid)
        for conn_i in conn:
            w = nest.GetStatus(conn_i, "weight")[0]
            E = nest.GetStatus(conn_i, "E")[0]
            delta_w = plasticity_rule(params["learning_rate"], R, E)

            history_E.append(E)
            history_w.append(w)
            history_delta_w.append(delta_w)

            nest.SetStatus(conn_i, {"weight": w + delta_w})

    return history_E, history_w, history_delta_w
Example no. 12
    def testMultapses(self):
        """Weight Recorder Multapses"""

        nest.ResetKernel()
        nest.SetKernelStatus({"local_num_threads": 2})

        wr = nest.Create('weight_recorder', params={"withport": True})
        nest.CopyModel("stdp_synapse", "stdp_synapse_rec",
                       {"weight_recorder": wr[0], "weight": 1.})

        sg = nest.Create("spike_generator",
                         params={"spike_times": [10., 15., 55., 70.]})
        pre = nest.Create("parrot_neuron", 5)
        post = nest.Create("parrot_neuron", 5)

        nest.Connect(pre, post, 'one_to_one', syn_spec="stdp_synapse_rec")
        nest.Connect(pre, post, 'one_to_one', syn_spec="stdp_synapse_rec")
        nest.Connect(sg, pre)

        # simulate before GetConnections
        # as order of connections changes at beginning of simulation (sorting)
        nest.Simulate(1)

        connections = [(c[0], c[1], c[4])
                       for c in nest.GetConnections(pre, post)]

        nest.Simulate(100)

        wr_events = nest.GetStatus(wr, "events")[0]
        senders = wr_events["senders"]
        targets = wr_events["targets"]
        ports = wr_events["ports"]
        ids = list(zip(senders, targets, ports))

        # create an array of object dtype to use np.unique to get
        # unique ids
        unique_ids = np.empty(len(ids), dtype=object)
        for i, v in enumerate(ids):
            unique_ids[i] = v
        unique_ids = np.unique(unique_ids)

        self.assertEqual(sorted(unique_ids), sorted(connections))
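The object-dtype detour above is only there to deduplicate the (sender, target, port) tuples without np.unique flattening them; a plain Python set over the tuples gives the same result. A drop-in sketch, not taken from the original test:

        # equivalent deduplication with a set instead of an object-dtype array
        unique_ids = sorted(set(ids))
        self.assertEqual(unique_ids, sorted(connections))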
Example no. 13
    def random_reconnect(self):
        """Adds connection from neurons which recently fired to a random target. Can result in no change."""
        # todo use self.getrecentfiring()

        # calculate neurons which fired in the last cycle
        spikesenders: List[float] = self.spike_detector.get(
            {"events"})["events"]["senders"]
        # filter spikes since last cycle
        spikesenders: List[float] = spikesenders[self.last_num_spikes:]
        neurons_fired_cycle = np.unique(spikesenders)

        source = np.random.choice(neurons_fired_cycle, 1)[0]
        type = "excitatory" if source in self.neur_ids_ex else "inhibitory"
        # find target indices with zero weight, i.e. no existing connection from this source
        noconn_from_source = set(
            np.where(self.actor.lastweightsmatrix[source, :] == 0)[0])
        candidates = set(self.neur_ids_core) & noconn_from_source
        if len(candidates) > 1 and self.synapsecontingent > 0:
            self.synapsecontingent -= 1
            # no self connection
            candidates.remove(source)
            target = np.random.choice(list(candidates), 1)[0]
            print(f"random connect of {source}->{target}")
            # add to front-end
            self.add_connection(source, target, type)
            nest.set_verbosity("M_ERROR")
            nest.Connect(nest.NodeCollection([source]),
                         nest.NodeCollection([target]),
                         syn_spec={
                             'synapse_model':
                             'stdp_dopamine_synapse_in' if type == "inhibitory"
                             else 'stdp_dopamine_synapse_ex'
                         })
            nest.set_verbosity("M_WARNING")
            synapse = nest.GetConnections(source=nest.NodeCollection([source]),
                                          target=nest.NodeCollection([target]))
            if type == "inhibitory":
                synapse.set({"weight": -gv.w0_min})
            else:
                synapse.set({"weight": gv.w0_min})
            # indices have changed, so update everything
            self.update_connections_nest()
Example no. 14
    def set_stimulus_times(stim_t, stim_id,Wmax):
        # The first ms must be set individually,
        # otherwise the stimulus spike would have had to occur at -1 ms
        nest.SetStatus([neurons[stim_id[0] - 1]], 'I', 2*Wmax)
        stim_id = stim_id[stim_t > 0]
        stim_t = stim_t[stim_t > 0]

        # create a spike generator for every neuron that receives the spike times read out from the original
        random_input = nest.Create('spike_generator', len(neurons))
        nest.Connect(random_input, neurons, 'one_to_one')
        # 20 mV is enough to make a neuron spike
        nest.SetStatus(nest.GetConnections(random_input), 'weight', 2*Wmax)

        #set spike times
        for i in np.unique(stim_id):
            idx = stim_id == i
            times = stim_t[idx]
            nest.SetStatus([random_input[int(i - 1)]], {'spike_times': times})
        del stim_id
        del stim_t
def create_adjacency_matrix(src_nodes, target_nodes):
    """
    Creates the adjacency matrix A for the connections between source and target nodes,
    with shape (len(src_nodes), len(target_nodes)); A_ij = weight if there is a
    connection between the corresponding source and target node and 0 otherwise
    :param src_nodes: Source nodes
    :param target_nodes: Target nodes
    :return: Adjacency matrix
    """
    connect_values = nest.GetConnections(source=src_nodes)
    connect_values = [
        connection for connection in connect_values
        if nest.GetStatus([connection], "target")[0] in target_nodes
    ]

    adjacency_mat = np.zeros((len(src_nodes), len(target_nodes)))
    adjacency_mat = set_values_in_adjacency_matrix(connect_values,
                                                   adjacency_mat,
                                                   min(src_nodes),
                                                   min(target_nodes))
    return adjacency_mat
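Since GetConnections can restrict both the source and the target side directly, the per-connection GetStatus filter above is avoidable. A minimal sketch reusing the project's set_values_in_adjacency_matrix helper (the _filtered name is mine):

import numpy as np
import nest


def create_adjacency_matrix_filtered(src_nodes, target_nodes):
    # Let GetConnections do the target filtering instead of one
    # GetStatus call per connection.
    connect_values = nest.GetConnections(source=src_nodes, target=target_nodes)
    adjacency_mat = np.zeros((len(src_nodes), len(target_nodes)))
    return set_values_in_adjacency_matrix(connect_values, adjacency_mat,
                                          min(src_nodes), min(target_nodes))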
Example no. 16
    def test_ThreadsGetConnections(self):
        """GetConnections with threads"""

        if not self.nest_multithreaded():
            self.skipTest("NEST was compiled without multi-threading")

        nest.ResetKernel()
        nest.local_num_threads = 8
        pre = nest.Create("iaf_psc_alpha")
        post = nest.Create("iaf_psc_alpha", 6)

        nest.Connect(pre, post)

        conn = nest.GetConnections(pre)
        # Because of threading, targets may be in a different order than
        # in post, so we sort the vector.
        targets = list(conn.get("target"))
        targets.sort()

        self.assertEqual(targets, post.tolist())
Example no. 17
def plotConnectionDistLogscale(input_pop,output_pop):
    plt.figure()
    connections = nest.GetConnections(input_pop,output_pop)
    weights = nest.GetStatus(connections,["weight"])
    edges = nest.GetStatus(connections,["source","target"])

    n_inputs = len(weights)
    print("Number of connections:",n_inputs)

    mu = np.mean(weights)
    sigma2 = np.var(weights)
    print("Mean weight: %f\n"%mu)
    print("Variance of weights: %f\n"%sigma2)
    bins = np.logspace(np.log10(0.0001), np.log10(10.0), 100)  # logspace takes base-10 exponents
    plt.xscale('log')
    plt.xlim([0.001,10])
    sns.distplot(weights,bins = bins,kde = False)
    plt.xlabel('EPSP (mV)')
    plt.ylabel('Probability density')
    plt.title('Synapse Weight distribution')
Example no. 18
    def test_GetConnectionsSourceModels(self):
        """GetConnections iterating models for source"""
        for model in nest.Models():
            nest.ResetKernel()
            alpha = nest.Create('iaf_psc_alpha')
            try:
                other = nest.Create(model)
                nest.Connect(other, alpha)
            except nest.kernel.NESTError:
                # If we can't create a node with this model, or connect
                # to a node of this model, we ignore it.
                continue
            for get_conn_args in [{'source': other, 'target': alpha},
                                  {'source': other},
                                  {'target': alpha}]:
                conns = nest.GetConnections(**get_conn_args)
                self.assertEqual(
                    len(conns), 1,
                    'Failed to get connection with source model {} (specifying {})'.format(
                        model, ', '.join(get_conn_args.keys())))
Example no. 19
    def test_SetLabelToSynapseOnConnect(self):
        """Set a label to a labeled synapse on connect."""

        for syn in [s for s in nest.synapse_models if s.endswith("_lbl")]:
            a, r_type = self.default_network(syn)

            # see if symmetric connections are required
            symm = nest.GetDefaults(syn, 'requires_symmetric')

            # set a label during connection
            nest.Connect(a, a, {
                "rule": "one_to_one",
                "make_symmetric": symm
            }, {
                "synapse_model": syn,
                "synapse_label": 123,
                "receptor_type": r_type
            })
            c = nest.GetConnections(a, a)
            self.assertTrue(all([x == 123 for x in c.get('synapse_label')]))
Example no. 20
    def test_connect_arrays_receptor_type(self):
        """Connecting NumPy arrays with receptor type specified, threaded"""

        nest.SetKernelStatus({'local_num_threads': 2})

        n = 10
        nest.Create('iaf_psc_alpha', n)
        sources = np.arange(1, n + 1, dtype=np.uint64)
        targets = self.non_unique

        weights = len(sources) * [2.]
        nest.Connect(sources,
                     targets,
                     conn_spec='one_to_one',
                     syn_spec={
                         'weight': weights,
                         'receptor_type': 0
                     })

        self.assertEqual(len(sources) * [0], nest.GetConnections().receptor)
Example no. 21
    def test_AllToAllWeight(self):
        """Weight given as list of lists, when connection rule is all_to_all"""

        src = nest.Create('iaf_psc_alpha', 3)
        tgt = nest.Create('iaf_psc_delta', 2)

        # weight has to be a list of lists with dimension (n_target x n_sources) when all_to_all is used
        ref_weights = [[1.2, -3.5, 2.5], [0.4, -0.2, 0.7]]

        conn_dict = {'rule': 'all_to_all'}
        syn_dict = {'weight': ref_weights}
        nest.Connect(src, tgt, conn_dict, syn_dict)

        conns = nest.GetConnections()
        weights = conns.weight

        # Need to flatten ref_weights in order to compare with the weights given by the SynapseCollection.
        ref_weights = [w for sub_weights in ref_weights for w in sub_weights]

        self.assertEqual(sorted(weights), sorted(ref_weights))
Example no. 22
    def test_FixedOutdegreeWeight(self):
        """Weight given as list of lists, when connection rule is fixed_outdegree"""

        src = nest.Create('iaf_psc_alpha', 2)
        tgt = nest.Create('iaf_psc_delta', 5)

        # weight has to be a list of lists with dimension (n_source x outdegree) when fixed_outdegree is used
        ref_weights = [[1.2, -3.5, 0.4], [-0.2, 0.6, 2.2]]

        conn_dict = {'rule': 'fixed_outdegree', 'outdegree': 3}
        syn_dict = {'weight': ref_weights}
        nest.Connect(src, tgt, conn_dict, syn_dict)

        conns = nest.GetConnections()
        weights = conns.weight

        # Need to flatten ref_weights in order to compare with the weights given by the SynapseCollection.
        ref_weights = [w for sub_weights in ref_weights for w in sub_weights]

        self.assertEqual(sorted(weights), sorted(ref_weights))

    def test_SetLabelToSynapseOnConnect(self):
        """Set a label to a labeled synapse on connect."""

        labeled_synapse_models = [
            s for s in nest.Models(mtype='synapses') if s.endswith("_lbl")
        ]
        for syn in labeled_synapse_models:
            a = self.default_network()

            # set a label during connection
            nest.Connect(a, a, {"rule": "one_to_one"}, {
                "model": syn,
                "synapse_label": 123
            })
            c = nest.GetConnections(a, a)
            self.assertTrue(
                all([
                    status['synapse_label'] == 123
                    for status in nest.GetStatus(c)
                ]))
Example no. 24
    def test_ThreadsGetConnections(self):
        """GetConnections with threads"""

        if not self.nest_multithreaded():
            self.skipTest("NEST was compiled without multi-threading")

        nest.ResetKernel()
        nest.SetKernelStatus({'local_num_threads': 8})
        pre = nest.Create("iaf_neuron")
        post = nest.Create("iaf_neuron", 6)

        nest.Connect(pre, post)

        conn = nest.GetConnections(pre)
        # Because of threading, targets may be in a different order than
        # in post, so we sort the vector.
        targets = list(nest.GetStatus(conn, "target"))
        targets.sort()

        self.assertEqual(targets, list(post))
Example no. 25
    def kolmogorov_smirnov(self, weight_dict, expected_cdf_func):
        """
        Create connections with given distribution of weights and test that it
        fits the given expected cumulative distribution using K-S.
        """

        # n = rows * cols * Nconn
        rows = 10
        cols = 10
        Nconn = 100

        nest.ResetKernel()

        # Create layer and connect with given weight distribution
        layer = topo.CreateLayer(
            {'rows': rows, 'columns': cols, 'elements': 'iaf_neuron'})
        topo.ConnectLayers(layer, layer, {'connection_type': 'convergent',
                                          'number_of_connections': Nconn,
                                          'weights': weight_dict})

        # Get connection weights and sort
        connectome = nest.GetConnections()
        weights = numpy.array(nest.GetStatus(connectome, 'weight'))
        weights.sort()
        n = len(weights)

        # The observed (empirical) cdf is simply i/n for weights[i]
        observed_cdf = numpy.arange(n + 1, dtype=float) / n
        expected_cdf = expected_cdf_func(weights)

        D = max(numpy.abs(expected_cdf - observed_cdf[:-1]).max(),
                numpy.abs(expected_cdf - observed_cdf[1:]).max())

        # Code to find Kalpha corresponding to level alpha:
        # alpha = 0.05
        # import scipy.optimize,scipy.stats
        # Kalpha = scipy.optimize.fmin(lambda x:
        #                              abs(alpha-scipy.stats.ksprob(x)), 1)[0]
        Kalpha = 1.3581054687500012

        self.assertTrue(sqrt(n) * D < Kalpha)
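The commented-out lines above find the critical value Kalpha numerically; scipy also exposes the limiting Kolmogorov distribution directly, so the same constant can be obtained in one line (an aside, not part of the original test):

# critical value of sqrt(n) * D at alpha = 0.05 via the asymptotic KS distribution
from scipy.stats import kstwobign
Kalpha = kstwobign.ppf(1 - 0.05)  # ~1.3581
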
def save_net(net, network_name, feature_folder, path="", use_cwd=True):
    """
    Save the neurons and connections of the network to be loaded quicker later
    :param net: The network object
    :param network_name: Network name for determining the saving location
    :param feature_folder: For saving the network with a particular feature
    :param path: Path where the network is saved to. If not set, the default is used
    :param use_cwd: If set to True, the current directory is added to the path
    :return: None
    """
    connect = nest.GetConnections(net.torus_layer_nodes)
    connect = tuple([(c[0], c[1]) for c in connect
                     if c[1] in net.torus_layer_nodes])
    net_dict = {
        "neurons": [tuple(net.torus_layer_nodes)],
        "inh_neurons": [tuple(net.torus_inh_nodes)],
        "positions": [net.torus_layer_positions],
        "tuning_neuron": [net.tuning_to_neuron_map],
        "neuron_tuning": [net.neuron_to_tuning_map],
        "color_map": [tuple(net.color_map.reshape(-1))],
        "connect": [connect],
    }

    net_df = pd.DataFrame(net_dict)

    if path == "":
        curr_dir = os.getcwd()
        if feature_folder != "":
            path = "%s/network_files/models/%s/%s" % (curr_dir, network_name,
                                                      feature_folder)
        else:
            path = "%s/network_files/models/%s" % (curr_dir, network_name)
        Path(path).mkdir(parents=True, exist_ok=True)
    elif use_cwd:
        curr_dir = os.getcwd()
        path = curr_dir + path + network_name

    num = len(os.listdir(path))
    net_df.to_csv("%s/%s_%s.csv" % (path, network_name, num),
                  encoding='utf-8',
                  index=False)
Example no. 27
    def test_synapse_creation(self):
        for syn_model in nest.Models('synapses'):
            if syn_model not in self.exclude_synapse_model:
                nest.ResetKernel()
                syn_dict = {'model': syn_model, 'pre_synaptic_element': 'SE1', 'post_synaptic_element': 'SE2'}
                nest.SetStructuralPlasticityStatus({'structural_plasticity_synapses': {'syn1': syn_dict}})
                neurons = nest.Create('iaf_neuron', 2, {
                    'synaptic_elements': {
                        'SE1': {'z': 10.0, 'growth_rate': 0.0},
                        'SE2': {'z': 10.0, 'growth_rate': 0.0}
                    }
                })
                nest.EnableStructuralPlasticity()
                nest.Simulate(10.0)
                status = nest.GetStatus(neurons, 'synaptic_elements')
                for st_neuron in status:
                    self.assertEqual(10, st_neuron['SE1']['z_connected'])
                    self.assertEqual(10, st_neuron['SE2']['z_connected'])

                self.assertEqual(20, len(nest.GetConnections(neurons, neurons, syn_model)))
                break
Example no. 28
    def test_MultipleSynapses_receptor_type_ht_neuron(self):
        """Test co-location of synapses with different receptor types and ht_neuron"""
        num_src = 9
        num_trg = 9

        src = nest.Create('ht_neuron', num_src)
        trgt = nest.Create('ht_neuron', num_trg)

        syn_spec = nest.CollocatedSynapses({'synapse_model': 'stdp_synapse',
                                            'weight': 5.,
                                            'receptor_type': 2},
                                           {'weight': 1.5, 'receptor_type': 4},
                                           {'synapse_model': 'stdp_synapse', 'weight': 3, 'receptor_type': 3})

        nest.Connect(src, trgt, 'one_to_one', syn_spec=syn_spec)

        conns = nest.GetConnections()
        # receptors are 1 less than receptor_type for ht_neuron
        ref_receptor_type = [1]*num_src + [2]*num_src + [3]*num_src

        self.assertEqual(ref_receptor_type, sorted(conns.receptor))
Example no. 29
    def test_MultipleSynapses_spatial_network(self):
        """test co-location of synapses for spatial networks with fixed indegree"""
        num_src = 11
        num_trgt = 37
        indegree = 3

        spatial_nodes_src = nest.Create('iaf_psc_alpha', n=num_src,
                                        positions=nest.spatial.free(nest.random.uniform(), num_dimensions=2))
        spatial_nodes_trgt = nest.Create('iaf_psc_alpha', n=num_trgt,
                                         positions=nest.spatial.free(nest.random.uniform(), num_dimensions=2))

        nest.Connect(spatial_nodes_src, spatial_nodes_trgt, {'rule': 'fixed_indegree', 'indegree': indegree},
                     nest.CollocatedSynapses({'weight': -3.},
                                             {'weight': nest.spatial_distributions.exponential(nest.spatial.distance),
                                              'delay': 1.4}))
        conns = nest.GetConnections()

        self.assertEqual(num_trgt * indegree * 2, len(conns))

        weights = conns.weight
        self.assertEqual(sorted(weights)[:num_trgt * indegree], [-3]*num_trgt*indegree)
Example no. 30
def get_connectivity_matrix(pop1, pop2):
    '''
    Returns a connectivity matrix describing all connections from pop1 to pop2
    such that M_ij counts the connections from the j-th neuron in pop1 to the
    i-th neuron in pop2.
    '''

    M = np.zeros((len(pop2), len(pop1)))
    connections = nest.GetConnections(pop1, pop2)
    index_dic = {}
    pop1 = np.asarray(pop1)
    pop2 = np.asarray(pop2)
    for node in pop1:
        index_dic[node] = np.where(pop1 == node)[0][0]
    for node in pop2:
        index_dic[node] = np.where(pop2 == node)[0][0]
    for conn in connections:
        source_id = conn[0]
        target_id = conn[1]
        M[index_dic[target_id]][index_dic[source_id]] += 1
    return M
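For large populations the per-connection Python loop in get_connectivity_matrix dominates the run time. Below is a hedged, vectorized sketch of the same count matrix; the name get_connectivity_matrix_fast is mine, and it assumes sorted GID lists plus the NEST 2.x GetStatus key-list API:

import numpy as np
import nest


def get_connectivity_matrix_fast(pop1, pop2):
    '''Connection-count matrix M with M_ij = #connections from pop1[j] to pop2[i].'''
    conns = nest.GetConnections(pop1, pop2)
    status = nest.GetStatus(conns, ['source', 'target'])
    pop1 = np.asarray(pop1)
    pop2 = np.asarray(pop2)
    M = np.zeros((len(pop2), len(pop1)))
    if status:
        src, tgt = np.array(status).T
        cols = np.searchsorted(pop1, src)   # index of each source in pop1
        rows = np.searchsorted(pop2, tgt)   # index of each target in pop2
        np.add.at(M, (rows, cols), 1)       # accumulate multapses as counts
    return M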