Example #1
def main(plot_torus=True,
         plot_target=True,
         num_plot_tagets=3,
         use_lr_connection_type=NETWORK_DICT["np"]):
    """
    Main function running the test routines
    :param plot_torus: Flag to plot neural layer
    :param plot_target: Flag to plot targets to control established connections
    :param num_plot_tagets: Plot connections of the num_plot_targts-th node
    :param use_lr_connection_type: Define the type of long range connections
    :return None
    """
    torus_layer = create_torus_layer_uniform()
    if plot_torus:
        fig, _ = plt.subplots()
        tp.PlotLayer(torus_layer, fig)
        plt.show()

    create_local_circular_connections(torus_layer)

    debug_layer = create_distant_connections(
        torus_layer, connection_type=use_lr_connection_type)

    if plot_target:
        choice = np.random.choice(np.asarray(debug_layer),
                                  num_plot_tagets,
                                  replace=False)
        for c in choice:
            tp.PlotTargets([int(c)], torus_layer)
        plt.show()

    adj_mat = create_adjacency_matrix(
        nest.GetNodes(torus_layer)[0],
        nest.GetNodes(torus_layer)[0])
    eigenvalue_analysis(adj_mat, plot=True)
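A possible invocation using the flags documented above (NETWORK_DICT and its "np" key are taken from the default argument; the chosen flag values are arbitrary):

if __name__ == "__main__":
    main(plot_torus=False,
         plot_target=True,
         num_plot_tagets=3,
         use_lr_connection_type=NETWORK_DICT["np"])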
Example #2
def get_nest_adjacency(id_converter=None):
    '''
    Get the adjacency matrix describing a NEST network.

    Parameters
    ----------
    id_converter : dict, optional (default: None)
        A dictionary which maps NEST gids to the desired neuron ids.

    Returns
    -------
    mat_adj : :class:`~scipy.sparse.lil_matrix`
        Adjacency matrix of the network.
    '''
    gids = (nest.GetNodes()[0] if nest_version == 2
            else np.asarray(nest.GetNodes()))

    n = len(gids)

    mat_adj = ssp.lil_matrix((n,n))

    if id_converter is None:
        id_converter = {idx: i for i, idx in enumerate(gids)}

    for i in range(n):
        src = id_converter[gids[i]]
        connections = nest.GetConnections(source=(gids[i],))
        info = nest.GetStatus(connections)
        for dic in info:
            mat_adj.rows[src].append(id_converter[dic['target']])
            mat_adj.data[src].append(dic[WEIGHT])

    return mat_adj
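A rough usage sketch, assuming NEST and the function above are importable; the small network below is arbitrary, and the exact GetNodes call differs between NEST 2 and 3 as handled by the nest_version check:

import nest

nest.ResetKernel()
pop = nest.Create('iaf_psc_alpha', 10)
nest.Connect(pop, pop, {'rule': 'pairwise_bernoulli', 'p': 0.2},
             {'weight': 1.5})

adj = get_nest_adjacency()          # scipy.sparse.lil_matrix of weights
print(adj.shape, adj.nnz)           # (10, 10) and the number of connections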
Example #3
    def loadLayers(self, layer_IDs):

        self.layers_to_record = []
        self.area_PSTH_index = []
        self.area_recorded_models = []

        self.layer_sizes = []
        self.layer_sizes_area = []

        for layer in self.labels:
            id_found = False
            for ll in layer_IDs:
                if (ll[0] == layer):
                    self.layers_to_record.append((ll[1], ll[2]))
                    self.layer_sizes.append(len(nest.GetNodes(ll[1])[0]))
                    id_found = True
            if id_found == False:
                # Assign random layer
                self.layers_to_record.append((ll[1], ll[2]))
                print("Warning: layer %s not found!" % layer)

        for layer in self.area_labels:
            # Search first for the matching spiking label
            index = 0
            for spiking_label in self.labels:
                if (layer == spiking_label):
                    self.area_PSTH_index.append(index)
                index += 1

            # Then search for the NEST ID
            id_found = False
            for ll in layer_IDs:
                if (ll[0] == layer):
                    self.area_recorded_models.append((ll[1], ll[2]))
                    self.layer_sizes_area.append(len(nest.GetNodes(ll[1])[0]))
                    id_found = True
            if id_found == False:
                # Assign random layer
                self.area_recorded_models.append((ll[1], ll[2]))
                print("Warning: layer %s not found!" % layer)

        # center cell
        if self.isCenterCell:

            type = 0
            for layer in self.labels:
                layer_side = int(np.sqrt(self.layer_sizes[type]))
                center_row = int(layer_side / 2.0)
                center_col = int(layer_side / 2.0)

                for cell in np.arange(self.layer_sizes[type]):
                    row = int(cell / layer_side)
                    col = np.remainder(cell, layer_side)
                    if row == center_row and col == center_col:
                        self.selected_cell.append(cell)
                type += 1
Example #4
def set_poisson_values(img_dict, poiss_layers):
    filtered_img_on = img_dict['on']
    filtered_img_off = img_dict['off']

    fixed_list_on = [k * factor_exc for k in filtered_img_on]
    fixed_list_off = [k * factor_exc for k in filtered_img_off]

    poiss_on = list(poiss_layers['poiss_on'])
    poiss_off = list(poiss_layers['poiss_off'])

    nest.SetStatus(nest.GetNodes(poiss_on)[0], 'rate', fixed_list_on)
    nest.SetStatus(nest.GetNodes(poiss_off)[0], 'rate', fixed_list_off)
Example #5
def create_random_patches(
    layer,
    r_loc=0.5,
    p_loc=0.7,
    num_patches=3,
):
    """
    Create random long range patchy connections. To every neuron a single link is established instead of
    taking axonal morphology into account.
    :param layer: Layer in which the connections should be established
    :param r_loc: Radius for local connections
    :param p_loc: Probability for local connections
    :param num_patches: Number of patches that should be created
    :return Nodes of the layer for debugging purposes (plotting)
    """
    # Calculate the parameters for the patches
    r_p = r_loc / 2.
    min_distance = r_loc + r_p
    max_distance = R_MAX / 2. - r_loc
    p_p = get_lr_connection_probability_patches(r_loc,
                                                p_loc,
                                                r_p,
                                                num_patches=num_patches)

    # Iterate through all neurons, as all neurons have random patches
    nodes = nest.GetNodes(layer)[0]
    for neuron in nodes:
        # Calculate radial distance and the respective coordinates for patches
        radial_distance = np.random.uniform(min_distance,
                                            max_distance,
                                            size=num_patches).tolist()
        radial_angle = np.random.uniform(0., 359., size=num_patches).tolist()

        # Calculate patch region
        mask_specs = {"radius": r_p}
        anchors = [
            to_coordinates(distance, angle)
            for angle, distance in zip(radial_angle, radial_distance)
        ]
        patches = tuple()
        for anchor in anchors:
            patches += tp.SelectNodesByMask(layer,
                                            anchor,
                                            mask_obj=tp.CreateMask(
                                                "circular", specs=mask_specs))

        # Define connection
        connect_dict = {"rule": "pairwise_bernoulli", "p": p_p}
        nest.Connect([neuron], patches, connect_dict)

    # Return nodes of layer for debugging
    return nest.GetNodes(layer)[0]
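A possible call, assuming NEST 2.x with the topology module and that R_MAX in the surrounding module matches the extent chosen here (grid size and extent below are arbitrary):

import nest
import nest.topology as tp

nest.ResetKernel()
layer = tp.CreateLayer({'rows': 20, 'columns': 20,
                        'extent': [8., 8.], 'edge_wrap': True,
                        'elements': 'iaf_psc_alpha'})
nodes = create_random_patches(layer, r_loc=0.5, p_loc=0.7, num_patches=3)
print(len(nest.GetConnections(source=list(nodes))), 'patchy connections created')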
Example #6
    def setUp(self):
        self.num_neurons = 1500
        self.neuron_type = "iaf_psc_delta"
        self.rest_pot = -1.
        self.threshold_pot = 1e2
        self.time_const = 22.
        self.capacitance = 1e4

        self.num_stimulus_discr = 4
        self.size_layer = 2.

        self.input_stimulus = cs.image_with_spatial_correlation(size_img=(20, 20), radius=3, num_circles=80)
        self.organise_on_grid = True

        self.torus_layer, self.spike_det, self.multi = nc.create_torus_layer_uniform(
            num_neurons=self.num_neurons,
            neuron_type=self.neuron_type,
            rest_pot=self.rest_pot,
            threshold_pot=self.threshold_pot,
            time_const=self.time_const,
            capacitance=self.capacitance,
            size_layer=self.size_layer
        )
        self.torus_nodes = nest.GetNodes(self.torus_layer)[0]
        self.inh_nodes = np.random.choice(np.asarray(self.torus_nodes), self.num_neurons // 5, replace=False)
        self.min_id_torus = min(self.torus_nodes)
        self.torus_positions = tp.GetPosition(self.torus_nodes)
        self.torus_tree = KDTree(self.torus_positions)

        self.perlin_resolution = (15, 15)
        self.perlin_spacing = 0.1

        (self.tuning_to_neuron_map,
         self.neuron_to_tuning_map,
         self.tuning_weight_vector) = nc.create_perlin_stimulus_map(
            self.torus_layer,
            self.inh_nodes,
            num_stimulus_discr=self.num_stimulus_discr,
            resolution=self.perlin_resolution,
            spacing=self.perlin_spacing,
            plot=False,
            save_plot=False
        )

        self.retina = nc.create_input_current_generator(
            self.input_stimulus,
            organise_on_grid=self.organise_on_grid
        )

        self.receptors = nest.GetNodes(self.retina)[0]
Example #7
    def test_GetNodes_with_params(self):
        """test GetNodes with params"""
        nodes_Vm = nest.GetNodes({'V_m': -77.})
        nodes_Vm_ref = nest.NodeCollection([4, 5, 6, 8])

        self.assertEqual(nodes_Vm_ref, nodes_Vm)

        nodes_Vm_tau = nest.GetNodes({'V_m': -77., 'tau_m': 12.})
        nodes_Vm_tau_ref = nest.NodeCollection([8])

        self.assertEqual(nodes_Vm_tau_ref, nodes_Vm_tau)

        nodes_exp = nest.GetNodes({'model': 'iaf_psc_exp'})
        nodes_exp_ref = nest.NodeCollection([10, 11, 12, 13])

        self.assertEqual(nodes_exp_ref, nodes_exp)
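The fixture that builds the network these NodeCollections refer to is not shown; as a self-contained illustration of parameter-based GetNodes in NEST 3 (model name and parameter values chosen arbitrarily):

import nest

nest.ResetKernel()
matched = nest.Create('iaf_psc_alpha', 2, params={'V_m': -77.})
nest.Create('iaf_psc_alpha', 2)           # keeps the default V_m, not matched

nodes_Vm = nest.GetNodes({'V_m': -77.})
print(nodes_Vm == matched)                # True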
Example #8
    def test_create_torus_layer_with_jitter(self):
        self.reset()

        # Must be number that has a square root in N
        num_neurons = 144
        jitter = 0.01
        neuron_type = "iaf_psc_delta"
        layer_size = 3.

        layer = nc.create_torus_layer_with_jitter(
            num_neurons=num_neurons,
            jitter=jitter,
            neuron_type=neuron_type,
            layer_size=layer_size
        )

        nodes = nest.GetNodes(layer)[0]
        self.assertEqual(len(nodes), num_neurons, "Not the right number of nodes")

        model = nest.GetStatus(nodes, "model")
        for m in model:
            self.assertEqual(m, neuron_type, "Wrong neuron type set")

        mod_size = layer_size - jitter * 2
        step_size = mod_size / float(np.sqrt(num_neurons))
        coordinate_scale = np.arange(-mod_size / 2., mod_size / 2., step_size)
        grid = [[x, y] for y in coordinate_scale for x in coordinate_scale]

        for n in nodes:
            pos = np.asarray(tp.GetPosition([n])[0])
            grid.sort(key=lambda l: np.linalg.norm(np.asarray(l) - pos))
            self.assertLessEqual(pos[0], np.abs(grid[0][0] + jitter), "Distance in x to original position too high")
            self.assertLessEqual(pos[1], np.abs(grid[0][1] + jitter), "Distance in y to original position too high")
Example #9
    def __len__(self):
        """Return the number of connections on the local MPI node."""
        local_nodes = nest.GetNodes([0], local_only=True)[0]
        local_connections = nest.GetConnections(
            target=local_nodes,
            synapse_model=self.nest_synapse_model,
            synapse_label=self.nest_synapse_label)
        return len(local_connections)
Example #10
    def loadLayers(self, layer_IDs):

        self.layers_to_record = []
        self.layer_sizes = []

        for layer in self.labels:
            id_found = False
            for ll in layer_IDs:
                if (ll[0] == layer):
                    self.layers_to_record.append((ll[1], ll[2]))
                    self.layer_sizes.append(len(nest.GetNodes(ll[1])[0]))
                    id_found = True
            if id_found == False:
                # Assign random layer
                self.layers_to_record.append((ll[1], ll[2]))
                print("Warning: layer %s not found!" % layer)

        # center cell
        if self.isCenterCell:

            type = 0
            for layer in self.labels:
                layer_side = int(np.sqrt(self.layer_sizes[type]))
                center_row = int(layer_side / 2.0)
                center_col = int(layer_side / 2.0)

                for cell in np.arange(self.layer_sizes[type]):
                    row = int(cell / layer_side)
                    col = np.remainder(cell, layer_side)
                    if row == center_row and col == center_col:
                        self.selected_cell.append(cell)
                type += 1
Example #11
    def setUp(self):
        nest.ResetKernel()
        nest.SetKernelStatus({"total_num_virtual_procs": 4})
        nest.ResetNetwork()

        self.sim_time = 10000
        self.sim_step = 100

        nest.SetKernelStatus(
            {'structural_plasticity_update_interval': self.sim_time + 1})

        self.se_integrator = []
        self.sim_steps = None
        self.ca_nest = None
        self.ca_python = None
        self.se_nest = None
        self.se_python = None

        # build
        self.pop = nest.Create('iaf_neuron', 10)
        self.local_nodes = nest.GetNodes([0], {'model': 'iaf_neuron'}, True)[0]
        self.spike_detector = nest.Create('spike_detector')
        nest.Connect(self.pop, self.spike_detector, 'all_to_all')
        noise = nest.Create('poisson_generator')
        nest.SetStatus(noise, {"rate": 800000.0})
        nest.Connect(noise, self.pop, 'all_to_all')
Example #12
def get_and_save_connections(source_layers_dict, source_layer_key,
                             target_layers_dict, target_layer_key):
    params = ['source', 'target', 'weight']
    source_layer = source_layers_dict[source_layer_key]
    target_layer = target_layers_dict[target_layer_key]
    src_nodes = nest.GetNodes(source_layer)
    tgt_nodes = nest.GetNodes(target_layer)
    connections = nest.GetConnections(source=src_nodes[0])
    data = nest.GetStatus(connections, params)
    df = pd.DataFrame.from_records(data, columns=params)
    df = df[df['target'].between(np.min(tgt_nodes), np.max(tgt_nodes))]
    df.to_csv(connections_path + '/connections_' + str(source_layer_key) +
              '_to_' + str(target_layer_key) + '.txt',
              header=None,
              index=None,
              sep=' ',
              mode='a')
Example #13
    def test_GetNodes(self):
        """GetNodes"""

        nest.ResetKernel()
        model = 'iaf_neuron'
        l = nest.LayoutNetwork(model, [2, 3])
        allNodes = range(2, 10)
        allSubnets = [2, 6]
        allLeaves = [n for n in allNodes if n not in allSubnets]

        # test all
        self.assertEqual(nest.GetNodes(l), [allNodes])

        # test all with empty dict
        self.assertEqual(nest.GetNodes(l, properties={}), [allNodes])

        # test iteration over subnets
        self.assertEqual(nest.GetNodes(l + l), [allNodes, allNodes])

        # children of l are nodes
        self.assertEqual(nest.GetNodes(l, properties={'parent': l[0]}),
                         [allSubnets])

        # local id of second intermediate subnet and middle nodes
        self.assertEqual(nest.GetNodes(l, properties={'local_id': 2}),
                         [[4, 6, 8]])

        # selection by model type
        self.assertEqual(nest.GetNodes(l, properties={'model': 'subnet'}),
                         [allSubnets])
        self.assertEqual(nest.GetNodes(l, properties={'model': model}),
                         [allLeaves])
Example #14
    def test_GetNodes(self):
        """GetNodes"""

        nest.ResetKernel()
        model = 'iaf_psc_alpha'
        l = nest.LayoutNetwork(model, (2, 3))
        allNodes = tuple(range(2, 10))
        allSubnets = (2, 6)
        allLeaves = tuple(n for n in allNodes if n not in allSubnets)

        # test all
        self.assertEqual(nest.GetNodes(l), (allNodes, ))

        # test all with empty dict
        self.assertEqual(nest.GetNodes(l, properties={}), (allNodes, ))

        # test iteration over subnets
        self.assertEqual(nest.GetNodes(l + l), (allNodes, allNodes))

        # children of l are nodes
        self.assertEqual(nest.GetNodes(
            l, properties={'parent': l[0]}), (allSubnets, ))

        # local id of second intermediate subnet and middle nodes
        self.assertEqual(nest.GetNodes(
            l, properties={'local_id': 2}), ((4, 6, 8), ))

        # selection by model type
        self.assertEqual(nest.GetNodes(
            l, properties={'model': 'subnet'}), (allSubnets, ))
        self.assertEqual(nest.GetNodes(
            l, properties={'model': model}), (allLeaves, ))
Example #15
    def test_sort_nodes_space(self):
        self.reset()
        layer = create_torus_layer_with_jitter(4, jitter=0, layer_size=2.)
        expected_positions = [[-1., -1.], [-1., 0.], [0., -1.], [0., 0.]]
        sorted_nodes, positions = tu.sort_nodes_space(
            nest.GetNodes(layer)[0], axis=0)
        for sp, esp in zip(positions, expected_positions):
            self.assertListEqual(list(sp), esp,
                                 "Nodes were not correctly sorted")
Example #16
def main_create_eigenspectra_plots():
    """
    Compute the eigenvalue spectra and plot them
    :return: None
    """
    torus_layer, _, _ = create_torus_layer_uniform()
    create_local_circular_connections_topology(torus_layer)

    for key in NETWORK_DICT:
        _ = create_distant_connections(torus_layer,
                                       connection_type=NETWORK_DICT[key])

        adj_mat = create_adjacency_matrix(
            nest.GetNodes(torus_layer)[0],
            nest.GetNodes(torus_layer)[0])
        eigenvalue_analysis(adj_mat,
                            plot=True,
                            save_plot=True,
                            fig_name="voges_adj_matrix_%s.png" % key)
Example #17
    def __len__(self):
        """Return the number of connections on the local MPI node."""
        nest_model = self.post.celltype.nest_name[
            self._simulator.state.spike_precision]
        local_nodes = nest.GetNodes({"model": nest_model}, local_only=True)
        local_connections = nest.GetConnections(
            target=local_nodes,
            synapse_model=self.nest_synapse_model,
            synapse_label=self.nest_synapse_label)
        return len(local_connections)
Example #18
    def create_populations(self):
        """
        Create all populations of the area.
        """
        self.gids = {}
        self.num_local_nodes = 0
        for pop in self.populations:
            gid = nest.Create(
                self.network.params['neuron_params']['neuron_model'],
                int(self.neuron_numbers[pop]))
            mask = create_vector_mask(self.network.structure,
                                      areas=[self.name],
                                      pops=[pop])
            I_e = self.network.add_DC_drive[mask][0]
            if not self.network.params['input_params']['poisson_input']:
                K_ext = self.external_synapses[pop]
                W_ext = self.network.W[self.name][pop]['external']['external']
                tau_syn = self.network.params['neuron_params'][
                    'single_neuron_dict']['tau_syn_ex']
                DC = K_ext * W_ext * tau_syn * 1.e-3 * \
                    self.network.params['rate_ext']
                I_e += DC
            nest.SetStatus(gid, {'I_e': I_e})

            # Store first and last GID of each population
            self.gids[pop] = (gid[0], gid[-1])

            # Initialize membrane potentials
            # This could also be done after creating all areas, which
            # might yield better performance. Has to be tested.
            for t in np.arange(nest.GetKernelStatus('local_num_threads')):
                local_nodes = np.array(
                    nest.GetNodes(
                        [0], {
                            'model':
                            self.network.params['neuron_params']
                            ['neuron_model'],
                            'thread':
                            t
                        },
                        local_only=True)[0])
                local_nodes_pop = local_nodes[(np.logical_and(
                    local_nodes >= gid[0], local_nodes <= gid[-1]))]
                if len(local_nodes_pop) > 0:
                    vp = nest.GetStatus([local_nodes_pop[0]], 'vp')[0]
                    # vp is the same for all local nodes on the same thread
                    nest.SetStatus(
                        list(local_nodes_pop), 'V_m',
                        self.simulation.pyrngs[vp].normal(
                            self.network.params['neuron_params']['V0_mean'],
                            self.network.params['neuron_params']['V0_sd'],
                            len(local_nodes_pop)))
                    self.num_local_nodes += len(local_nodes_pop)
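For reference, the per-thread membrane-potential initialization used above can be written as a stand-alone NEST 2.x sketch ('iaf_psc_exp', the mean/std values and the seeds are placeholders; pyrngs holds one NumPy RNG per virtual process):

import nest
import numpy as np

nest.ResetKernel()
nest.Create('iaf_psc_exp', 100)
n_vp = nest.GetKernelStatus('total_num_virtual_procs')
pyrngs = [np.random.RandomState(seed) for seed in range(n_vp)]

for t in range(nest.GetKernelStatus('local_num_threads')):
    local_nodes = nest.GetNodes([0], {'model': 'iaf_psc_exp', 'thread': t},
                                local_only=True)[0]
    if len(local_nodes) > 0:
        # vp is the same for all local nodes on the same thread
        vp = nest.GetStatus([local_nodes[0]], 'vp')[0]
        nest.SetStatus(list(local_nodes), 'V_m',
                       pyrngs[vp].normal(-58., 10., len(local_nodes)))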
Example #19
def print_gdf(f):
    """
    Print network as a graph in GDF format
    Args:
        f: file
    Returns: None
    """

    f.write("nodedef> label\n")
    for node in nest.GetNodes((0, ))[0]:
        f.write("%d\n" % node)

    print_connections(f)
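A minimal usage sketch; print_connections is assumed to be defined in the same module, and the file name is arbitrary:

with open('network.gdf', 'w') as f:
    print_gdf(f)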
Example #20
    def from_nest_network(cls,
                          gids,
                          resolution=None,
                          monitor_rate=None,
                          mean_field=True,
                          omp=1,
                          ignore_errors=False):
        '''
        Create a Simulator instance from the GIDs of a NEST network and the
        network parameters.

        Parameters
        ----------
        gids : tuple
            NEST GIDs.

        All other parameters are those of the ``__init__`` method, except for
        `num_neurons`.

        See also
        --------
        :func:`~PyNeurActiv.Simulator2017_SynchroBurst.__init__`,
        :func:`~PyNeurActiv.Simulator2017_SynchroBurst.from_nngt_network`.
        '''
        # get the neurons
        neurons = nest.GetNodes([0], properties={'element_type': 'neuron'})[0]
        num_neurons = len(neurons)
        di_param = nest.GetStatus((neurons[0], ))[0]
        edges_info = nest.GetStatus(nest.GetConnections(source=neurons))
        weight = 0.
        delay = 0.
        num_edges = 0.
        for di in edges_info:
            weight += di["weight"]
            delay += di["delay"]
            num_edges += 1.
        if not num_edges:
            raise RuntimeError("No edges in the network.")
        di_param["weight"] = weight / num_edges
        di_param["delay"] = delay / num_edges
        sim = cls(num_neurons,
                  di_param,
                  resolution=resolution,
                  monitor_rate=monitor_rate,
                  mean_field=mean_field,
                  omp=omp,
                  create_network=False,
                  gids=gids)
        return sim
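A hypothetical call, assuming a connected NEST 2.x network already exists in the kernel and that cls is the Simulator2017_SynchroBurst class referenced in the docstring:

import nest

gids = nest.GetNodes([0], properties={'element_type': 'neuron'})[0]
sim = Simulator2017_SynchroBurst.from_nest_network(gids, resolution=0.1)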
Example #21
def _set_spike_data(data, spike_detector):
    '''
    Data must be [[], []]
    '''
    import nest
    from ..simulation.nest_utils import _get_nest_gids, spike_rec, nest_version

    if not len(data[0]):
        if spike_detector is None:
            prop = {'model': spike_rec}
            if nest_version == 3:
                spike_detector = nest.GetNodes(properties=prop)
            else:
                spike_detector = nest.GetNodes((0, ), properties=prop)[0]

        events = nest.GetStatus(spike_detector, "events")

        for ev_dict in events:
            data[0].extend(ev_dict["senders"])
            data[1].extend(ev_dict["times"])

    sorter = np.argsort(data[1])

    return np.array(data)[:, sorter].T
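Hypothetical usage after nest.Simulate() has run: passing [[], []] makes the helper query all spike recorders in the kernel itself and return the spikes sorted by time.

spikes = _set_spike_data([[], []], spike_detector=None)
senders, times = spikes[:, 0], spikes[:, 1]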
Example #22
def _set_spike_data(data, spike_detector):
    '''
    Data must be [[], []]
    '''
    import nest
    if not len(data[0]):
        if spike_detector is None:
            spike_detector = nest.GetNodes(
                (0, ), properties={'model': 'spike_detector'})[0]
        events = nest.GetStatus(spike_detector, "events")
        for ev_dict in events:
            data[0].extend(ev_dict["senders"])
            data[1].extend(ev_dict["times"])
    sorter = np.argsort(data[1])
    return np.array(data)[:, sorter].T
Example #23
    def test_create_input_current_generator(self):
        self.reset()

        input_stimulus = cs.image_with_spatial_correlation(size_img=(50, 50), radius=3, num_circles=80)
        organise_on_grid = True
        retina = nc.create_input_current_generator(
            input_stimulus,
            organise_on_grid=organise_on_grid
        )

        receptors = nest.GetNodes(retina)[0]
        amplitude = nest.GetStatus(receptors, "amplitude")
        self.assertTrue(
            np.all(np.asarray(amplitude) == input_stimulus.reshape(-1)),
            "The input stimulus and the current in the retina don't match"
        )
Example #24
def set_poisson_values(img_dict, poiss_layers, num_orientations):
    for i in range(0, num_orientations):
        orientation = i * 180 / num_orientations
        filtered_img = img_dict["orientation_" + str(orientation)]

        fixed_list = [
            k * factor if k > 10.0 else (11 - k)**0.1 * k * factor
            for k in filtered_img
        ]
        fixed_list = [
            fixed_list[k] if filtered_img[k] > poisson_bias
            else poisson_bias * factor_bias
            for k in range(0, len(fixed_list))
        ]

        l_poiss = list(
            poiss_layers['orientation_' + str(orientation)]
            ['l_poiss_' + str(orientation)])
        nest.SetStatus(nest.GetNodes(l_poiss)[0], 'rate', fixed_list)
Example #25
File: func.py  Project: xerebral/NEUCOGAR
def build_model():
    global NEURONS
    nest.SetDefaults('iaf_psc_exp', iaf_neuronparams)
    layerNumberZ = 6
    neuronID = 2

    for layer in Cortex:
        columns = layer[area][X_area]
        rows = layer[area][Y_area]
        NEURONS += rows * columns

        for y in range(rows):
            for x in range(columns):
                dictPosition_NeuronID[(float(x), float(y),
                                       float(layerNumberZ))] = neuronID
                neuronID += 1

        layerNumberZ -= 1
        logger.debug("{0} {1} neurons".format(layer[Glu][k_name][:2],
                                              rows * columns))
        logger.debug("X: {0} ({1}neu x {2}col) \n".format(
            layer[area][X_area], sum(layer[step]), layer[area][X_area] /
            sum(layer[step])) + " " * 16 + "Y: {0} ({1}neu x {2}col)".format(
                layer[area][Y_area], 2, layer[area][Y_area] / 2))

    model_3D = tp.CreateLayer({
        'positions': dictPosition_NeuronID.keys(),
        'elements': 'iaf_psc_exp',
        'extent': [1000.0, 1000.0, 100.0],
        'edge_wrap': False
    })
    print(nest.GetNodes(model_3D))
    # TODO uncomment if you want to see the 3D model
    tp.PlotLayer(model_3D)
    plt.show()

    # Build another parts
    for part in IC + MGB:
        part[k_model] = 'iaf_psc_exp'
        part[k_IDs] = nest.Create(part[k_model], part[k_NN])
        NEURONS += part[k_NN]
        logger.debug("{0} [{1}, {2}] {3} neurons".format(
            part[k_name], part[k_IDs][0], part[k_IDs][0] + part[k_NN] - 1,
            part[k_NN]))
Example #26
def create_overlapping_patches(layer,
                               r_loc=0.5,
                               p_loc=0.7,
                               distance=2.5,
                               num_patches=3,
                               allow_multapses=False):
    """
    Create long-range patchy connections with overlapping patches such that all neurons share almost the same
    parameters for these links
    :param layer: The layer in which the connections should be established
    :param r_loc: Radius of local connections
    :param p_loc: Probability of local connections
    :param distance: Distance of patches
    :param num_patches: Number of patches
    :param allow_multapses: Flag to allow multiple links between neurons
    :return: Neurons of the layer for debugging (plotting)
    """
    # Calculate parameters for patches
    r_p = r_loc / 2.
    p_p = get_lr_connection_probability_patches(r_loc, p_loc, r_p)

    # Create overlapping patches as every neuron shares the same patch parameter
    for n in range(1, num_patches + 1):
        angle = n * 360. / float(num_patches)
        coordinates = to_coordinates(angle, distance)
        mask_dict = {"circular": {"radius": r_p, "anchor": coordinates}}

        connection_dict = {
            "connection_type": "divergent",
            "mask": mask_dict,
            "kernel": p_p,
            "allow_autapses": False,
            "allow_multapses": allow_multapses,
            "synapse_model": "static_synapse"
        }

        tp.ConnectLayers(layer, layer, connection_dict)

    # Return nodes of layer for debugging
    return nest.GetNodes(layer)[0]
Example #27
def get_firing_rate(network, spike_detector=None, data=None, nodes=None):
    '''
    Return the average firing rate for the neurons.

    Parameters
    ----------
    network : :class:`nngt.Network`
        Network for which the activity was simulated.
    spike_detector : tuple of ints, optional (default: spike detectors)
        GID of the "spike_detector" objects recording the network activity.
    data : array-like of shape (2, N), optional (default: None)
        Array containing the spikes data (first line must contain the NEST GID
        of the neuron that fired, second line must contain the associated spike
        time).
    nodes : array-like, optional (default: all nodes)
        NNGT ids of the nodes for which the firing rate should be computed.

    Returns
    -------
    fr : array-like
        Firing rate for each neuron in `nodes`.
    '''
    if data is None:
        data = [[], []]
    if nodes is None:
        nodes = network.nest_gid
    else:
        nodes = network.nest_gid[nodes]
    if not len(data[0]):
        if spike_detector is None:
            spike_detector = nest.GetNodes(
                (0,), properties={'model': 'spike_detector'})[0]
        events = nest.GetStatus(spike_detector, "events")
        for ev_dict in events:
            data[0].extend(ev_dict["senders"])
            data[1].extend(ev_dict["times"])
    data[0] = np.array(data[0])
    data[1] = np.array(data[1])
    return _fr_from_data(nodes, data)
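A usage sketch under the assumption that `net` is an nngt.Network whose neurons were created in NEST and that the simulation has already run with at least one spike_detector attached:

import numpy as np

rates = get_firing_rate(net)        # one entry per neuron of the network
print('mean firing rate:', np.mean(rates))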
Example #28
def create_distant_np_connections(layer,
                                  r_loc=0.5,
                                  p_loc=0.7,
                                  allow_multapses=False):
    """
    Create long distance connections without any patches
    :param layer: Layer in which the connections should be established
    :param r_loc: radius for local connections needed to calculate the long range connection probability
    :param p_loc: probability for local connections needed to calculate the long range connection probability
    :param allow_multapses: allow multiple connections between neurons
    :return: Neurons of the layer for debugging (plotting)
    """
    # Mask for area to which long-range connections can be established
    mask_dict = {
        "doughnut": {
            "inner_radius": r_loc,
            "outer_radius": R_MAX / 2.
        }
    }

    # Get the probability of establishing a single long-range connection
    p_lr = get_lr_connection_probability_np(r_loc, p_loc)

    connection_dict = {
        "connection_type": "divergent",
        "mask": mask_dict,
        "kernel": p_lr,
        "allow_autapses": False,
        "allow_multapses": allow_multapses,
        "allow_oversized_mask": True,
    }

    tp.ConnectLayers(layer, layer, connection_dict)

    # Return nodes of layer for debugging
    return nest.GetNodes(layer)[0]
Example #29
    def test_get_in_out_degree(self):
        self.reset()
        layer = create_torus_layer_with_jitter(4, jitter=0, layer_size=2.)
        conn_dict = {
            "connection_type": "divergent",
            "mask": {
                "rectangular": {
                    "lower_left": [-1.0, -1.0],
                    "upper_right": [1.0, 1.0]
                }
            },
            "kernel": 1.0
        }

        expect_in = [9, 6, 6, 4]
        expect_out = [4, 6, 6, 9]

        tp.ConnectLayers(layer, layer, conn_dict)
        in_degree, out_degree, _, _, _, _ = tu.get_in_out_degree(
            nest.GetNodes(layer)[0])
        self.assertListEqual(expect_in, in_degree,
                             "In degree was not computed correctly")
        self.assertListEqual(expect_out, out_degree,
                             "Out degree was not computed correctly")
Example #30
    def create_populations(self):
        """ Creates the neuronal populations.

        The neuronal populations are created and the parameters are assigned
        to them. The initial membrane potential of the neurons is drawn from a
        normal distribution. Scaling of the number of neurons and of the
        synapses is performed. If scaling is performed extra DC input is added
        to the neuronal populations.

        """
        self.N_full = self.net_dict['N_full']
        self.N_scaling = self.net_dict['N_scaling']
        self.K_scaling = self.net_dict['K_scaling']
        self.synapses = get_total_number_of_synapses(self.net_dict)
        self.synapses_scaled = self.synapses * self.K_scaling
        self.nr_neurons = self.N_full * self.N_scaling
        self.K_ext = self.net_dict['K_ext'] * self.K_scaling
        self.w_from_PSP = get_weight(self.net_dict['PSP_e'], self.net_dict)
        self.weight_mat = get_weight(
            self.net_dict['PSP_mean_matrix'], self.net_dict
            )
        self.weight_mat_std = self.net_dict['PSP_std_matrix']
        self.w_ext = self.w_from_PSP
        if self.net_dict['poisson_input']:
            self.DC_amp_e = np.zeros(len(self.net_dict['populations']))
        else:
            if nest.Rank() == 0:
                print(
                    '''
                    no poisson input provided
                    calculating dc input to compensate
                    '''
                    )
            self.DC_amp_e = compute_DC(self.net_dict, self.w_ext)

        if nest.Rank() == 0:
            print(
                'The number of neurons is scaled by a factor of: %.2f'
                % self.N_scaling
                )
            print(
                'The number of synapses is scaled by a factor of: %.2f'
                % self.K_scaling
                )

        # Scaling of the synapses.
        if self.K_scaling != 1:
            synapses_indegree = self.synapses / (
                self.N_full.reshape(len(self.N_full), 1) * self.N_scaling)
            self.weight_mat, self.w_ext, self.DC_amp_e = adj_w_ext_to_K(
                synapses_indegree, self.K_scaling, self.weight_mat,
                self.w_from_PSP, self.DC_amp_e, self.net_dict, self.stim_dict
                )

        # Create cortical populations.
        self.pops = []
        pop_file = open(
            os.path.join(self.data_path, 'population_GIDs.dat'), 'w+'
            )
        for i, pop in enumerate(self.net_dict['populations']):
            population = nest.Create(
                self.net_dict['neuron_model'], int(self.nr_neurons[i])
                )
            nest.SetStatus(
                population, {
                    'tau_syn_ex': self.net_dict['neuron_params']['tau_syn_ex'],
                    'tau_syn_in': self.net_dict['neuron_params']['tau_syn_in'],
                    'E_L': self.net_dict['neuron_params']['E_L'],
                    'V_th': self.net_dict['neuron_params']['V_th'],
                    'V_reset':  self.net_dict['neuron_params']['V_reset'],
                    't_ref': self.net_dict['neuron_params']['t_ref'],
                    'I_e': self.DC_amp_e[i]
                    }
                )
            self.pops.append(population)
            pop_file.write('%d  %d \n' % (population[0], population[-1]))
        pop_file.close()
        for thread in np.arange(nest.GetKernelStatus('local_num_threads')):
            # Using GetNodes is a work-around until NEST 3.0 is released. It
            # will issue a deprecation warning.
            local_nodes = nest.GetNodes(
                [0], {
                    'model': self.net_dict['neuron_model'],
                    'thread': thread
                    }, local_only=True
                )[0]
            vp = nest.GetStatus(local_nodes)[0]['vp']
            # vp is the same for all local nodes on the same thread
            nest.SetStatus(
                local_nodes, 'V_m', self.pyrngs[vp].normal(
                    self.net_dict['neuron_params']['V0_mean'],
                    self.net_dict['neuron_params']['V0_sd'],
                    len(local_nodes))
                    )