Code Example #1
 def connect_spiking_region_nodes(self):
     """Method to connect all Spiking brain region nodes among them."""
     # For every type of connection between distinct Spiking region nodes' populations
     for i_conn, conn in enumerate(ensure_list(self._nodes_connections)):
         # ...form the connection for every distinct pair of Spiking nodes
         for source_index in conn["source_nodes"]:
             # ...get the source spiking brain region index:
             i_source_node = np.where(
                 self.spiking_nodes_ids == source_index)[0][0]
             for target_index in conn["target_nodes"]:
                 # ...get the target spiking brain region index:
                 i_target_node = np.where(
                     self.spiking_nodes_ids == target_index)[0][0]
                 # ...create a synapse parameters dictionary, from the configured inputs:
                 syn_spec = self.set_synapse(
                     conn["synapse_model"], conn["weight"](source_index,
                                                           target_index),
                     conn["delay"](source_index, target_index),
                     conn["receptor_type"](source_index, target_index))
                 if source_index != target_index:
                     # ...and as long as this is not a within node connection...
                     for conn_src in ensure_list(conn["source"]):
                         # ...and for every combination of source...
                         src_pop = self._spiking_brain[i_source_node][
                             conn_src]
                         for conn_trg in ensure_list(conn["target"]):
                             # ...and target population...
                             trg_pop = self._spiking_brain[i_target_node][
                                 conn_trg]
                             self.connect_two_populations(
                                 src_pop, conn["source_inds"], trg_pop,
                                 conn["target_inds"], conn['conn_spec'],
                                 syn_spec)
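The loop above expects every entry of self._nodes_connections to provide population labels, node index lists, and per-pair callables for weight, delay and receptor type. A minimal sketch of one such entry follows; all values are illustrative assumptions, not defaults of the builder:

# Hypothetical sketch of a single nodes-connection descriptor, matching the keys
# read by connect_spiking_region_nodes() above. The callables map a
# (source_index, target_index) pair to scalar connection properties.
example_nodes_connection = {
    "source": ["E"],                        # source population label(s)
    "target": ["E", "I"],                   # target population label(s)
    "source_nodes": [0, 1],                 # spiking region indices acting as sources
    "target_nodes": [0, 1],                 # ...and as targets
    "synapse_model": "static_synapse",      # assumed synapse model name
    "weight": lambda src, trg: 1.0,         # per-pair weight
    "delay": lambda src, trg: 1.5,          # per-pair delay (ms)
    "receptor_type": lambda src, trg: 0,    # per-pair receptor type
    "source_inds": None,                    # optional neuron-subset selectors
    "target_inds": None,
    "conn_spec": {"rule": "all_to_all"},    # assumed connection rule dict
}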
Code Example #2
    def __init__(self, tvb_simulator, spiking_network, spiking_nodes_ids, exclusive_nodes=False,
                 tvb_to_spiking_interfaces=None, spiking_to_tvb_interfaces=None):
        if isinstance(spiking_network, SpikingNetwork):
            self.spiking_network = spiking_network
        else:
            raise ValueError("Input spiking_network is not a SpikingNetwork object!\n%s" % str(spiking_network))
        self.exclusive_nodes = exclusive_nodes
        if isinstance(tvb_simulator, Simulator):
            self.tvb_simulator = tvb_simulator
            self.spiking_nodes_ids = np.array(ensure_list(spiking_nodes_ids))
            self.tvb_nodes_ids = list(range(self.tvb_connectivity.weights.shape[0]))
            if self.exclusive_nodes:
                try:
                    for i_n in self.spiking_nodes_ids:
                        self.tvb_nodes_ids.remove(i_n)
                except ValueError:
                    raise ValueError("Failed to compute tvb_nodes_ids from spiking_nodes_ids %s "
                                     "and TVB connectivity of size %s!"
                                     % (str(self.spiking_nodes_ids), self.tvb_connectivity.number_of_regions))
            self.tvb_nodes_ids = np.array(self.tvb_nodes_ids)
            if self.tvb_simulator.stimulus is not None:
                if np.sum(self.tvb_simulator.stimulus[:, self.spiking_nodes_ids.tolist()]):
                    raise ValueError("TVB-Spiking Network interface does not implement "
                                     "TVB stimulus application to Spiking Network nodes!\n"
                                     "The user has to configure such stimulus as part of the Spiking Network model, "
                                     "via appropriate Spiking Network input devices!")
        else:
            raise ValueError("Input simulator_tvb is not a Simulator object!\n%s" % str(tvb_simulator))

        # TVB <-> Spiking Network transformations' weights/funs
        # If set as weights, they will become a transformation function of
        # lambda state, regions_indices: w[regions_indices] * state[regions_indices]
        # If set as a function of lambda state: fun(state), it will become a vector function of:
        # lambda state, regions_indices: np.array([fun(state[index]) for index in regions_indices])
        # TVB -> Spiking Network
        self.w_tvb_to_spike_rate = 1000.0  # (e.g., spike rate in NEST is in spikes/sec, assuming TVB rate is spikes/ms)
        self.w_tvb_to_current = 1000.0  # (1000.0 (nA -> pA), because I_e, and dc_generator amplitude in NEST are in pA)
        self.w_tvb_to_potential = 1.0  # assuming mV in both NEST and TVB
        # TVB <- Spiking Network
        # We return from a Spiking Network spike_detector
        # the ratio number_of_population_spikes / number_of_population_neurons
        # for every TVB time step, which is usually a quantity in the range [0.0, 1.0],
        # as long as a neuron cannot fire twice during a TVB time step, i.e.,
        # as long as the TVB time step (usually 0.001 to 0.1 ms)
        # is smaller than the neurons' refractory time, t_ref (usually 1-2 ms)
        # For conversion to a rate, one has to do:
        # w_spikes_to_tvb = 1/tvb_dt, to get it in spikes/ms, and
        # w_spikes_to_tvb = 1000/tvb_dt, to get it in Hz
        self.w_spikes_to_tvb = 1.0
        self.w_spikes_var_to_tvb = 1.0
        # We return from a Spiking Network multimeter or voltmeter the membrane potential in mV
        self.w_potential_to_tvb = 1.0

        if spiking_to_tvb_interfaces is not None:
            self.spikeNet_to_tvb_interfaces = ensure_list(spiking_to_tvb_interfaces)
        if tvb_to_spiking_interfaces is not None:
            self.tvb_to_spikeNet_interfaces = ensure_list(tvb_to_spiking_interfaces)
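The comment block above describes the spike-ratio-to-rate conversion only in words. A minimal numeric sketch, assuming an arbitrary TVB integration step of 0.1 ms:

# Sketch of the conversion factors described in the comments above.
# tvb_dt is an assumed TVB integration step in ms.
tvb_dt = 0.1
w_spikes_to_tvb_per_ms = 1.0 / tvb_dt    # per-step spike ratio -> spikes/ms
w_spikes_to_tvb_hz = 1000.0 / tvb_dt     # per-step spike ratio -> spikes/sec (Hz)
# e.g., a ratio of 0.05 within one 0.1 ms step corresponds to 0.05 * 10000 = 500 Hz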
Code Example #3
    def concatenate(self, time_series_gen_or_seq, dim, **kwargs):
        out_time_series = None
        first = True
        for time_series in time_series_gen_or_seq:
            if first:
                out_time_series, select_funs = self.select(
                    time_series, **kwargs)
                dim_label = out_time_series.get_dimension_name(dim)
                first = False
            else:
                if np.float32(out_time_series.sample_period) != np.float32(
                        time_series.sample_period):
                    raise ValueError(
                        "Timeseries concatenation failed!\n"
                        "Timeseries have a different time step %s \n "
                        "than the concatenated ones %s!" %
                        (str(np.float32(time_series.sample_period)),
                         str(np.float32(out_time_series.sample_period))))
                else:
                    time_series = self.select(time_series, select_funs)[0]
                    labels_dimensions = dict(out_time_series.labels_dimensions)
                    out_labels = out_time_series.get_dimension_labels(dim)
                    if out_labels is not None and len(
                            out_labels) == out_time_series.shape[dim]:
                        time_series_labels = time_series.get_dimension_labels(
                            dim)
                        if time_series_labels is not None and len(
                                time_series_labels) == time_series.shape[dim]:
                            labels_dimensions[dim_label] = \
                                np.array(ensure_list(out_labels) + ensure_list(time_series_labels))
                        else:
                            del labels_dimensions[dim_label]
                            warning(
                                "Dimension labels for dimensions %s cannot be concatenated! "
                                "Deleting them!" % dim_label)
                    try:
                        out_data = np.concatenate(
                            [out_time_series.data, time_series.data], axis=dim)
                    except ValueError:
                        raise_value_error(
                            "Timeseries concatenation failed!\n"
                            "Timeseries have a shape %s and the concatenated ones %s!"
                            % (str(out_time_series.shape),
                               str(time_series.shape)))
                    out_time_series = out_time_series.duplicate(
                        data=out_data, labels_dimensions=labels_dimensions)
        if out_time_series is None:
            raise_value_error("Cannot concatenate empty list of TimeSeries!")

        return out_time_series
Code Example #4
 def _GetFromConnections(self, attrs=None, connections=None):
     """Method to get attributes of the connections from/to the SpikingPopulation's neurons.
         Arguments:
          connections: a Projection object or a collection (list, tuple, array) thereof.
                       Default = None, corresponding to all connections to/from the present population.
          attrs: collection (list, tuple, array) of the attributes to be included in the output.
                 Default = None, corresponds to all attributes
         Returns:
          Dictionary of lists (for the possible different Projection objects) of arrays of connections' attributes.
     """
     if connections is None:
         connections = self._GetConnections()
     if isinstance(connections, tuple):
         if len(connections) == 1:
             connections = connections[0]
         else:
             # In case we deal with both pre and post connections, treat them separately:
             outputs = []
             for connection in connections:
                 outputs.append(self._GetFromConnections(attrs, connection))
             return tuple(outputs)
     dictionary = {}
     for connection in ensure_list(connections):
         if connection in self.projections_pre or connection in self.projections_post:
             if attrs is None:
                 attrs = np.union1d(self._default_connection_attrs,
                                    connection.attributes)
             else:
                 attrs = np.union1d(attrs, self._default_connection_attrs)
             for attribute in attrs:
                 self._set_attributes_of_connection_to_dict(
                     dictionary, connection, attribute)
     return dictionary
Code Example #5
 def connect_two_populations(self, pop_src, src_inds_fun, pop_trg,
                             trg_inds_fun, conn_spec, syn_spec):
     """Method to connect two NESTPopulation instances in the SpikingNetwork.
        Arguments:
         pop_src: the source NESTPopulation of the connection
         src_inds_fun: a function that selects a subset of the source population neurons
         pop_trg: the target NESTPopulation of the connection
         trg_inds_fun: a function that selects a subset of the target population neurons
         conn_spec: a dict of parameters of the connectivity pattern among the neurons of the two populations,
                    excluding weight and delay ones
         syn_spec: a dict of parameters of the synapses among the neurons of the two populations,
                   including weight, delay and synaptic receptor type ones
     """
     # Prepare the parameters of connectivity:
     conn_spec = self._prepare_conn_spec(pop_src, pop_trg, conn_spec)
     # Prepare the parameters of the synapse:
     syn_spec = self._prepare_syn_spec(syn_spec)
     # We might create the same connection multiple times for different synaptic receptors...
     receptors = ensure_list(syn_spec["receptor_type"])
     for receptor in receptors:
         syn_spec["receptor_type"] = receptor
         self.nest_instance.Connect(
             get_populations_neurons(pop_src, src_inds_fun),
             get_populations_neurons(pop_trg, trg_inds_fun), conn_spec,
             syn_spec)
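For orientation, the two specification dictionaries handed to nest.Connect() above have roughly the following shape once prepared; the concrete values are assumptions, not defaults of the builder:

# Illustrative shapes of the prepared specification dictionaries (NEST 3 style).
conn_spec = {"rule": "fixed_indegree", "indegree": 10}
syn_spec = {"synapse_model": "static_synapse",
            "weight": 1.0,        # synaptic weight (units depend on the neuron model)
            "delay": 1.5,         # ms
            "receptor_type": 0}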
Code Example #6
def build_and_connect_devices(devices_input_dicts,
                              create_device_fun,
                              connect_device_fun,
                              spiking_nodes,
                              config=CONFIGURED,
                              **kwargs):
    """A method to build the final ANNarchyNetwork class based on the already created constituents.
       Build and connect devices by
       the variable they measure or stimulate, and population(s) they target (pandas.Series)
       and target node (pandas.Series) where they refer to.
    """
    devices = Series()
    for device_dict in ensure_list(devices_input_dicts):
        # For every distinct quantity to be measured from Spiking or stimulated towards Spiking nodes...
        dev_names = device_dict.get("names", None)
        if dev_names is None:  # If no devices' names are given...
            devices = devices.append(
                build_and_connect_devices_one_to_one(device_dict,
                                                     create_device_fun,
                                                     connect_device_fun,
                                                     spiking_nodes,
                                                     config=config,
                                                     **kwargs))
        else:
            devices = devices.append(
                build_and_connect_devices_one_to_many(device_dict,
                                                      create_device_fun,
                                                      connect_device_fun,
                                                      spiking_nodes,
                                                      dev_names,
                                                      config=config,
                                                      **kwargs))
    return devices
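The branching above is driven solely by the presence of a "names" key in each device descriptor. A hedged sketch of the two cases (only "model" and "names" are keys confirmed by the code in this section; the values are assumptions):

# Without "names", the descriptor is routed to build_and_connect_devices_one_to_one;
# with "names", one device per name is built via build_and_connect_devices_one_to_many.
one_to_one_device = {"model": "spike_recorder"}
one_to_many_device = {"model": "poisson_generator",
                      "names": ["Stimulus_0", "Stimulus_1"]}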
Code Example #7
    def _GetFromConnections(self, attrs=None, connections=None):
        """Method to get attributes of the connections from/to the SpikingPopulation's neurons.
            Arguments:
             attrs: collection (list, tuple, array) of the attributes to be included in the output.
                    Default = None, corresponds to all attributes
             connections: nest.SynapseCollection, or a tuple of outgoing and incoming nest.SynapseCollection instances
                          Default = None, corresponding to all connections to/from the present population.
            Returns:
             Dictionary of tuples of connections' attributes.

        """
        if connections is None:
            connections = self._GetConnections()
        if isinstance(connections, tuple):
            if len(connections) == 1:
                connections = connections[0]
            else:
                # In case we deal with both source and target connections, treat them separately:
                outputs = []
                for connection in connections:
                    outputs.append(self._GetFromConnections(attrs, connection))
                return tuple(outputs)
        if attrs is None:
            return connections.get()
        else:
            return connections.get(ensure_list(attrs))
Code Example #8
 def _SetToConnections(self, values_dict, connections=None):
     """Method to set attributes of the connections from/to the SpikingPopulation's neurons.
        Arguments:
          values_dict: dictionary of attributes names' and values.
          connections: a Projection object or a collection (list, tuple, array) thereof.
                       Default = None, corresponding to all connections to/from the present population.
     """
     if connections is None:
         connections = self._GetConnections()
     if isinstance(connections, tuple):
         if len(connections) == 1:
             connections = connections[0]
         else:
             # In case we deal with both pre and post connections, treat them separately:
             for connection in connections:
                 self._SetToConnections(values_dict, connection)
             return
     for connection in ensure_list(connections):
         if connection in self.projections_pre or connection in self.projections_post:
             # connection.set(values_dict) <- this would be straightforward, but can generate
             # arbitrary attributes that get ignored by the projection but are readable with get()
             # TODO: figure out why this is bad, if it doesn't cause an error/Exception!
             for attr, value in values_dict.items():
                 if hasattr(connection, attr):
                     connection.set({attr: value})
                 else:
                     raise AttributeError(
                         "Projection %s has no attribute named %s." %
                         (connection.name, attr))
         else:
             raise AttributeError(
                 "No incoming projection %s associated to this %s of model %s with label %s."
                 % (connection.name, self.__class__.__name__, self.model,
                    self.label))
Code Example #9
 def __init__(self, interfaces, spiking_network, spiking_nodes_ids,
              tvb_nodes_ids, tvb_model, exclusive_nodes=False, config=CONFIGURED):
     self.interfaces = interfaces
     self.spiking_network = spiking_network
     self.spiking_nodes_ids = ensure_list(spiking_nodes_ids)
     self.tvb_nodes_ids = tvb_nodes_ids
     self.tvb_model = tvb_model
     self.exclusive_nodes = exclusive_nodes
     self.config = config
Code Example #10
 def _compile_select_funs(self, labels_ordering, **kwargs):
     select_funs = []
     for dim, lbl in enumerate(labels_ordering):
         indices_labels_slices = ensure_list(kwargs.pop(lbl, []))
         if len(indices_labels_slices) > 0:
             # Bind the loop variables via default arguments, to avoid Python's
             # late-binding closures (otherwise all functions would share the last dim/values):
             select_funs.append(
                 lambda ts, inds=indices_labels_slices, d=dim:
                     ts.slice_data_across_dimension(inds, d))
     return select_funs
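The default arguments in the lambda above matter because of Python's late binding of closure variables. A self-contained sketch of the difference:

# Without default-argument binding, every lambda built in a loop sees the final
# loop values; binding through defaults freezes the value at definition time.
late = [lambda: i for i in range(3)]
bound = [lambda i=i: i for i in range(3)]
print([f() for f in late])    # [2, 2, 2]
print([f() for f in bound])   # [0, 1, 2]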
Code Example #11
 def _convert_sequences_of_strings(self, sequence):
     new_sequence = []
     for val in ensure_list(sequence):
         if isinstance(val, string_types):
             new_sequence.append(numpy.string_(val))
         elif isinstance(val, (numpy.ndarray, tuple, list)):
             new_sequence.append(self._convert_sequences_of_strings(val))
         else:
             new_sequence.append(val)
     return numpy.array(new_sequence)
Code Example #12
 def _assert_connection_populations(self, connection):
     # This method will make sure that there are source and target user inputs for every population connection
     # and that every source/target population is already among the populations to be generated.
     for pop in ["source", "target"]:
         pops_labels = connection.get(pop, None)
         if pops_labels is None:
             raise_value_error("No %s population in connection!:\n%s" %
                               (pop, str(connection)))
         for pop_lbl in ensure_list(pops_labels):
             assert pop_lbl in self.populations_labels, \
                 "%s population %s is not among the populations to be generated!" % (pop, pop_lbl)
Code Example #13
 def _assert_input_size(self, values):
     values = ensure_list(values)
     n_vals = len(values)
     if n_vals not in [1, self.number_of_nodes]:
         raise ValueError("Values' number %d is neither equal to 1 "
                          "nor equal to nodes' number %d!" %
                          (n_vals, self.number_of_nodes))
     elif n_vals == 1:
         values *= self.number_of_nodes
     return values
Code Example #14
    def plot_spikes(self,
                    pop_spikes,
                    rates=None,
                    title=None,
                    figure_name=None,
                    figsize=None,
                    **kwargs):
        if isinstance(pop_spikes, DataArray):
            self.get_spikes_fun = \
                lambda spikes, i_region: spikes[:, :, i_region].values.squeeze()
            self.get_time = \
                lambda spikes, time: spikes.get_index(spikes.dims[0])
        else:
            self.get_spikes_fun = \
                lambda spikes, i_region: spikes[:, :, i_region].squeezed
            self.get_time = lambda spikes, time: time
        self._get_from_rates(rates)
        pop_spikes = ensure_list(pop_spikes)
        self.n_pops = len(pop_spikes)
        self.n_regions = np.max(
            [spikes.number_of_labels for spikes in pop_spikes])
        self._prepare_time_axes_adjustment()

        # Create figure
        title = self._default_title(title, rates is not None, figure_name=None)
        axes = []
        figure_name, figsize = self._get_figname_figsize(
            title, figure_name, figsize)
        pyplot.figure(figure_name, figsize=figsize)
        for i_pop, spikes in enumerate(pop_spikes):
            pop_label = spikes.labels_dimensions[spikes.labels_ordering[1]][0]
            spike_time = self.get_time(spikes, self.time)
            axes.append([])
            for i_region in range(spikes.number_of_labels):
                reg_label = spikes.labels_dimensions[
                    spikes.labels_ordering[2]][i_region]
                # Get spikes
                indices = np.argwhere(
                    self.get_spikes_fun(spikes, i_region) > 0.0)
                this_time = spike_time[indices[:, 0]]
                spike_senders_indices = indices[:, 1]
                self._neurons_axis_from_indices(spike_senders_indices)
                axes = self._plot(axes, i_pop, i_region, pop_label, reg_label,
                                  this_time, spike_senders_indices, kwargs)
        if self._adjust_time_axes_flag:
            axes = self._adjust_time_axes(axes)

        self._save_figure(pyplot.gcf(), figure_name)
        self._check_show()

        return pyplot.gcf(), axes
Code Example #15
 def _get_senders(self, population, neurons_ranks):
     population_ind = self.annarchy_instance.Global._network[0][
         "populations"].index(population)
     if len(self.monitors) > 1:
         senders = [
             "%d_%d" % (population_ind, neuron_rank)
             for neuron_rank in ensure_list(neurons_ranks)
         ]
         if len(senders) == 1:
             senders = senders[0]
         return senders
     else:
         return neurons_ranks
Code Example #16
def read_nest_output_device_data_from_ascii_to_dict(filepath):
    """This function reads data from a NEST recording device ascii file into an events dictionary
       Arguments:
        - filepath: absolute or relative path to the file (string)
       Returns:
        the events dictionary of the recorded data
    """
    recarray = rename_fields(
        np.genfromtxt(filepath, names=True, skip_header=2), {
            "sender": "senders",
            "time_ms": "times"
        })
    return {name: ensure_list(recarray[name]) for name in recarray.dtype.names}
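A self-contained sketch of the parsing step, using an in-memory file whose two header lines and column names mimic (as an assumption) the layout written by NEST 3's ASCII recording backend:

import io
import numpy as np
from numpy.lib.recfunctions import rename_fields

ascii_text = ("# NEST version: 3.x\n"
              "# RecordingBackendASCII version: 2\n"
              "sender time_ms\n"
              "1 0.1\n"
              "2 0.2\n")
recarray = rename_fields(
    np.genfromtxt(io.StringIO(ascii_text), names=True, skip_header=2),
    {"sender": "senders", "time_ms": "times"})
print(recarray["senders"].tolist(), recarray["times"].tolist())  # [1.0, 2.0] [0.1, 0.2]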
Code Example #17
 def configure_compound_poisson_process(self):
     self._configure()
     if self.shift is not None:
         if not isinstance(self.shift, pq.unitquantity.UnitTime):
             self.shift *= pq.ms
     self.rate = _assert_shape(self.rate, np.ones(self._shape))
     self.rate = self.rate.flatten()
     if self.A is None:
         self.A = np.array([0.0, 1.0] + (self._size - 1) * [0.0])
     else:
         self.A = ensure_list(self.A)
         self.A += [0.0] * (self._size - len(self.A))
     self._configured = True
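The default A built above, [0.0, 1.0, 0.0, ...], reads naturally as an amplitude (synchrony-size) distribution in the style of elephant's compound_poisson_process, where A[j] is the probability that an event involves j neurons; this interpretation is an assumption about the surrounding class. A small sketch:

# Hedged sketch of the assumed amplitude-distribution convention: index j holds
# the probability of j neurons spiking together; entries must sum to 1.
size = 4                                         # assumed number of spike trains
A_independent = [0.0, 1.0] + (size - 1) * [0.0]  # every event hits exactly one neuron
A_synchronous = [0.0, 0.5, 0.0, 0.0, 0.5]        # half of the events are fully synchronous
assert abs(sum(A_independent) - 1.0) < 1e-12
assert abs(sum(A_synchronous) - 1.0) < 1e-12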
Code Example #18
 def confirm_compile_install_nest_models(self, models):
     """This method will try to confirm the existence of the input NEST models,
        and if they don't exist, it will try to install them,
        and possibly compile them, by determining the modules' names from the ones of the models.
        Arguments:
         models: a sequence (list, tuple) of the names (strings)
                 of the models to be confirmed, and/or installed and, possibly, compiled
     """
     nest_models = self.nest_instance.Models()
     models = ensure_list(models)
     for model in models:  # , module # zip(models, cycle(modules_to_install)):
         if model not in nest_models:
             self._compile_install_nest_module(model)
Code Example #19
 def _GetFromConnections(self, attrs=None, connections=None):
     """Method to get attributes of the connections from/to the device
        Arguments:
         attrs: collection (list, tuple, array) of the attributes to be included in the output.
                Default = None, corresponding to all devices' attributes
         connections: A SynapseCollection. Default = None, corresponding to all device's connections
        Returns:
         Dictionary of lists of connections' attributes.
     """
     if connections is None:
         connections = self._GetConnections()
     if attrs is None:
         return connections.get()
     else:
         return connections.get(ensure_list(attrs))
Code Example #20
 def _Get(self, attrs=None, neurons=None):
     """Method to get attributes of the SpikingPopulation's neurons.
        Arguments:
         attrs: collection (list, tuple, array) of the attributes to be included in the output.
                Default = None, corresponding to all attributes
         neurons: instance of a NodeCollection class,
                  or sequence (list, tuple, array) of neurons the attributes of which should be returned.
                  Default = None, corresponds to all neurons of the population.
        Returns:
         Dictionary of tuples of neurons' attributes.
     """
     if attrs is None:
         return self._assert_neurons(neurons).get()
     else:
         return self._assert_neurons(neurons).get(ensure_list(attrs))
Code Example #21
 def connect_within_node_spiking_populations(self):
     """Method to connect all populations withing each Spiking brain region node."""
     # For every different type of connections between distinct Spiking nodes' populations
     for i_conn, conn in enumerate(
             ensure_list(self._populations_connections)):
         # ...and for every brain region node where this connection will be created:
         for node_index in conn["nodes"]:
             i_node = np.where(self.spiking_nodes_ids == node_index)[0][0]
             # ...create a synapse parameters dictionary, from the configured inputs:
             syn_spec = self.set_synapse(
                 conn["synapse_model"], conn['weight'](node_index),
                 self._assert_delay(conn['delay'](node_index)),
                 conn['receptor_type'](node_index),
                 conn["params"](node_index))
             # ...and for every combination of source...
             for pop_src in ensure_list(conn["source"]):
                 # ...and target populations of this connection...
                 for pop_trg in ensure_list(conn["target"]):
                     # ...connect the two populations:
                     self.connect_two_populations(
                         self._spiking_brain[i_node][pop_src],
                         conn["source_inds"],
                         self._spiking_brain[i_node][pop_trg],
                         conn["target_inds"], conn["conn_spec"], syn_spec)
Code Example #22
    def record_from(self):
        if self._record_from:
            return self._record_from
        else:
            self._record_from = []
            for monitor, population in self.monitors.items():
                if len(self._record_from):
                    if np.any(self._record_from != monitor.variables):
                        raise ValueError(
                            "Monitor %s of Device %s records from variables %s,\n"
                            "which are different from the previously set ones %s!"
                            % (population.name, self.label,
                               str(monitor.variables), str(self._record_from)))

                else:
                    self._record_from = ensure_list(monitor.variables)
        return self._record_from
Code Example #23
 def _get_devices(self,
                  output_device_dict,
                  devices_type,
                  regions=None,
                  populations_devices=None,
                  **kwargs):
     """This method will return all Spiking Network's recording devices of a given type.
        Arguments:
         - output_device_dict: a dictionary mapping device names (keys) to device classes (values).
         - devices_type: the name of the device type (string).
         - regions: an optional sequence (list, tuple, array) of regions' nodes' indices to be selected.
                    Default = None, corresponding to returning the devices of all regions' nodes.
         - populations_devices: an optional sequence (list, tuple, array) of
                                populations' devices' labels to be selected.
                                Default = None, corresponding to returning the devices of all populations.
         - **kwargs: other keyword arguments. See get_spikes_devices method.
        Returns:
         - a Series of selected DeviceSet instances
     """
     devices = pd.Series()
     mode = kwargs.get("mode", None)
     if mode and mode.find("activity") > -1:
         devices = self.spiking_network.get_devices_by_model(
             "spike_multimeter", regions=regions)
     else:
         for device_name in output_device_dict.keys():
             devices = devices.append(
                 self.get_devices_by_model(device_name, regions=regions))
     if len(devices) == 0:
         LOG.warning(
             "No %s recording device in this Spiking Network network!" %
             devices_type)
         return devices
     if populations_devices is not None:
         populations_devices = np.intersect1d(
             list(devices.index),
             ensure_list(populations_devices)).tolist()
         if len(populations_devices) == 0:
             LOG.warning(
                 "No s recording device left after user selection!" %
                 devices_type)
             return devices
         devices = devices[populations_devices]
     return devices
Code Example #24
def build_and_connect_devices_one_to_many(device_dict,
                                          create_device_fun,
                                          connect_device_fun,
                                          spiking_nodes,
                                          names,
                                          config=CONFIGURED,
                                          **kwargs):
    """This function will create a DeviceSet for a measuring (output) or input (stimulating) quantity,
       whereby each device will target more than one SpikingRegionNode instances,
       e.g., as is the case for a TVB "proxy" node
       stimulating several of the SpikingRegionNodes in the spiking network."""
    devices = Series()
    # Determine the connections from variables to measure/stimulate to Spiking node populations
    connections, device_target_nodes = _get_connections(
        device_dict, spiking_nodes)
    # Determine the device's parameters and connections' properties
    weights, delays, receptor_types, neurons_funs = \
        _get_device_props_with_correct_shape(device_dict, (len(names), len(device_target_nodes)))
    # For every Spiking population variable to be stimulated or measured...
    for pop_var, populations in connections.items():
        populations = ensure_list(populations)
        # This set of devices will be for variable pop_var...
        devices[pop_var] = DeviceSet(pop_var, device_dict["model"])
        # ...and for every device name...
        for i_dev, dev_name in enumerate(names):
            # ...and populations' group...
            # create a device
            kwargs.update({"label": "%s_%s" % (pop_var, dev_name)})
            devices[pop_var][dev_name] = build_device(device_dict,
                                                      create_device_fun,
                                                      config=config,
                                                      **kwargs)
            # ...and loop through the target region nodes...
            for i_node, node in enumerate(device_target_nodes):
                # ...and populations' groups...
                # ...to connect it:
                for pop in populations:
                    devices[pop_var][dev_name] = \
                        connect_device_fun(devices[pop_var][dev_name], node[pop], neurons_funs[i_dev, i_node],
                                           weights[i_dev, i_node], delays[i_dev, i_node], receptor_types[i_dev, i_node],
                                           config=config, **kwargs)
        devices[pop_var].update()
    return devices
Code Example #25
 def build_interface(self, interface, interface_id):
     # One interface for every combination of Spiking node
     # and TVB state variable to be transmitted
     # from TVB to Spiking Network
     connections = interface["connections"]
     if isinstance(connections, string_types):
         connections = {connections: slice(None)}  # return all population types
     default_parameter = self._build_target_class._available_input_parameters[interface["model"]]
     spiking_nodes_ids = interface.get("nodes", self.spiking_nodes_ids)
     if spiking_nodes_ids is None:
         spiking_nodes_ids = self.spiking_nodes_ids
     spiking_nodes_ids = list(spiking_nodes_ids)
     if self.exclusive_nodes:
          assert all(spiking_node not in self.tvb_nodes_ids for spiking_node in spiking_nodes_ids)
     interface_weight_fun = property_to_fun(interface.get("interface_weights", 1.0))
     # Default behavior for any region node and any combination of populations
     # is to target all of their neurons:
     neurons_inds_fun = interface.pop("neurons_inds", None)
     if neurons_inds_fun is not None:
         neurons_inds_fun = property_to_fun(neurons_inds_fun)
     shape = (len(spiking_nodes_ids),)
     interface_weights = np.ones(shape).astype("O")
     neurons_inds = np.tile([None], shape).astype("O")
     for i_node, spiking_node_id in enumerate(spiking_nodes_ids):
         interface_weights[i_node] = interface_weight_fun(spiking_node_id)
         if neurons_inds_fun is not None:
             neurons_inds[i_node] = lambda neurons_inds: neurons_inds_fun(spiking_node_id, neurons_inds)
     tvb_to_spikeNet_interfaces = Series()
     for name, populations in connections.items():
         try:
             tvb_coupling_id = self.tvb_model.cvar.tolist().index(
                 self.tvb_model.state_variables.index(name))
          except ValueError:
             raise_value_error("Failed to compute the coupling index of TVB state variable %s!" % name)
         interface_index = "%d_%s->%s" % (interface_id, name, str(populations))
         tvb_to_spikeNet_interfaces[interface_index] = \
             self._build_target_class(self.spiking_network, name, interface["model"],
                                      interface.get("parameter", default_parameter),
                                      tvb_coupling_id, spiking_nodes_ids, interface_weights)
         for i_node in spiking_nodes_ids:
             node = self.spiking_network.brain_regions[self.spiking_nodes_ids.index(i_node)]
             tvb_to_spikeNet_interfaces[interface_index][node.label] = node[ensure_list(populations)]
      return tvb_to_spikeNet_interfaces
Code Example #26
def build_and_connect_device(device,
                             create_device_fun,
                             connect_device_fun,
                             node,
                             populations,
                             inds_fun,
                             weight=1.0,
                             delay=0.0,
                             receptor_type=None,
                             config=CONFIGURED,
                             **kwargs):
    """This method will build a device and connect it to the spiking network
       based on the input create_device_fun, and connect_device_fun functions,
       which are specific to every spiking simulator.
       Arguments:
        device: either a name (string) of a device model, or a dictionary of properties for the device to build
        create_device_fun: a function to build the device
        connect_device_fun: a function to connect the device
        node: the target SpikingRegionNode class instance
        populations: target populations' labels
        inds_fun: a function to select a subset of each population's neurons
        weight: the weight of the connection. Default = 1.0
        delay: the delay of the connection. Default = 0.0
        receptor_type: the synaptic receptor type of the connection. Default = None,
                       which will be resolved in a way specific to each spiking simulator.
        config: a configuration class instance. Default = CONFIGURED (default configuration)
        **kwargs: other possible keyword arguments, to be passed to the device builder.
       Returns:
        the built and connected Device class instance
    """
    device = build_device(device, create_device_fun, config=config, **kwargs)
    for pop in ensure_list(populations):
        device = connect_device_fun(device,
                                    node[pop],
                                    inds_fun,
                                    weight,
                                    delay,
                                    receptor_type,
                                    config=config,
                                    **kwargs)
    device._number_of_connections = device.number_of_connections
    return device
Code Example #27
 def get_devices_by_model(self, model, regions=None):
     """This method will loop though all network's devices to return all devices of a given model.
        Arguments:
         - model: the model name (string),
         - regions: an optional sequence (list, tuple, array) of regions' nodes' indices to be selected.
                    Default = None, corresponding to returning the devices of all regions' nodes.
        Returns:
         - a Series of selected DeviceSet instances
     """
     # Get all devices set of a given model
     devices = pd.Series()
     if regions is None:
         get_device = lambda device, regions: device
     else:
         nodes = ensure_list(regions)
         get_device = lambda device, regions: device[nodes]
     for i_pop, (pop_label,
                 pop_device) in enumerate(self.output_devices.iteritems()):
         if pop_device.model == model:
             devices[pop_label] = get_device(pop_device, regions)
     return devices
Code Example #28
def connect_output_device(annarchy_device, population, neurons_inds_fun=None):
    """This function connect an ANNarchyOutputDevice to an ANNarchyPopulation instance,
       by also populating it with an ANNarchy Monitor.
           Arguments:
            annarchy_device: the ANNarchyInputDevice instance
            population: the ANNarchyPopulation instance
            neurons_inds_fun: a function to return an ANNarchy PopulationView of the target population. Default = None.
           Returns:
            the connected ANNarchyOutputDevice
        """
    neurons = get_populations_neurons(population, neurons_inds_fun)
    params = deepcopy(annarchy_device.params)
    record_from = ensure_list(params.pop("record_from"))
    # Create a connection by adding an ANNarchy Monitor targeting the specific neurons of this population:
    monitor = annarchy_device.annarchy_instance.Monitor(neurons, record_from, **params)
    monitor.name = "%s_%d" % (annarchy_device.label, len(annarchy_device.monitors) + 1)
    annarchy_device.monitors[monitor] = neurons
    # Update the number of connections and connected neurons to the device:
    annarchy_device._number_of_connections = annarchy_device.get_number_of_connections()
    annarchy_device._number_of_neurons = annarchy_device.get_number_of_neurons()
    return annarchy_device
Code Example #29
 def _assert_neurons(self, neurons=None):
     """Method to assert an input set of neurons either as:
         - the present instance of ANNarchy.Population class
         - a ANNarchy.PopulationView instance of the present instance of ANNarchy.Population class
         - a collection (tuple, list, numpy.ndarray) of global indices (i.e., tuples of (population_ind, neuron_ind))
           of the present instance of ANNarchy.Population class, or of local indices thereof,
         Default input = None, which corresponds to the present instance of ANNarchy.Population class.
     """
     if neurons is None:
         neurons = self._population
     else:
         self._assert_annarchy()
         if isinstance(neurons, self.annarchy_instance.Population):
             # Assert that we refer to this object's Population
             assert self._population == neurons
         elif isinstance(neurons, self.annarchy_instance.PopulationView):
             # Assert that we refer to a view of this object's Population
             assert self._population == neurons.population
         else:
             # Let's check if these are global or local indices of neurons...
             local_inds = []
             for neuron in ensure_list(neurons):
                 if isinstance(neuron, (tuple, list)):
                     # If neurons are global_ids formed as tuples of (population_ind, neuron_ind)...
                     if neuron[0] == self.population_ind:
                         # ...confirm that the population_ind is correct and get the neuron_ind:
                         local_inds.append(neuron[1])
                     else:
                         raise ValueError(
                             "Global index %s does not refer to this population with index %d!"
                             % (str(neuron), self.population_ind))
                 elif is_integer(neuron):
                     # If neurons are just local indices, gather them...
                     local_inds.append(neuron)
                 else:
                     raise ValueError(
                         "neurons %s\nis neither an instance of ANNarchy.Population, "
                         "nor of ANNarchy.PopulationView,\n"
                         "nor is it a collection (tuple, list, or numpy.ndarray) "
                         "of global (tuples of (population_ind, neuron_ind)) or local indices of neurons!"
                         % str(neurons))
             # Return a PopulationView of the collected local indices:
             neurons = self._population[local_inds]
     return neurons
Code Example #30
 def __init__(self,
              targets=["spike"],
              rate=10.0,
              A=None,
              number_of_regions=1,
              number_of_neurons=1,
              target_regions=None,
              t_start=0.0,
              dt=0.1,
              time_length=1000,
              shift=None,
              refractory_period=None,
              sparse=None,
              squeeze=False,
              return_type="Dict",
              return_array_type="Numpy",
              logger=None):
     self.targets = ensure_list(targets)
      self.number_of_targets = len(self.targets)
      if self.number_of_targets < 1:
         raise ValueError("No target parameters have been provided!")
     self.rate = rate
     self.A = A
     self.number_of_regions = number_of_regions
     self.number_of_neurons = number_of_neurons
     self.target_regions = target_regions
     self.t_start = t_start * pq.ms
     self.dt = dt * pq.ms
     self.time_length = time_length
     self.shift = shift
     self.refractory_period = refractory_period
     self._sparse = sparse
     self._shape = None
     self._squeeze = squeeze
     self._return = return_type
     self._return_array = return_array_type
     self._log = logger
     if self._log is None:
         self._log = initialize_logger(__name__, os.path.dirname(__file__))